From e4b20cc6f7c31f48ddd0de5bcd054b09a35cd510 Mon Sep 17 00:00:00 2001
From: sg481n
Date: Wed, 23 Aug 2017 16:30:52 -0400
Subject: Update project structure to org.onap
Update project structure of dmaap/datarouter from
com.att to org.onap and add distribution management
and nexus repositories details.
Issue-id: DMAAP-52
Change-Id: Ibafee1cba43c7c5a3f227a02417998d36ecaae6b
Signed-off-by: sg481n
---
datarouter-prov/pom.xml | 131 ++--
.../datarouter/authz/AuthorizationResponse.java | 58 --
.../authz/AuthorizationResponseSupplement.java | 52 --
.../att/research/datarouter/authz/Authorizer.java | 62 --
.../datarouter/authz/impl/AuthRespImpl.java | 97 ---
.../authz/impl/AuthRespSupplementImpl.java | 71 --
.../datarouter/authz/impl/AuthzResource.java | 100 ---
.../datarouter/authz/impl/ProvAuthorizer.java | 179 -----
.../datarouter/authz/impl/ProvDataProvider.java | 66 --
.../research/datarouter/authz/impl/package.html | 68 --
.../com/att/research/datarouter/authz/package.html | 38 -
.../datarouter/provisioning/BaseServlet.java | 869 ---------------------
.../datarouter/provisioning/DRFeedsServlet.java | 300 -------
.../datarouter/provisioning/FeedLogServlet.java | 38 -
.../datarouter/provisioning/FeedServlet.java | 362 ---------
.../datarouter/provisioning/GroupServlet.java | 386 ---------
.../datarouter/provisioning/InternalServlet.java | 506 ------------
.../datarouter/provisioning/LogServlet.java | 433 ----------
.../att/research/datarouter/provisioning/Main.java | 245 ------
.../research/datarouter/provisioning/Poker.java | 318 --------
.../datarouter/provisioning/ProxyServlet.java | 304 -------
.../datarouter/provisioning/PublishServlet.java | 192 -----
.../datarouter/provisioning/RouteServlet.java | 429 ----------
.../datarouter/provisioning/StatisticsServlet.java | 588 --------------
.../datarouter/provisioning/SubLogServlet.java | 39 -
.../datarouter/provisioning/SubscribeServlet.java | 288 -------
.../provisioning/SubscriptionServlet.java | 476 -----------
.../datarouter/provisioning/SynchronizerTask.java | 614 ---------------
.../provisioning/beans/BaseLogRecord.java | 184 -----
.../datarouter/provisioning/beans/Deleteable.java | 41 -
.../provisioning/beans/DeliveryExtraRecord.java | 68 --
.../provisioning/beans/DeliveryRecord.java | 137 ----
.../datarouter/provisioning/beans/EgressRoute.java | 227 ------
.../provisioning/beans/EventLogRecord.java | 84 --
.../provisioning/beans/ExpiryRecord.java | 141 ----
.../datarouter/provisioning/beans/Feed.java | 760 ------------------
.../provisioning/beans/FeedAuthorization.java | 96 ---
.../provisioning/beans/FeedEndpointID.java | 87 ---
.../datarouter/provisioning/beans/FeedLinks.java | 103 ---
.../datarouter/provisioning/beans/Group.java | 417 ----------
.../provisioning/beans/IngressRoute.java | 542 -------------
.../datarouter/provisioning/beans/Insertable.java | 41 -
.../datarouter/provisioning/beans/JSONable.java | 40 -
.../datarouter/provisioning/beans/LOGJSONable.java | 40 -
.../datarouter/provisioning/beans/Loadable.java | 65 --
.../datarouter/provisioning/beans/LogRecord.java | 235 ------
.../provisioning/beans/NetworkRoute.java | 230 ------
.../datarouter/provisioning/beans/NodeClass.java | 179 -----
.../datarouter/provisioning/beans/Parameters.java | 257 ------
.../provisioning/beans/PubFailRecord.java | 85 --
.../provisioning/beans/PublishRecord.java | 153 ----
.../datarouter/provisioning/beans/SubDelivery.java | 109 ---
.../datarouter/provisioning/beans/SubLinks.java | 95 ---
.../provisioning/beans/Subscription.java | 511 ------------
.../datarouter/provisioning/beans/Syncable.java | 57 --
.../datarouter/provisioning/beans/Updateable.java | 40 -
.../datarouter/provisioning/beans/package.html | 31 -
.../datarouter/provisioning/eelf/EelfMsgs.java | 56 --
.../datarouter/provisioning/eelf/JettyFilter.java | 38 -
.../research/datarouter/provisioning/package.html | 123 ---
.../research/datarouter/provisioning/utils/DB.java | 711 -----------------
.../datarouter/provisioning/utils/DRRouteCLI.java | 456 -----------
.../provisioning/utils/JSONUtilities.java | 76 --
.../provisioning/utils/LogfileLoader.java | 549 -------------
.../provisioning/utils/PurgeLogDirTask.java | 70 --
.../datarouter/provisioning/utils/RLEBitSet.java | 418 ----------
.../provisioning/utils/ThrottleFilter.java | 316 --------
.../provisioning/utils/URLUtilities.java | 130 ---
.../datarouter/provisioning/utils/package.html | 30 -
.../datarouter/reports/DailyLatencyReport.java | 194 -----
.../research/datarouter/reports/FeedReport.java | 395 ----------
.../research/datarouter/reports/LatencyReport.java | 179 -----
.../att/research/datarouter/reports/Report.java | 155 ----
.../research/datarouter/reports/ReportBase.java | 63 --
.../datarouter/reports/SubscriberReport.java | 157 ----
.../research/datarouter/reports/VolumeReport.java | 140 ----
.../att/research/datarouter/reports/package.html | 43 -
.../datarouter/authz/AuthorizationResponse.java | 58 ++
.../authz/AuthorizationResponseSupplement.java | 52 ++
.../onap/dmaap/datarouter/authz/Authorizer.java | 62 ++
.../dmaap/datarouter/authz/impl/AuthRespImpl.java | 97 +++
.../authz/impl/AuthRespSupplementImpl.java | 71 ++
.../dmaap/datarouter/authz/impl/AuthzResource.java | 100 +++
.../datarouter/authz/impl/ProvAuthorizer.java | 178 +++++
.../datarouter/authz/impl/ProvDataProvider.java | 66 ++
.../onap/dmaap/datarouter/authz/impl/package.html | 68 ++
.../org/onap/dmaap/datarouter/authz/package.html | 38 +
.../dmaap/datarouter/provisioning/BaseServlet.java | 868 ++++++++++++++++++++
.../datarouter/provisioning/DRFeedsServlet.java | 300 +++++++
.../datarouter/provisioning/FeedLogServlet.java | 38 +
.../dmaap/datarouter/provisioning/FeedServlet.java | 362 +++++++++
.../datarouter/provisioning/GroupServlet.java | 385 +++++++++
.../datarouter/provisioning/InternalServlet.java | 506 ++++++++++++
.../dmaap/datarouter/provisioning/LogServlet.java | 433 ++++++++++
.../onap/dmaap/datarouter/provisioning/Main.java | 244 ++++++
.../onap/dmaap/datarouter/provisioning/Poker.java | 317 ++++++++
.../datarouter/provisioning/ProxyServlet.java | 303 +++++++
.../datarouter/provisioning/PublishServlet.java | 192 +++++
.../datarouter/provisioning/RouteServlet.java | 428 ++++++++++
.../datarouter/provisioning/StatisticsServlet.java | 588 ++++++++++++++
.../datarouter/provisioning/SubLogServlet.java | 39 +
.../datarouter/provisioning/SubscribeServlet.java | 288 +++++++
.../provisioning/SubscriptionServlet.java | 476 +++++++++++
.../datarouter/provisioning/SynchronizerTask.java | 613 +++++++++++++++
.../provisioning/beans/BaseLogRecord.java | 184 +++++
.../datarouter/provisioning/beans/Deleteable.java | 41 +
.../provisioning/beans/DeliveryExtraRecord.java | 68 ++
.../provisioning/beans/DeliveryRecord.java | 137 ++++
.../datarouter/provisioning/beans/EgressRoute.java | 226 ++++++
.../provisioning/beans/EventLogRecord.java | 84 ++
.../provisioning/beans/ExpiryRecord.java | 141 ++++
.../dmaap/datarouter/provisioning/beans/Feed.java | 759 ++++++++++++++++++
.../provisioning/beans/FeedAuthorization.java | 96 +++
.../provisioning/beans/FeedEndpointID.java | 87 +++
.../datarouter/provisioning/beans/FeedLinks.java | 103 +++
.../dmaap/datarouter/provisioning/beans/Group.java | 416 ++++++++++
.../provisioning/beans/IngressRoute.java | 541 +++++++++++++
.../datarouter/provisioning/beans/Insertable.java | 41 +
.../datarouter/provisioning/beans/JSONable.java | 40 +
.../datarouter/provisioning/beans/LOGJSONable.java | 40 +
.../datarouter/provisioning/beans/Loadable.java | 65 ++
.../datarouter/provisioning/beans/LogRecord.java | 235 ++++++
.../provisioning/beans/NetworkRoute.java | 229 ++++++
.../datarouter/provisioning/beans/NodeClass.java | 178 +++++
.../datarouter/provisioning/beans/Parameters.java | 256 ++++++
.../provisioning/beans/PubFailRecord.java | 85 ++
.../provisioning/beans/PublishRecord.java | 153 ++++
.../datarouter/provisioning/beans/SubDelivery.java | 109 +++
.../datarouter/provisioning/beans/SubLinks.java | 95 +++
.../provisioning/beans/Subscription.java | 511 ++++++++++++
.../datarouter/provisioning/beans/Syncable.java | 57 ++
.../datarouter/provisioning/beans/Updateable.java | 40 +
.../datarouter/provisioning/beans/package.html | 31 +
.../datarouter/provisioning/eelf/EelfMsgs.java | 56 ++
.../datarouter/provisioning/eelf/JettyFilter.java | 38 +
.../dmaap/datarouter/provisioning/package.html | 123 +++
.../dmaap/datarouter/provisioning/utils/DB.java | 710 +++++++++++++++++
.../datarouter/provisioning/utils/DRRouteCLI.java | 456 +++++++++++
.../provisioning/utils/JSONUtilities.java | 76 ++
.../provisioning/utils/LogfileLoader.java | 548 +++++++++++++
.../provisioning/utils/PurgeLogDirTask.java | 70 ++
.../datarouter/provisioning/utils/RLEBitSet.java | 418 ++++++++++
.../provisioning/utils/ThrottleFilter.java | 315 ++++++++
.../provisioning/utils/URLUtilities.java | 130 +++
.../datarouter/provisioning/utils/package.html | 30 +
.../datarouter/reports/DailyLatencyReport.java | 194 +++++
.../onap/dmaap/datarouter/reports/FeedReport.java | 394 ++++++++++
.../dmaap/datarouter/reports/LatencyReport.java | 179 +++++
.../org/onap/dmaap/datarouter/reports/Report.java | 155 ++++
.../onap/dmaap/datarouter/reports/ReportBase.java | 63 ++
.../dmaap/datarouter/reports/SubscriberReport.java | 157 ++++
.../dmaap/datarouter/reports/VolumeReport.java | 140 ++++
.../org/onap/dmaap/datarouter/reports/package.html | 43 +
.../test/java/datarouter/provisioning/FillDB.java | 3 +-
.../java/datarouter/provisioning/testBase.java | 3 +-
.../java/datarouter/provisioning/testCleanup.java | 3 +-
.../datarouter/provisioning/testDRFeedsDelete.java | 3 +-
.../datarouter/provisioning/testDRFeedsGet.java | 3 +-
.../datarouter/provisioning/testDRFeedsPost.java | 3 +-
.../datarouter/provisioning/testDRFeedsPut.java | 3 +-
.../datarouter/provisioning/testFeedDelete.java | 3 +-
.../java/datarouter/provisioning/testFeedPut.java | 3 +-
.../datarouter/provisioning/testInternalGet.java | 5 +-
.../datarouter/provisioning/testInternalMisc.java | 3 +-
.../java/datarouter/provisioning/testLogGet.java | 3 +-
.../java/datarouter/provisioning/testPublish.java | 3 +-
.../datarouter/provisioning/testRLEBitSet.java | 3 +-
.../datarouter/provisioning/testSubscribePost.java | 5 +-
168 files changed, 16589 insertions(+), 16636 deletions(-)
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/AuthorizationResponse.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/AuthorizationResponseSupplement.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/Authorizer.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthRespImpl.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthRespSupplementImpl.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthzResource.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/ProvAuthorizer.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/ProvDataProvider.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/package.html
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/authz/package.html
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/BaseServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/DRFeedsServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/FeedLogServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/FeedServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/GroupServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/InternalServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/LogServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/Main.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/Poker.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/ProxyServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/PublishServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/RouteServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/StatisticsServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubLogServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubscribeServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubscriptionServlet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SynchronizerTask.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/BaseLogRecord.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Deleteable.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/DeliveryExtraRecord.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/DeliveryRecord.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/EgressRoute.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/EventLogRecord.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/ExpiryRecord.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Feed.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedAuthorization.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedEndpointID.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedLinks.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Group.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/IngressRoute.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Insertable.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/JSONable.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/LOGJSONable.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Loadable.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/LogRecord.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/NetworkRoute.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/NodeClass.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Parameters.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/PubFailRecord.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/PublishRecord.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/SubDelivery.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/SubLinks.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Subscription.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Syncable.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Updateable.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/package.html
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/eelf/EelfMsgs.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/eelf/JettyFilter.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/package.html
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/DB.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/DRRouteCLI.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/JSONUtilities.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/LogfileLoader.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/PurgeLogDirTask.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/RLEBitSet.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/ThrottleFilter.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/URLUtilities.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/package.html
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/reports/DailyLatencyReport.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/reports/FeedReport.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/reports/LatencyReport.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/reports/Report.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/reports/ReportBase.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/reports/SubscriberReport.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/reports/VolumeReport.java
delete mode 100644 datarouter-prov/src/main/java/com/att/research/datarouter/reports/package.html
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/AuthorizationResponse.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/AuthorizationResponseSupplement.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/Authorizer.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespImpl.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespSupplementImpl.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthzResource.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthorizer.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvDataProvider.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/package.html
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/package.html
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/DRFeedsServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/FeedLogServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/FeedServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/GroupServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/InternalServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/LogServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Main.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/Poker.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/ProxyServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/PublishServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/RouteServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubLogServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscribeServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SubscriptionServlet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/BaseLogRecord.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Deleteable.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/DeliveryExtraRecord.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/DeliveryRecord.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EgressRoute.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/EventLogRecord.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/ExpiryRecord.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Feed.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/FeedAuthorization.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/FeedEndpointID.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/FeedLinks.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/IngressRoute.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Insertable.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/JSONable.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LOGJSONable.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Loadable.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LogRecord.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NetworkRoute.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/NodeClass.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/PubFailRecord.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/PublishRecord.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/SubDelivery.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/SubLinks.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Subscription.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Syncable.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Updateable.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/package.html
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/eelf/EelfMsgs.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/eelf/JettyFilter.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/package.html
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DRRouteCLI.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/JSONUtilities.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/RLEBitSet.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/ThrottleFilter.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/URLUtilities.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/package.html
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/FeedReport.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/Report.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/ReportBase.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
create mode 100644 datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/package.html
(limited to 'datarouter-prov')
diff --git a/datarouter-prov/pom.xml b/datarouter-prov/pom.xml
index 51c995de..80eecc76 100644
--- a/datarouter-prov/pom.xml
+++ b/datarouter-prov/pom.xml
@@ -21,14 +21,17 @@
*
-->
- 4.0.0
-
- com.att.datarouter-prov
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ 4.0.0
+
+ org.onap.dmaap.datarouter
+ parent
+ 1.0.0-SNAPSHOT
+ ../pom.xml
+
+
datarouter-prov
- 1.0.0-SNAPSHOT
jar
-
datarouter-prov
https://github.com/att/DMAAP_DATAROUTER
@@ -42,7 +45,11 @@
1.8
1.8
${basedir}/target/
- hub.docker.com
+ https://nexus.onap.org
+ /content/repositories/snapshots/
+ /content/repositories/releases/
+ /content/repositories/staging/
+ /content/sites/site/${project.groupId}/${project.artifactId}/${project.version}
@@ -262,7 +269,7 @@
true
- com.att.research.datarouter.provisioning.Main
+ org.onap.datarouter.provisioning.Main
@@ -286,7 +293,7 @@
true
- com.att.research.datarouter.provisioning.Main
+ org.onap.datarouter.provisioning.Main
${basedir}/target/opt/app/datartr/lib
@@ -341,24 +348,6 @@
-
- com.blackducksoftware.integration
- hub-maven-plugin
- 1.0.4
- false
-
- ${project.basedir}
-
-
-
- create-bdio-file
- package
-
- createHubOutput
-
-
-
-
maven-resources-plugin
@@ -493,35 +482,7 @@
-
-
-
-
- org.apache.maven.plugins
- maven-gpg-plugin
- 1.5
-
-
- sign-artifacts
- verify
-
- sign
-
-
-
-
-
-
- org.sonatype.plugins
- nexus-staging-maven-plugin
- 1.6.7
- true
-
- ossrhdme
- https://oss.sonatype.org/
- true
-
-
+
org.codehaus.mojo
@@ -536,23 +497,49 @@
-
-
-
- ossrhdme
- https://oss.sonatype.org/content/repositories/snapshots
-
-
- ossrhdme
- https://oss.sonatype.org/service/local/staging/deploy/maven2/
-
+
+ ecomp-releases
+ AAF Release Repository
+ ${nexusproxy}${releaseNexusPath}
+
+
+ ecomp-snapshots
+ AAF Snapshot Repository
+ ${nexusproxy}${snapshotNexusPath}
+
+
+ ecomp-site
+ dav:${nexusproxy}${sitePath}
+
-
-
- https://github.com/att/DMAAP_DATAROUTER.git
- ${project.scm.connection}
- https://github.com/att/DMAAP_DATAROUTER/tree/master
-
+
+
+ onap-plugin-snapshots
+ https://nexus.onap.org/content/repositories/snapshots/
+
+
+
+
+
+ central
+ Maven 2 repository 2
+ http://repo2.maven.org/maven2/
+
+
+ onap-jar-snapshots
+ https://nexus.onap.org/content/repositories/snapshots
+
+
+ spring-repo
+ Spring repo
+ https://artifacts.alfresco.com/nexus/content/repositories/public/
+
+
+ repository.jboss.org-public
+ JBoss.org Maven repository
+ https://repository.jboss.org/nexus/content/groups/public
+
+
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/AuthorizationResponse.java b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/AuthorizationResponse.java
deleted file mode 100644
index 26956f82..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/AuthorizationResponse.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-package com.att.research.datarouter.authz;
-
-import java.util.List;
-
-/**
- * The AuthorizationResponse
interface gives the caller access to information about an authorization
- * decision. This information includes the permit/deny decision itself, along with supplementary information in the form of
- * advice and obligations. (The advice and obligations will not be used in Data Router R1.)
- *
- * @author J. F. Lucas
- *
- */
-public interface AuthorizationResponse {
- /**
- * Indicates whether the request is authorized or not.
- *
- * @return a boolean flag that is true
if the request is permitted, and false
otherwise.
- */
- public boolean isAuthorized();
-
- /**
- * Returns any advice elements that were included in the authorization response.
- *
- * @return A list of objects implementing the AuthorizationResponseSupplement
interface, with each object representing an
- * advice element from the authorization response.
- */
- public List getAdvice();
-
- /**
- * Returns any obligation elements that were included in the authorization response.
- *
- * @return A list of objects implementing the AuthorizationResponseSupplement
interface, with each object representing an
- * obligation element from the authorization response.
- */
- public List getObligations();
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/AuthorizationResponseSupplement.java b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/AuthorizationResponseSupplement.java
deleted file mode 100644
index 2829c507..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/AuthorizationResponseSupplement.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.authz;
-
-import java.util.Map;
-
-/** An object that meets the AuthorizationResponseSupplement
interface carries supplementary
- * information for an authorization response. In a XACML-based system, a response to an authorization request
- * carries not just the permit/deny decision but, optionally, supplemental information in the form of advice and
- * obligation elements. The structure of a XACML advice element and a XACML obligation element are similar: each has an identifier and
- * a set of attributes (name-value) pairs. (The difference between a XACML advice element and a XACML obligation element is in
- * how the recipient of the response--the Policy Enforcement Point, in XACML terminology--handles the element.)
- *
- * @author J. F. Lucas
- *
- */
-public interface AuthorizationResponseSupplement {
- /** Return the identifier for the supplementary information element.
- *
- * @return a String
containing the identifier.
- */
- public String getId();
-
- /** Return the attributes for the supplementary information element, as a Map
in which
- * keys represent attribute identifiers and values represent attribute values.
- *
- * @return attributes for the supplementary information element.
- */
- public Map getAttributes();
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/Authorizer.java b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/Authorizer.java
deleted file mode 100644
index bfed5c37..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/Authorizer.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.authz;
-
-import java.util.Map;
-import javax.servlet.http.HttpServletRequest;
-
-/**
- * A Data Router API that requires authorization of incoming requests creates an instance of a class that implements
- * the Authorizer
interface. The class implements all of the logic necessary to determine if an API
- * request is permitted. In Data Router R1, the classes that implement the Authorizer
interface will have
- * local logic that makes the authorization decision. After R1, these classes will instead have logic that creates XACML
- * authorization requests, sends these requests to a Policy Decision Point (PDP), and parses the XACML responses.
- *
- * @author J. F. Lucas
- *
- */
-public interface Authorizer {
- /**
- * Determine if the API request carried in the request
parameter is permitted.
- *
- * @param request the HTTP request for which an authorization decision is needed
- * @return an object implementing the AuthorizationResponse
interface. This object includes the
- * permit/deny decision for the request and (after R1) supplemental information related to the response in the form
- * of advice and obligations.
- */
- public AuthorizationResponse decide(HttpServletRequest request);
-
- /**
- * Determine if the API request carried in the request
parameter, with additional attributes provided in
- * the additionalAttrs
parameter, is permitted.
- *
- * @param request the HTTP request for which an authorization decision is needed
- * @param additionalAttrs additional attributes that the Authorizer
can in making an authorization decision
- * @return an object implementing the AuthorizationResponse
interface. This object includes the
- * permit/deny decision for the request and (after R1) supplemental information related to the response in the form
- * of advice and obligations.
- */
- public AuthorizationResponse decide(HttpServletRequest request, Map additionalAttrs);
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthRespImpl.java b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthRespImpl.java
deleted file mode 100644
index db318d39..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthRespImpl.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.authz.impl;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import com.att.research.datarouter.authz.AuthorizationResponse;
-import com.att.research.datarouter.authz.AuthorizationResponseSupplement;
-
-
-/** A representation of an authorization response returned by a XACML Policy Decision Point.
- * In Data Router R1, advice and obligations are not used.
- * @author J. F. Lucas
- *
- */
-public class AuthRespImpl implements AuthorizationResponse {
- private boolean authorized;
- private List advice;
- private List obligations;
-
- /** Constructor. This version will not be used in Data Router R1 since we will not have advice and obligations.
- *
- * @param authorized flag indicating whether the response carried a permit response (true
)
- * or something else (false
).
- * @param advice list of advice elements returned in the response.
- * @param obligations list of obligation elements returned in the response.
- */
- public AuthRespImpl(boolean authorized, List advice, List obligations) {
- this.authorized = authorized;
- this.advice = (advice == null ? null : new ArrayList (advice));
- this.obligations = (obligations == null ? null : new ArrayList (obligations));
- }
-
- /** Constructor. Simple version for authorization responses that have no advice and no obligations.
- *
- * @param authorized flag indicating whether the response carried a permit (true
) or something else (false
).
- */
- public AuthRespImpl(boolean authorized) {
- this(authorized, null, null);
- }
-
- /**
- * Indicates whether the request is authorized or not.
- *
- * @return a boolean flag that is true
if the request is permitted, and false
otherwise.
- */
- @Override
- public boolean isAuthorized() {
- return authorized;
- }
-
- /**
- * Returns any advice elements that were included in the authorization response.
- *
- * @return A list of objects implementing the AuthorizationResponseSupplement
interface, with each object representing an
- * advice element from the authorization response.
- */
- @Override
- public List getAdvice() {
- return advice;
- }
-
- /**
- * Returns any obligation elements that were included in the authorization response.
- *
- * @return A list of objects implementing the AuthorizationResponseSupplement
interface, with each object representing an
- * obligation element from the authorization response.
- */
- @Override
- public List getObligations() {
- return obligations;
- }
-
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthRespSupplementImpl.java b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthRespSupplementImpl.java
deleted file mode 100644
index 5d2b61c8..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthRespSupplementImpl.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.authz.impl;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import com.att.research.datarouter.authz.AuthorizationResponseSupplement;
-
-/** Carries supplementary information--an advice or an obligation--from the authorization response returned
- * by a XACML Policy Decision Point. Not used in Data Router R1.
- * @author J. F. Lucas
- *
- */
-public class AuthRespSupplementImpl implements AuthorizationResponseSupplement {
-
- private String id = null;
- private Map attributes = null;
-
- /** Constructor, available within the package.
- *
- * @param id The identifier for the advice or obligation element
- * @param attributes The attributes (name-value pairs) for the advice or obligation element.
- */
- AuthRespSupplementImpl (String id, Map attributes) {
- this.id = id;
- this.attributes = new HashMap(attributes);
- }
-
- /** Return the identifier for the supplementary information element.
- *
- * @return a String
containing the identifier.
- */
- @Override
- public String getId() {
- return id;
- }
-
- /** Return the attributes for the supplementary information element, as a Map
in which
- * keys represent attribute identifiers and values represent attribute values.
- *
- * @return attributes for the supplementary information element.
- */
- @Override
- public Map getAttributes() {
- return attributes;
- }
-
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthzResource.java b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthzResource.java
deleted file mode 100644
index 1a201b7e..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/AuthzResource.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.authz.impl;
-
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/** Internal representation of an authorization resource (the entity to which access is being requested). Consists
- * of a type and an identifier. The constructor takes the request URI from an HTTP request and checks it against
- * patterns for the the different resource types. In DR R1, there are four resource types:
- *
- * the feeds collection resource, the target of POST requests to create a new feed and GET requests to list
- * the existing feeds. This is the root resource for the DR provisioning system, and it has no explicit id.
- *
- * a feed resource, the target of GET, PUT, and DELETE requests used to manage an existing feed. Each feed
- * has a unique feed ID.
- *
- * a subscription collection resource, the target of POST requests to create a new subscription and GET requests
- * to list the subscriptions for a feed. Each feed has a subscription collection, and the ID associated with a
- * subscription collection is the ID of the feed.
- *
- * a subscription resource, the target of GET, PUT, and DELETE requests used to manage an existing subscription.
- * Each subscription has a unique subscription ID.
- *
- *
- * @author J. F. Lucas
- *
- */
-public class AuthzResource {
- private ResourceType type = null;
- private String id = "";
-
- /* Construct an AuthzResource by matching a request URI against the various patterns */
- public AuthzResource(String rURI) {
- if (rURI != null) {
- for (ResourceType t : ResourceType.values()) {
- Matcher m = t.getPattern().matcher(rURI);
- if (m.find(0)) {
- this.type = t;
- if (m.group("id") != null) {
- this.id = m.group("id");
- }
- break;
- }
- }
- }
- }
-
- public ResourceType getType() {
- return this.type;
- }
-
- public String getId() {
- return this.id;
- }
-
- /* Enumeration that helps turn a request URI into something more useful for
- * authorization purposes by given a type name and a pattern for determining if the URI
- * represents that resource type.
- * Highly dependent on the URL scheme, could be parameterized.
- */
- public enum ResourceType {
- FEEDS_COLLECTION("((://[^/]+/)|(^/))(?)$"),
- SUBS_COLLECTION ("((://[^/]+/)|(^/{0,1}))subscribe/(?[^/]+)$"),
- FEED("((://[^/]+/)|(^/{0,1}))feed/(?[^/]+)$"),
- SUB("((://[^/]+/)|(^/{0,1}))subs/(?[^/]+)$");
-
- private Pattern uriPattern;
-
- private ResourceType(String patternString) {
- this.uriPattern = Pattern.compile(patternString);
- }
-
- Pattern getPattern() {
- return this.uriPattern;
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/ProvAuthorizer.java b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/ProvAuthorizer.java
deleted file mode 100644
index d6683d5c..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/ProvAuthorizer.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.authz.impl;
-
-import java.util.Map;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.log4j.Logger;
-
-import com.att.research.datarouter.authz.AuthorizationResponse;
-import com.att.research.datarouter.authz.Authorizer;
-import com.att.research.datarouter.authz.impl.AuthzResource.ResourceType;
-
-/** Authorizer for the provisioning API for Data Router R1
- *
- * @author J. F. Lucas
- *
- */
-public class ProvAuthorizer implements Authorizer {
-
- private Logger log;
- private ProvDataProvider provData;
-
- private static final String SUBJECT_HEADER = "X-ATT-DR-ON-BEHALF-OF"; // HTTP header carrying requester identity
- private static final String SUBJECT_HEADER_GROUP = "X-ATT-DR-ON-BEHALF-OF-GROUP"; // HTTP header carrying requester identity by group Rally : US708115
- /** Constructor. For the moment, do nothing special. Make it a singleton?
- *
- */
- public ProvAuthorizer(ProvDataProvider provData) {
- this.provData = provData;
- this.log = Logger.getLogger(this.getClass());
- }
-
- /**
- * Determine if the API request carried in the request
parameter is permitted.
- *
- * @param request the HTTP request for which an authorization decision is needed
- * @return an object implementing the AuthorizationResponse
interface. This object includes the
- * permit/deny decision for the request and (after R1) supplemental information related to the response in the form
- * of advice and obligations.
- */
- @Override
- public AuthorizationResponse decide(HttpServletRequest request) {
- return this.decide(request, null);
- }
-
- /**
- * Determine if the API request carried in the request
parameter, with additional attributes provided in
- * the additionalAttrs
parameter, is permitted. additionalAttrs
isn't used in R1.
- *
- * @param request the HTTP request for which an authorization decision is needed
- * @param additionalAttrs additional attributes that the Authorizer
can in making an authorization decision
- * @return an object implementing the AuthorizationResponse
interface. This object includes the
- * permit/deny decision for the request and (after R1) supplemental information related to the response in the form
- * of advice and obligations.
- */
- @Override
- public AuthorizationResponse decide(HttpServletRequest request,
- Map additionalAttrs) {
- log.trace ("Entering decide()");
-
- boolean decision = false;
-
- // Extract interesting parts of the HTTP request
- String method = request.getMethod();
- AuthzResource resource = new AuthzResource(request.getRequestURI());
- String subject = (request.getHeader(SUBJECT_HEADER)); // identity of the requester
- String subjectgroup = (request.getHeader(SUBJECT_HEADER_GROUP)); // identity of the requester by group Rally : US708115
-
- log.trace("Method: " + method + " -- Type: " + resource.getType() + " -- Id: " + resource.getId() +
- " -- Subject: " + subject);
-
- // Choose authorization method based on the resource type
- ResourceType resourceType = resource.getType();
- if (resourceType != null) {
-
- switch (resourceType) {
-
- case FEEDS_COLLECTION:
- decision = allowFeedsCollectionAccess(resource, method, subject, subjectgroup);
- break;
-
- case SUBS_COLLECTION:
- decision = allowSubsCollectionAccess(resource, method, subject, subjectgroup);
- break;
-
- case FEED:
- decision = allowFeedAccess(resource, method, subject, subjectgroup);
- break;
-
- case SUB:
- decision = allowSubAccess(resource, method, subject, subjectgroup);
- break;
-
- default:
- decision = false;
- break;
- }
- }
- log.debug("Exit decide(): " + method + "|" + resourceType + "|" + resource.getId() + "|" + subject + " ==> " + decision);
-
- return new AuthRespImpl(decision);
- }
-
- private boolean allowFeedsCollectionAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
-
- // Allow GET or POST unconditionally
- return method != null && (method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("POST"));
- }
-
- private boolean allowSubsCollectionAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
-
- // Allow GET or POST unconditionally
- return method != null && (method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("POST"));
- }
-
- private boolean allowFeedAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
- boolean decision = false;
-
- // Allow GET, PUT, or DELETE if requester (subject) is the owner (publisher) of the feed
- if ( method != null && (method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("PUT") ||
- method.equalsIgnoreCase("DELETE"))) {
-
- String owner = provData.getFeedOwner(resource.getId());
- decision = (owner != null) && owner.equals(subject);
-
- //Verifying by group Rally : US708115
- if(subjectgroup != null) {
- String feedowner = provData.getGroupByFeedGroupId(subject, resource.getId());
- decision = (feedowner != null) && feedowner.equals(subjectgroup);
- }
- }
-
- return decision;
- }
-
- private boolean allowSubAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
- boolean decision = false;
-
- // Allow GET, PUT, or DELETE if requester (subject) is the owner of the subscription (subscriber)
- if (method != null && (method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("PUT") ||
- method.equalsIgnoreCase("DELETE") || method.equalsIgnoreCase("POST"))) {
-
- String owner = provData.getSubscriptionOwner(resource.getId());
- decision = (owner != null) && owner.equals(subject);
-
- //Verifying by group Rally : US708115
- if(subjectgroup != null) {
- String feedowner = provData.getGroupBySubGroupId(subject, resource.getId());
- decision = (feedowner != null) && feedowner.equals(subjectgroup);
- }
- }
-
- return decision;
- }
-
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/ProvDataProvider.java b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/ProvDataProvider.java
deleted file mode 100644
index 76ae0344..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/ProvDataProvider.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-package com.att.research.datarouter.authz.impl;
-
-/** Interface to access data about subscriptions and feeds. A software component that
- * uses the ProvAuthorizer
needs to supply an implementation of this interface.
- * @author J. F. Lucas
- *
- */
-public interface ProvDataProvider {
-
- /** Get the identity of the owner of a feed.
- *
- * @param feedId the feed ID of the feed whose owner is being looked up.
- * @return the feed owner's identity
- */
- public String getFeedOwner(String feedId);
-
- /** Get the security classification of a feed.
- *
- * @param feedId the ID of the feed whose classification is being looked up.
- * @return the classification of the feed.
- */
- public String getFeedClassification(String feedId);
-
- /** Get the identity of the owner of a feed
- *
- * @param subId the ID of the subscripition whose owner is being looked up.
- * @return the subscription owner's identity.
- */
- public String getSubscriptionOwner(String subId);
-
- /** Get the identity of the owner of a feed by group id - Rally : US708115
- *
- * @param feedid, user the ID of the feed whose owner is being looked up.
- * @return the feed owner's identity by group.
- */
- public String getGroupByFeedGroupId(String owner, String feedId);
-
- /** Get the identity of the owner of a sub by group id Rally : US708115
- *
- * @param subid, user the ID of the feed whose owner is being looked up.
- * @return the feed owner's identity by group.
- */
- public String getGroupBySubGroupId(String owner, String subId);
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/package.html b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/package.html
deleted file mode 100644
index fae27ee0..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/impl/package.html
+++ /dev/null
@@ -1,68 +0,0 @@
-#-------------------------------------------------------------------------------
-# ============LICENSE_START==================================================
-# * org.onap.dmaap
-# * ===========================================================================
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# * ===========================================================================
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# * ============LICENSE_END====================================================
-# *
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
-# *
-#-------------------------------------------------------------------------------
-
-
-
-
-
-
-This package provides an implementation of the authorization-related interfaces
-defined by the com.att.research.datarouter.authz
package, intended for
-use with the provisioning server for Data Router Release 1. In DR R1, we do not
-have an external policy engine, so this implementation performs the authorization
-locally.
-
-
-In order to perform the authorization, this package needs access to provisioning data
-about feeds and subscriptions. This package defines an interface
-(com.att.research.datarouter.authz.impl.ProvDataProvider
) through which it
-expects to get this data. The provisioning server code must provide an implementation
-of this interface.
-
-
-A software component that wishes to use this implementation must:
-
-Provide an implementation of the
-com.att.research.datarouter.authz.impl.ProvDataProvider
-interface.
-
-
-Create an instance of the ProvDataProvider
implementation.
-
-Create an instance of the
-com.att.research.datarouter.authz.impl.ProvAuthorizer
-class defined in this package, passing it an instance of the ProvDataProvider
-implementation.
-
-
-
-
-Example:
-
-
-ProvDataProvider dataProv = new MyDataProvider();
-Authorizer authz = new ProvAuthorizer(dataProv);
-
-
-
-
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/package.html b/datarouter-prov/src/main/java/com/att/research/datarouter/authz/package.html
deleted file mode 100644
index 7628ae82..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/authz/package.html
+++ /dev/null
@@ -1,38 +0,0 @@
-#-------------------------------------------------------------------------------
-# ============LICENSE_START==================================================
-# * org.onap.dmaap
-# * ===========================================================================
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# * ===========================================================================
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# * ============LICENSE_END====================================================
-# *
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
-# *
-#-------------------------------------------------------------------------------
-
-
-
-
-
-
-This package defines an interface that can be used by servlet-based HTTP APIs to
-make authorization requests and receive authorization responses from an external
-authorization entity such as a XACML Policy Decision Point (PDP).
-
-
-In Data Router Release 1, there is no external authorization system. The provisioning server
-will use an implementation of this interface for local authorization of provisioning requests.
-
-
-
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/BaseServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/BaseServlet.java
deleted file mode 100644
index a4784936..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/BaseServlet.java
+++ /dev/null
@@ -1,869 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import static com.att.eelf.configuration.Configuration.MDC_SERVER_FQDN;
-
-import static com.att.eelf.configuration.Configuration.MDC_SERVER_IP_ADDRESS;
-import static com.att.eelf.configuration.Configuration.MDC_SERVICE_NAME;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.security.cert.X509Certificate;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.List;
-import java.util.ArrayList;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.log4j.Logger;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.json.JSONException;
-import org.slf4j.MDC;
-
-import com.att.research.datarouter.authz.Authorizer;
-import com.att.research.datarouter.authz.impl.ProvAuthorizer;
-import com.att.research.datarouter.authz.impl.ProvDataProvider;
-import com.att.research.datarouter.provisioning.beans.Deleteable;
-import com.att.research.datarouter.provisioning.beans.Feed;
-import com.att.research.datarouter.provisioning.beans.Insertable;
-import com.att.research.datarouter.provisioning.beans.NodeClass;
-import com.att.research.datarouter.provisioning.beans.Parameters;
-import com.att.research.datarouter.provisioning.beans.Subscription;
-import com.att.research.datarouter.provisioning.beans.Updateable;
-import com.att.research.datarouter.provisioning.utils.DB;
-import com.att.research.datarouter.provisioning.utils.ThrottleFilter;
-import com.att.research.datarouter.provisioning.beans.Group; //Groups feature Rally:US708115 - 1610
-
-import java.util.Properties;
-import java.util.regex.Pattern;
-import javax.mail.Message;
-import javax.mail.MessagingException;
-import javax.mail.Multipart;
-import javax.mail.Session;
-import javax.mail.Transport;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
-import javax.mail.internet.MimeBodyPart;
-import javax.mail.internet.MimeMessage;
-import javax.mail.internet.MimeMultipart;
-/**
- * This is the base class for all Servlets in the provisioning code.
- * It provides standard constants and some common methods.
- *
- * @author Robert Eby
- * @version $Id: BaseServlet.java,v 1.16 2014/03/12 19:45:40 eby Exp $
- */
-@SuppressWarnings("serial")
-public class BaseServlet extends HttpServlet implements ProvDataProvider {
- public static final String BEHALF_HEADER = "X-ATT-DR-ON-BEHALF-OF";
- public static final String FEED_BASECONTENT_TYPE = "application/vnd.att-dr.feed";
- public static final String FEED_CONTENT_TYPE = "application/vnd.att-dr.feed; version=2.0";
- public static final String FEEDFULL_CONTENT_TYPE = "application/vnd.att-dr.feed-full; version=2.0";
- public static final String FEEDLIST_CONTENT_TYPE = "application/vnd.att-dr.feed-list; version=1.0";
- public static final String SUB_BASECONTENT_TYPE = "application/vnd.att-dr.subscription";
- public static final String SUB_CONTENT_TYPE = "application/vnd.att-dr.subscription; version=2.0";
- public static final String SUBFULL_CONTENT_TYPE = "application/vnd.att-dr.subscription-full; version=2.0";
- public static final String SUBLIST_CONTENT_TYPE = "application/vnd.att-dr.subscription-list; version=1.0";
-
-
- //Adding groups functionality, ...1610
- public static final String GROUP_BASECONTENT_TYPE = "application/vnd.att-dr.group";
- public static final String GROUP_CONTENT_TYPE = "application/vnd.att-dr.group; version=2.0";
- public static final String GROUPFULL_CONTENT_TYPE = "application/vnd.att-dr.group-full; version=2.0";
- public static final String GROUPLIST_CONTENT_TYPE = "application/vnd.att-dr.fegrouped-list; version=1.0";
-
-
- public static final String LOGLIST_CONTENT_TYPE = "application/vnd.att-dr.log-list; version=1.0";
- public static final String PROVFULL_CONTENT_TYPE1 = "application/vnd.att-dr.provfeed-full; version=1.0";
- public static final String PROVFULL_CONTENT_TYPE2 = "application/vnd.att-dr.provfeed-full; version=2.0";
- public static final String CERT_ATTRIBUTE = "javax.servlet.request.X509Certificate";
-
- public static final String DB_PROBLEM_MSG = "There has been a problem with the DB. It is suggested you try the operation again.";
-
- public static final int DEFAULT_MAX_FEEDS = 10000;
- public static final int DEFAULT_MAX_SUBS = 100000;
- public static final int DEFAULT_POKETIMER1 = 5;
- public static final int DEFAULT_POKETIMER2 = 30;
- public static final String DEFAULT_DOMAIN = "web.att.com";
- public static final String DEFAULT_PROVSRVR_NAME = "feeds-drtr.web.att.com";
- public static final String RESEARCH_SUBNET = "135.207.136.128/25";
- public static final String STATIC_ROUTING_NODES = ""; //Adding new param for static Routing - Rally:US664862-1610
-
- /** A boolean to trigger one time "provisioning changed" event on startup */
- private static boolean startmsg_flag = true;
- /** This POD should require SSL connections from clients; pulled from the DB (PROV_REQUIRE_SECURE) */
- private static boolean require_secure = true;
- /** This POD should require signed, recognized certificates from clients; pulled from the DB (PROV_REQUIRE_CERT) */
- private static boolean require_cert = true;
- /** The set of authorized addresses and networks; pulled from the DB (PROV_AUTH_ADDRESSES) */
- private static Set authorizedAddressesAndNetworks = new HashSet();
- /** The set of authorized names; pulled from the DB (PROV_AUTH_SUBJECTS) */
- private static Set authorizedNames = new HashSet();
- /** The FQDN of the initially "active" provisioning server in this Data Router ecosystem */
- private static String initial_active_pod;
- /** The FQDN of the initially "standby" provisioning server in this Data Router ecosystem */
- private static String initial_standby_pod;
- /** The FQDN of this provisioning server in this Data Router ecosystem */
- private static String this_pod;
- /** "Timer 1" - used to determine when to notify nodes of provisioning changes */
- private static long poke_timer1;
- /** "Timer 2" - used to determine when to notify nodes of provisioning changes */
- private static long poke_timer2;
- /** Array of nodes names and/or FQDNs */
- private static String[] nodes = new String[0];
- /** Array of node IP addresses */
- private static InetAddress[] nodeAddresses = new InetAddress[0];
- /** Array of POD IP addresses */
- private static InetAddress[] podAddresses = new InetAddress[0];
- /** The maximum number of feeds allowed; pulled from the DB (PROV_MAXFEED_COUNT) */
- protected static int max_feeds = 0;
- /** The maximum number of subscriptions allowed; pulled from the DB (PROV_MAXSUB_COUNT) */
- protected static int max_subs = 0;
- /** The current number of feeds in the system */
- protected static int active_feeds = 0;
- /** The current number of subscriptions in the system */
- protected static int active_subs = 0;
- /** The domain used to generate a FQDN from the "bare" node names */
- public static String prov_domain = "web.att.com";
- /** The standard FQDN of the provisioning server in this Data Router ecosystem */
- public static String prov_name = "feeds-drtr.web.att.com";
- /** The standard FQDN of the ACTIVE provisioning server in this Data Router ecosystem */
- public static String active_prov_name = "feeds-drtr.web.att.com";
- /** Special subnet that is allowed access to /internal */
- protected static String special_subnet = RESEARCH_SUBNET;
-
- /** Special subnet that is allowed access to /internal to Lab Machine */
- protected static String special_subnet_secondary = RESEARCH_SUBNET;
- protected static String static_routing_nodes = STATIC_ROUTING_NODES; //Adding new param for static Routing - Rally:US664862-1610
-
- /** This logger is used to log provisioning events */
- protected static Logger eventlogger;
- /** This logger is used to log internal events (errors, etc.) */
- protected static Logger intlogger;
- /** Authorizer - interface to the Policy Engine */
- protected static Authorizer authz;
- /** The Synchronizer used to sync active DB to standby one */
- protected static SynchronizerTask synctask = null;
-
- //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
- private InetAddress thishost;
- private InetAddress loopback;
- private static Boolean mailSendFlag = false;
-
- public static final String MAILCONFIG_FILE = "mail.properties";
- private static Properties mailprops;
- /**
- * Initialize data common to all the provisioning server servlets.
- */
- protected BaseServlet() {
- if (eventlogger == null)
- eventlogger = Logger.getLogger("com.att.research.datarouter.provisioning.events");
- if (intlogger == null)
- intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- if (authz == null)
- authz = new ProvAuthorizer(this);
- if (startmsg_flag) {
- startmsg_flag = false;
- provisioningParametersChanged();
- }
- if (synctask == null) {
- synctask = SynchronizerTask.getSynchronizer();
- }
- String name = this.getClass().getName();
- intlogger.info("PROV0002 Servlet "+name+" started.");
- }
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
- try {
- thishost = InetAddress.getLocalHost();
- loopback = InetAddress.getLoopbackAddress();
- checkHttpsRelaxation(); //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
- } catch (UnknownHostException e) {
- // ignore
- }
- }
- protected int getIdFromPath(HttpServletRequest req) {
- String path = req.getPathInfo();
- if (path == null || path.length() < 2)
- return -1;
- try {
- return Integer.parseInt(path.substring(1));
- } catch (NumberFormatException e) {
- return -1;
- }
- }
- /**
- * Read the request's input stream and return a JSONObject from it
- * @param req the HTTP request
- * @return the JSONObject, or null if the stream cannot be parsed
- */
- protected JSONObject getJSONfromInput(HttpServletRequest req) {
- JSONObject jo = null;
- try {
- jo = new JSONObject(new JSONTokener(req.getInputStream()));
- if (intlogger.isDebugEnabled())
- intlogger.debug("JSON: "+jo.toString());
- } catch (Exception e) {
- intlogger.info("Error reading JSON: "+e);
- }
- return jo;
- }
- /**
- * Check if the remote host is authorized to perform provisioning.
- * Is the request secure?
- * Is it coming from an authorized IP address or network (configured via PROV_AUTH_ADDRESSES)?
- * Does it have a valid client certificate (configured via PROV_AUTH_SUBJECTS)?
- * @param request the request
- * @return an error string, or null if all is OK
- */
- protected String isAuthorizedForProvisioning(HttpServletRequest request) {
- // Is the request https?
- if (require_secure && !request.isSecure()) {
- return "Request must be made over an HTTPS connection.";
- }
-
- // Is remote IP authorized?
- String remote = request.getRemoteAddr();
- try {
- boolean found = false;
- InetAddress ip = InetAddress.getByName(remote);
- for (String addrnet : authorizedAddressesAndNetworks) {
- found |= addressMatchesNetwork(ip, addrnet);
- }
- if (!found) {
- return "Unauthorized address: "+remote;
- }
- } catch (UnknownHostException e) {
- return "Unauthorized address: "+remote;
- }
-
- // Does remote have a valid certificate?
- if (require_cert) {
- X509Certificate certs[] = (X509Certificate[]) request.getAttribute(CERT_ATTRIBUTE);
- if (certs == null || certs.length == 0) {
- return "Client certificate is missing.";
- }
- // cert[0] is the client cert
- // see http://www.proto.research.att.com/java/java7/api/javax/net/ssl/SSLSession.html#getPeerCertificates()
- String name = certs[0].getSubjectX500Principal().getName();
- if (!authorizedNames.contains(name)) {
- return "No authorized certificate found.";
- }
- }
-
- // No problems!
- return null;
- }
- /**
- * Check if the remote IP address is authorized to see the /internal URL tree.
- * @param request the HTTP request
- * @return true iff authorized
- */
- protected boolean isAuthorizedForInternal(HttpServletRequest request) {
- try {
- InetAddress ip = InetAddress.getByName(request.getRemoteAddr());
- for (InetAddress node : getNodeAddresses()) {
- if (node != null && ip.equals(node))
- return true;
- }
- for (InetAddress pod : getPodAddresses()) {
- if (pod != null && ip.equals(pod))
- return true;
- }
- if (thishost != null && ip.equals(thishost))
- return true;
- if (loopback != null && ip.equals(loopback))
- return true;
- // Also allow the "special subnet" access
- if (addressMatchesNetwork(ip, special_subnet_secondary))
- return true;
- if (addressMatchesNetwork(ip, special_subnet))
- return true;
- } catch (UnknownHostException e) {
- // ignore
- }
- return false;
- }
- /**
- * Check if an IP address matches a network address.
- * @param ip the IP address
- * @param s the network address; a bare IP address may be matched also
- * @return true if they intersect
- */
- protected static boolean addressMatchesNetwork(InetAddress ip, String s) {
- int mlen = -1;
- int n = s.indexOf("/");
- if (n >= 0) {
- mlen = Integer.parseInt(s.substring(n+1));
- s = s.substring(0, n);
- }
- try {
- InetAddress i2 = InetAddress.getByName(s);
- byte[] b1 = ip.getAddress();
- byte[] b2 = i2.getAddress();
- if (b1.length != b2.length)
- return false;
- if (mlen > 0) {
- byte[] masks = {
- (byte)0x00, (byte)0x80, (byte)0xC0, (byte)0xE0,
- (byte)0xF0, (byte)0xF8, (byte)0xFC, (byte)0xFE
- };
- byte mask = masks[mlen%8];
- for (n = mlen/8; n < b1.length; n++) {
- b1[n] &= mask;
- b2[n] &= mask;
- mask = 0;
- }
- }
- for (n = 0; n < b1.length; n++)
- if (b1[n] != b2[n])
- return false;
- } catch (UnknownHostException e) {
- return false;
- }
- return true;
- }
- /**
- * Something has changed in the provisioning data.
- * Start the timers that will cause the pre-packaged JSON string to be regenerated,
- * and cause nodes and the other provisioning server to be notified.
- */
- public static void provisioningDataChanged() {
- long now = System.currentTimeMillis();
- Poker p = Poker.getPoker();
- p.setTimers(now + (poke_timer1 * 1000L), now + (poke_timer2 * 1000L));
- }
- /**
- * Something in the parameters has changed, reload all parameters from the DB.
- */
- public static void provisioningParametersChanged() {
- Map map = Parameters.getParameters();
- require_secure = getBoolean(map, Parameters.PROV_REQUIRE_SECURE);
- require_cert = getBoolean(map, Parameters.PROV_REQUIRE_CERT);
- authorizedAddressesAndNetworks = getSet(map, Parameters.PROV_AUTH_ADDRESSES);
- authorizedNames = getSet (map, Parameters.PROV_AUTH_SUBJECTS);
- nodes = getSet (map, Parameters.NODES).toArray(new String[0]);
- max_feeds = getInt (map, Parameters.PROV_MAXFEED_COUNT, DEFAULT_MAX_FEEDS);
- max_subs = getInt (map, Parameters.PROV_MAXSUB_COUNT, DEFAULT_MAX_SUBS);
- poke_timer1 = getInt (map, Parameters.PROV_POKETIMER1, DEFAULT_POKETIMER1);
- poke_timer2 = getInt (map, Parameters.PROV_POKETIMER2, DEFAULT_POKETIMER2);
- prov_domain = getString (map, Parameters.PROV_DOMAIN, DEFAULT_DOMAIN);
- prov_name = getString (map, Parameters.PROV_NAME, DEFAULT_PROVSRVR_NAME);
- active_prov_name = getString (map, Parameters.PROV_ACTIVE_NAME, prov_name);
- special_subnet = getString (map, Parameters.PROV_SPECIAL_SUBNET, RESEARCH_SUBNET);
- static_routing_nodes = getString (map, Parameters.STATIC_ROUTING_NODES, ""); //Adding new param for static Routing - Rally:US664862-1610
- initial_active_pod = getString (map, Parameters.ACTIVE_POD, "");
- initial_standby_pod = getString (map, Parameters.STANDBY_POD, "");
- static_routing_nodes = getString (map, Parameters.STATIC_ROUTING_NODES, ""); //Adding new param for static Routing - Rally:US664862-1610
- active_feeds = Feed.countActiveFeeds();
- active_subs = Subscription.countActiveSubscriptions();
- try {
- this_pod = InetAddress.getLocalHost().getHostName();
- } catch (UnknownHostException e) {
- this_pod = "";
- intlogger.warn("PROV0014 Cannot determine the name of this provisioning server.");
- }
-
- // Normalize the nodes, and fill in nodeAddresses
- InetAddress[] na = new InetAddress[nodes.length];
- for (int i = 0; i < nodes.length; i++) {
- if (nodes[i].indexOf('.') < 0)
- nodes[i] += "." + prov_domain;
- try {
- na[i] = InetAddress.getByName(nodes[i]);
- intlogger.debug("PROV0003 DNS lookup: "+nodes[i]+" => "+na[i].toString());
- } catch (UnknownHostException e) {
- na[i] = null;
- intlogger.warn("PROV0004 Cannot lookup "+nodes[i]+": "+e);
- }
- }
-
- //Reset Nodes arr after - removing static routing Nodes, Rally Userstory - US664862 .
- List filterNodes = new ArrayList<>();
- for (int i = 0; i < nodes.length; i++) {
- if(!static_routing_nodes.contains(nodes[i])){
- filterNodes.add(nodes[i]);
- }
- }
- String [] filteredNodes = filterNodes.toArray(new String[filterNodes.size()]);
- nodes = filteredNodes;
-
- nodeAddresses = na;
- NodeClass.setNodes(nodes); // update NODES table
-
- // Normalize the PODs, and fill in podAddresses
- String[] pods = getPods();
- na = new InetAddress[pods.length];
- for (int i = 0; i < pods.length; i++) {
- if (pods[i].indexOf('.') < 0)
- pods[i] += "." + prov_domain;
- try {
- na[i] = InetAddress.getByName(pods[i]);
- intlogger.debug("PROV0003 DNS lookup: "+pods[i]+" => "+na[i].toString());
- } catch (UnknownHostException e) {
- na[i] = null;
- intlogger.warn("PROV0004 Cannot lookup "+pods[i]+": "+e);
- }
- }
- podAddresses = na;
-
- // Update ThrottleFilter
- ThrottleFilter.configure();
-
- // Check if we are active or standby POD
- if (!isInitialActivePOD() && !isInitialStandbyPOD())
- intlogger.warn("PROV0015 This machine is neither the active nor the standby POD.");
- }
-
-
- /**Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
- * Load mail properties.
- * @author vs215k
- *
- **/
- private void loadMailProperties() {
- if (mailprops == null) {
- mailprops = new Properties();
- InputStream inStream = getClass().getClassLoader().getResourceAsStream(MAILCONFIG_FILE);
- try {
- mailprops.load(inStream);
- } catch (IOException e) {
- intlogger.fatal("PROV9003 Opening properties: "+e.getMessage());
- e.printStackTrace();
- System.exit(1);
- }
- finally {
- try {
- inStream.close();
- }
- catch (IOException e) {
- }
- }
- }
- }
-
- /**Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
- * Check if HTTPS Relexaction is enabled
- * @author vs215k
- *
- **/
- private void checkHttpsRelaxation() {
- if(mailSendFlag == false) {
- Properties p = (new DB()).getProperties();
- intlogger.info("HTTPS relaxatio: "+p.get("com.att.research.datarouter.provserver.https.relaxation"));
-
- if(p.get("com.att.research.datarouter.provserver.https.relaxation").equals("true")) {
- try {
- notifyPSTeam(p.get("com.att.research.datarouter.provserver.https.relax.notify").toString());
- }
- catch (Exception e) {
- e.printStackTrace();
- }
- }
- mailSendFlag = true;
- }
- }
-
- /**Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
- * @author vs215k
- * @param email - list of email ids to notify if HTTP relexcation is enabled.
- **/
- private void notifyPSTeam(String email) throws Exception {
- loadMailProperties(); //Load HTTPS Relex mail properties.
- String[] emails = email.split(Pattern.quote("|"));
-
- Properties mailproperties = new Properties();
- mailproperties.put("mail.smtp.host", mailprops.get("com.att.dmaap.datarouter.mail.server"));
- mailproperties.put("mail.transport.protocol", mailprops.get("com.att.dmaap.datarouter.mail.protocol"));
-
- Session session = Session.getDefaultInstance(mailproperties, null);
- Multipart mp = new MimeMultipart();
- MimeBodyPart htmlPart = new MimeBodyPart();
-
- try {
-
- Message msg = new MimeMessage(session);
- msg.setFrom(new InternetAddress(mailprops.get("com.att.dmaap.datarouter.mail.from").toString()));
-
- InternetAddress[] addressTo = new InternetAddress[emails.length];
- for ( int x =0 ; x < emails.length; x++) {
- addressTo[x] = new InternetAddress(emails[x]);
- }
-
- msg.addRecipients(Message.RecipientType.TO, addressTo);
- msg.setSubject(mailprops.get("com.att.dmaap.datarouter.mail.subject").toString());
- htmlPart.setContent(mailprops.get("com.att.dmaap.datarouter.mail.body").toString().replace("[SERVER]", InetAddress.getLocalHost().getHostName()), "text/html");
- mp.addBodyPart(htmlPart);
- msg.setContent(mp);
-
- System.out.println(mailprops.get("com.att.dmaap.datarouter.mail.body").toString().replace("[SERVER]", InetAddress.getLocalHost().getHostName()));
-
- Transport.send(msg);
- intlogger.info("HTTPS relaxation mail is sent to - : "+email);
-
- } catch (AddressException e) {
- intlogger.error("Invalid email address, unable to send https relaxation mail to - : "+email);
- } catch (MessagingException e) {
- intlogger.error("Invalid email address, unable to send https relaxation mail to - : "+email);
- }
- }
-
-
- /**
- * Get an array of all node names in the DR network.
- * @return an array of Strings
- */
- public static String[] getNodes() {
- return nodes;
- }
- /**
- * Get an array of all node InetAddresses in the DR network.
- * @return an array of InetAddresses
- */
- public static InetAddress[] getNodeAddresses() {
- return nodeAddresses;
- }
- /**
- * Get an array of all POD names in the DR network.
- * @return an array of Strings
- */
- public static String[] getPods() {
- return new String[] { initial_active_pod, initial_standby_pod };
- }
- /**
- * Get an array of all POD InetAddresses in the DR network.
- * @return an array of InetAddresses
- */
- public static InetAddress[] getPodAddresses() {
- return podAddresses;
- }
- /**
- * Gets the FQDN of the initially ACTIVE provisioning server (POD).
- * Note: this used to be called isActivePOD(), however, that is a misnomer, as the active status
- * could shift to the standby POD without these parameters changing. Hence, the function names
- * have been changed to more accurately reflect their purpose.
- * @return the FQDN
- */
- public static boolean isInitialActivePOD() {
- return this_pod.equals(initial_active_pod);
- }
- /**
- * Gets the FQDN of the initially STANDBY provisioning server (POD).
- * Note: this used to be called isStandbyPOD(), however, that is a misnomer, as the standby status
- * could shift to the active POD without these parameters changing. Hence, the function names
- * have been changed to more accurately reflect their purpose.
- * @return the FQDN
- */
- public static boolean isInitialStandbyPOD() {
- return this_pod.equals(initial_standby_pod);
- }
- /**
- * INSERT an {@link Insertable} bean into the database.
- * @param bean the bean representing a row to insert
- * @return true if the INSERT was successful
- */
- protected boolean doInsert(Insertable bean) {
- boolean rv = false;
- DB db = new DB();
- Connection conn = null;
- try {
- conn = db.getConnection();
- rv = bean.doInsert(conn);
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0005 doInsert: "+e.getMessage());
- e.printStackTrace();
- } finally {
- if (conn != null)
- db.release(conn);
- }
- return rv;
- }
- /**
- * UPDATE an {@link Updateable} bean in the database.
- * @param bean the bean representing a row to update
- * @return true if the UPDATE was successful
- */
- protected boolean doUpdate(Updateable bean) {
- boolean rv = false;
- DB db = new DB();
- Connection conn = null;
- try {
- conn = db.getConnection();
- rv = bean.doUpdate(conn);
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
- e.printStackTrace();
- } finally {
- if (conn != null)
- db.release(conn);
- }
- return rv;
- }
- /**
- * DELETE an {@link Deleteable} bean from the database.
- * @param bean the bean representing a row to delete
- * @return true if the DELETE was successful
- */
- protected boolean doDelete(Deleteable bean) {
- boolean rv = false;
- DB db = new DB();
- Connection conn = null;
- try {
- conn = db.getConnection();
- rv = bean.doDelete(conn);
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0007 doDelete: "+e.getMessage());
- e.printStackTrace();
- } finally {
- if (conn != null)
- db.release(conn);
- }
- return rv;
- }
- private static boolean getBoolean(Map map, String name) {
- String s = map.get(name);
- return (s != null) && s.equalsIgnoreCase("true");
- }
- private static String getString(Map map, String name, String dflt) {
- String s = map.get(name);
- return (s != null) ? s : dflt;
- }
- private static int getInt(Map map, String name, int dflt) {
- try {
- String s = map.get(name);
- return Integer.parseInt(s);
- } catch (NumberFormatException e) {
- return dflt;
- }
- }
- private static Set getSet(Map map, String name) {
- Set set = new HashSet();
- String s = map.get(name);
- if (s != null) {
- String[] pp = s.split("\\|");
- if (pp != null) {
- for (String t : pp) {
- String t2 = t.trim();
- if (t2.length() > 0)
- set.add(t2);
- }
- }
- }
- return set;
- }
-
- /**
- * A class used to encapsulate a Content-type header, separating out the "version" attribute
- * (which defaults to "1.0" if missing).
- */
- public class ContentHeader {
- private String type = "";
- private Map map = new HashMap();
- public ContentHeader() {
- this("", "1.0");
- }
- public ContentHeader(String t, String v) {
- type = t.trim();
- map.put("version", v);
- }
- public String getType() {
- return type;
- }
- public String getAttribute(String key) {
- String s = map.get(key);
- if (s == null)
- s = "";
- return s;
- }
- }
-
- /**
- * Get the ContentHeader from an HTTP request.
- * @param req the request
- * @return the header, encapsulated in a ContentHeader object
- */
- public ContentHeader getContentHeader(HttpServletRequest req) {
- ContentHeader ch = new ContentHeader();
- String s = req.getHeader("Content-Type");
- if (s != null) {
- String[] pp = s.split(";");
- ch.type = pp[0].trim();
- for (int i = 1; i < pp.length; i++) {
- int ix = pp[i].indexOf('=');
- if (ix > 0) {
- String k = pp[i].substring(0, ix).trim();
- String v = pp[i].substring(ix+1).trim();
- ch.map.put(k, v);
- } else {
- ch.map.put(pp[i].trim(), "");
- }
- }
- }
- return ch;
- }
- // Methods for the Policy Engine classes - ProvDataProvider interface
- @Override
- public String getFeedOwner(String feedId) {
- try {
- int n = Integer.parseInt(feedId);
- Feed f = Feed.getFeedById(n);
- if (f != null)
- return f.getPublisher();
- } catch (NumberFormatException e) {
- // ignore
- }
- return null;
- }
- @Override
- public String getFeedClassification(String feedId) {
- try {
- int n = Integer.parseInt(feedId);
- Feed f = Feed.getFeedById(n);
- if (f != null)
- return f.getAuthorization().getClassification();
- } catch (NumberFormatException e) {
- // ignore
- }
- return null;
- }
- @Override
- public String getSubscriptionOwner(String subId) {
- try {
- int n = Integer.parseInt(subId);
- Subscription s = Subscription.getSubscriptionById(n);
- if (s != null)
- return s.getSubscriber();
- } catch (NumberFormatException e) {
- // ignore
- }
- return null;
- }
-
- /*
- * @Method - isUserMemberOfGroup - Rally:US708115
- * @Params - group object and user to check if exists in given group
- * @return - boolean value /true/false
- */
- private boolean isUserMemberOfGroup(Group group, String user) {
-
- String groupdetails = group.getMembers().replace("]", "").replace("[", "");
- String s[] = groupdetails.split("},");
-
- for(int i=0; i < s.length; i++) {
- JSONObject jsonObj = null;
- try {
- jsonObj = new JSONObject(s[i]+"}");
- if(jsonObj.get("id").equals(user))
- return true;
- } catch (JSONException e) {
- e.printStackTrace();
- }
- }
- return false;
-
- }
-
- /*
- * @Method - getGroupByFeedGroupId- Rally:US708115
- * @Params - User to check in group and feedid which is assigned the group.
- * @return - string value grupid/null
- */
- @Override
- public String getGroupByFeedGroupId(String owner, String feedId) {
- try {
- int n = Integer.parseInt(feedId);
- Feed f = Feed.getFeedById(n);
- if (f != null) {
- int groupid = f.getGroupid();
- if(groupid > 0) {
- Group group = Group.getGroupById(groupid);
- if(isUserMemberOfGroup(group, owner)) {
- return group.getAuthid();
- }
- }
- }
- } catch (NumberFormatException e) {
- // ignore
- }
- return null;
- }
-
- /*
- * @Method - getGroupBySubGroupId - Rally:US708115
- * @Params - User to check in group and subid which is assigned the group.
- * @return - string value grupid/null
- */
- @Override
- public String getGroupBySubGroupId(String owner, String subId) {
- try {
- int n = Integer.parseInt(subId);
- Subscription s = Subscription.getSubscriptionById(n);
- if (s != null) {
- int groupid = s.getGroupid();
- if(groupid > 0) {
- Group group = Group.getGroupById(groupid);
- if(isUserMemberOfGroup(group, owner)) {
- return group.getAuthid();
- }
- }
- }
- } catch (NumberFormatException e) {
- // ignore
- }
- return null;
- }
-
- /*
- * @Method - setIpAndFqdnForEelf - Rally:US664892
- * @Params - method, prints method name in EELF log.
- */
- protected void setIpAndFqdnForEelf(String method) {
- MDC.clear();
- MDC.put(MDC_SERVICE_NAME, method);
- try {
- MDC.put(MDC_SERVER_FQDN, InetAddress.getLocalHost().getHostName());
- MDC.put(MDC_SERVER_IP_ADDRESS, InetAddress.getLocalHost().getHostAddress());
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/DRFeedsServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/DRFeedsServlet.java
deleted file mode 100644
index df27042d..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/DRFeedsServlet.java
+++ /dev/null
@@ -1,300 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.IOException;
-import java.io.InvalidObjectException;
-import java.util.List;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONObject;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.research.datarouter.authz.AuthorizationResponse;
-import com.att.research.datarouter.provisioning.beans.EventLogRecord;
-import com.att.research.datarouter.provisioning.beans.Feed;
-import com.att.research.datarouter.provisioning.eelf.EelfMsgs;
-import com.att.research.datarouter.provisioning.utils.JSONUtilities;
-
-/**
- * This servlet handles provisioning for the <drFeedsURL> which is the URL on the
- * provisioning server used to create new feeds. It supports POST to create new feeds,
- * and GET to support the Feeds Collection Query function.
- *
- * @author Robert Eby
- * @version $Id$
- */
-@SuppressWarnings("serial")
-public class DRFeedsServlet extends ProxyServlet {
- //Adding EELF Logger Rally:US664892
- private static EELFLogger eelflogger = EELFManager.getInstance().getLogger("com.att.research.datarouter.provisioning.DRFeedsServlet");
-
- /**
- * DELETE on the <drFeedsURL> -- not supported.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doDelete");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- String message = "DELETE not allowed for the drFeedsURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
- /**
- * GET on the <drFeedsURL> -- query the list of feeds already existing in the DB.
- * See the Feeds Collection Queries section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doGet");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doGet(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- String path = req.getRequestURI(); // Note: I think this should be getPathInfo(), but that doesn't work (Jetty bug?)
- if (path != null && !path.equals("/")) {
- message = "Bad URL.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
-
- String name = req.getParameter("name");
- String vers = req.getParameter("version");
- String publ = req.getParameter("publisher");
- String subs = req.getParameter("subscriber");
- if (name != null && vers != null) {
- // Display a specific feed
- Feed feed = Feed.getFeedByNameVersion(name, vers);
- if (feed == null || feed.isDeleted()) {
- message = "This feed does not exist in the database.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- } else {
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(FEEDFULL_CONTENT_TYPE);
- resp.getOutputStream().print(feed.asJSONObject(true).toString());
- }
- } else {
- // Display a list of URLs
- List list = null;
- if (name != null) {
- list = Feed.getFilteredFeedUrlList("name", name);
- } else if (publ != null) {
- list = Feed.getFilteredFeedUrlList("publ", publ);
- } else if (subs != null) {
- list = Feed.getFilteredFeedUrlList("subs", subs);
- } else {
- list = Feed.getFilteredFeedUrlList("all", null);
- }
- String t = JSONUtilities.createJSONArray(list);
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(FEEDLIST_CONTENT_TYPE);
- resp.getOutputStream().print(t);
- }
- }
- /**
- * PUT on the <drFeedsURL> -- not supported.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPut");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- String message = "PUT not allowed for the drFeedsURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
- /**
- * POST on the <drFeedsURL> -- create a new feed.
- * See the Creating a Feed section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPost");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doPost(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- String path = req.getRequestURI(); // Note: I think this should be getPathInfo(), but that doesn't work (Jetty bug?)
- if (path != null && !path.equals("/")) {
- message = "Bad URL.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // check content type is FEED_CONTENT_TYPE, version 1.0
- ContentHeader ch = getContentHeader(req);
- String ver = ch.getAttribute("version");
- if (!ch.getType().equals(FEED_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
- message = "Incorrect content-type";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- JSONObject jo = getJSONfromInput(req);
- if (jo == null) {
- message = "Badly formed JSON";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- if (intlogger.isDebugEnabled())
- intlogger.debug(jo.toString());
- if (++active_feeds > max_feeds) {
- active_feeds--;
- message = "Cannot create feed; the maximum number of feeds has been configured.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_CONFLICT);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_CONFLICT, message);
- return;
- }
- Feed feed = null;
- try {
- feed = new Feed(jo);
- } catch (InvalidObjectException e) {
- message = e.getMessage();
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- feed.setPublisher(bhdr); // set from X-ATT-DR-ON-BEHALF-OF header
-
- // Check if this feed already exists
- Feed feed2 = Feed.getFeedByNameVersion(feed.getName(), feed.getVersion());
- if (feed2 != null) {
- message = "This feed already exists in the database.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
-
- // Create FEED table entries
- if (doInsert(feed)) {
- // send response
- elr.setResult(HttpServletResponse.SC_CREATED);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_CREATED);
- resp.setContentType(FEEDFULL_CONTENT_TYPE);
- resp.setHeader("Location", feed.getLinks().getSelf());
- resp.getOutputStream().print(feed.asLimitedJSONObject().toString());
- provisioningDataChanged();
- } else {
- // Something went wrong with the INSERT
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/FeedLogServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/FeedLogServlet.java
deleted file mode 100644
index dd6b75dc..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/FeedLogServlet.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning;
-
-/**
- * This servlet handles requests to the <feedLogURL>
- * which are generated by the provisioning server to handle the log query API.
- *
- * @author Robert Eby
- * @version $Id: FeedLogServlet.java,v 1.1 2013/04/26 21:00:24 eby Exp $
- */
-@SuppressWarnings("serial")
-public class FeedLogServlet extends LogServlet {
- public FeedLogServlet() {
- super(true);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/FeedServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/FeedServlet.java
deleted file mode 100644
index aff68538..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/FeedServlet.java
+++ /dev/null
@@ -1,362 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.IOException;
-import java.io.InvalidObjectException;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONObject;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.research.datarouter.authz.AuthorizationResponse;
-import com.att.research.datarouter.provisioning.beans.EventLogRecord;
-import com.att.research.datarouter.provisioning.beans.Feed;
-import com.att.research.datarouter.provisioning.eelf.EelfMsgs;
-
-/**
- * This servlet handles provisioning for the <feedURL> which is generated by the provisioning
- * server to handle a particular feed. It supports DELETE to mark the feed as deleted,
- * and GET to retrieve information about the feed, and PUT to modify the feed.
- *
- * @author Robert Eby
- * @version $Id$
- */
-@SuppressWarnings("serial")
-public class FeedServlet extends ProxyServlet {
-
- //Adding EELF Logger Rally:US664892
- private static EELFLogger eelflogger = EELFManager.getInstance().getLogger("com.att.research.datarouter.provisioning.FeedServlet");
-
- /**
- * Delete the Feed at the address /feed/<feednumber>.
- * See the Deleting a Feed section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doDelete");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doDelete(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- int feedid = getIdFromPath(req);
- if (feedid < 0) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Feed feed = Feed.getFeedById(feedid);
- if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
-
- // Delete FEED table entry (set DELETED flag)
- feed.setDeleted(true);
- if (doUpdate(feed)) {
- active_feeds--;
- // send response
- elr.setResult(HttpServletResponse.SC_NO_CONTENT);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_NO_CONTENT);
- provisioningDataChanged();
- } else {
- // Something went wrong with the UPDATE
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
- /**
- * Get information on the feed at the address /feed/<feednumber>.
- * See the Retrieving Information about a Feed section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doGet");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doGet(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- int feedid = getIdFromPath(req);
- if (feedid < 0) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Feed feed = Feed.getFeedById(feedid);
- if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
-
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(FEEDFULL_CONTENT_TYPE);
- resp.getOutputStream().print(feed.asJSONObject(true).toString());
- }
- /**
- * PUT on the <feedURL> for a feed.
- * See the Modifying a Feed section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPut");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doPut(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- int feedid = getIdFromPath(req);
- if (feedid < 0) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Feed oldFeed = Feed.getFeedById(feedid);
- if (oldFeed == null || oldFeed.isDeleted()) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // check content type is FEED_CONTENT_TYPE, version 1.0
- ContentHeader ch = getContentHeader(req);
- String ver = ch.getAttribute("version");
- if (!ch.getType().equals(FEED_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
- message = "Incorrect content-type";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, message);
- return;
- }
- JSONObject jo = getJSONfromInput(req);
- if (jo == null) {
- message = "Badly formed JSON";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- if (intlogger.isDebugEnabled())
- intlogger.debug(jo.toString());
- Feed feed = null;
- try {
- feed = new Feed(jo);
- } catch (InvalidObjectException e) {
- message = e.getMessage();
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- feed.setFeedid(feedid);
- feed.setPublisher(bhdr); // set from X-ATT-DR-ON-BEHALF-OF header
-
- String subjectgroup = (req.getHeader("X-ATT-DR-ON-BEHALF-OF-GROUP")); //Adding for group feature:Rally US708115
- if (!oldFeed.getPublisher().equals(feed.getPublisher()) && subjectgroup == null) {
- message = "This feed must be modified by the same publisher that created it.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- if (!oldFeed.getName().equals(feed.getName())) {
- message = "The name of the feed may not be updated.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- if (!oldFeed.getVersion().equals(feed.getVersion())) {
- message = "The version of the feed may not be updated.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
-
- // Update FEEDS table entries
- if (doUpdate(feed)) {
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(FEEDFULL_CONTENT_TYPE);
- resp.getOutputStream().print(feed.asLimitedJSONObject().toString());
-
-
- /**Change Owner ship of Feed //Adding for group feature:Rally US708115*/
- if (jo.has("changeowner") && subjectgroup != null) {
- Boolean changeowner = (Boolean) jo.get("changeowner");
- if (changeowner != null && changeowner.equals(true)) {
- feed.setPublisher(req.getHeader(BEHALF_HEADER));
- feed.changeOwnerShip();
- }
- }
- /***End of change ownership*/
-
- provisioningDataChanged();
- } else {
- // Something went wrong with the UPDATE
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
- /**
- * POST on the <feedURL> -- not supported.
- */
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPost");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));
- String message = "POST not allowed for the feedURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/GroupServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/GroupServlet.java
deleted file mode 100644
index 84ec3d2c..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/GroupServlet.java
+++ /dev/null
@@ -1,386 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.IOException;
-import java.io.InvalidObjectException;
-import java.util.Collection;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONObject;
-
-import com.att.research.datarouter.authz.AuthorizationResponse;
-import com.att.research.datarouter.provisioning.BaseServlet.ContentHeader;
-import com.att.research.datarouter.provisioning.beans.EventLogRecord;
-import com.att.research.datarouter.provisioning.beans.Group;
-import com.att.research.datarouter.provisioning.beans.Subscription;
-import com.att.research.datarouter.provisioning.utils.JSONUtilities;
-
-/**
- * This servlet handles provisioning for the <groups> which is generated by the provisioning
- * server to handle the creation and inspection of groups for FEEDS and SUBSCRIPTIONS.
- *
- * @author Vikram Singh
- * @version $Id$
- * @version $Id: Group.java,v 1.0 2016/07/19
- */
-@SuppressWarnings("serial")
-public class GroupServlet extends ProxyServlet {
- /**
- * DELETE on the <GRUPS> -- not supported.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- String message = "DELETE not allowed for the GROUPS.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
- /**
- * GET on the the list of groups to a feed/sub.
- * See the Groups Collection Query section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doGet(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
-
- // Check with the Authorizer
- /*AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }*/
-
-
- /*ContentHeader ch = getContentHeader(req);
- String ver = ch.getAttribute("version");
- if (!ch.getType().equals(GROUPLIST_CONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
- intlogger.debug("Content-type is: "+req.getHeader("Content-Type"));
- message = "Incorrect content-type";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, message);
- return;
- }*/
-
-
- int groupid = getIdFromPath(req);
- if (groupid < 0) {
- message = "Missing or bad group number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
-
- Group gup = Group.getGroupById(groupid);
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(GROUPFULL_CONTENT_TYPE);
- resp.getOutputStream().print(gup.asJSONObject().toString());
-
- // Display a list of Groups
- /*Collection list = Group.getGroupById(groupid);
- String t = JSONUtilities.createJSONArray(list);
-
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(GROUPLIST_CONTENT_TYPE);
- resp.getOutputStream().print(t);*/
- }
- /**
- * PUT on the <GROUPS> -- not supported.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doPut(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- int groupid = getIdFromPath(req);
- if (groupid < 0) {
- message = "Missing or bad groupid.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Group oldgup = Group.getGroupById(groupid);
- if (oldgup == null) {
- message = "Missing or bad group number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // Check with the Authorizer
- /*AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }*/
- // check content type is SUB_CONTENT_TYPE, version 1.0
- ContentHeader ch = getContentHeader(req);
- String ver = ch.getAttribute("version");
- if (!ch.getType().equals(GROUP_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
- message = "Incorrect content-type";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, message);
- return;
- }
- JSONObject jo = getJSONfromInput(req);
- if (jo == null) {
- message = "Badly formed JSON";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- if (intlogger.isDebugEnabled())
- intlogger.debug(jo.toString());
- Group gup = null;
- try {
- gup = new Group(jo);
- } catch (InvalidObjectException e) {
- message = e.getMessage();
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- gup.setGroupid(oldgup.getGroupid());
-
-
- Group gb2 = Group.getGroupMatching(gup, oldgup.getGroupid());
- if (gb2 != null) {
- eventlogger.warn("PROV0011 Creating a duplicate Group: "+gup.getName());
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Duplicate Group:"+gup.getName());
- return;
- }
-
- // Update Groups table entries
- if (doUpdate(gup)) {
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(GROUPFULL_CONTENT_TYPE);
- resp.getOutputStream().print(gup.asJSONObject().toString());
- provisioningDataChanged();
- } else {
- // Something went wrong with the UPDATE
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
- /**
- * POST on the <groups> -- create a new GROUPS to a feed.
- * See the Creating a GROUPS section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doPost(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- /*int feedid = getIdFromPath(req);
- if (feedid < 0) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Feed feed = Feed.getFeedById(feedid);
- if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }*/
- // Check with the Authorizer
- /*AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }*/
-
- // check content type is SUB_CONTENT_TYPE, version 1.0
- ContentHeader ch = getContentHeader(req);
- String ver = ch.getAttribute("version");
- if (!ch.getType().equals(GROUP_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
- intlogger.debug("Content-type is: "+req.getHeader("Content-Type"));
- message = "Incorrect content-type";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, message);
- return;
- }
- JSONObject jo = getJSONfromInput(req);
- if (jo == null) {
- message = "Badly formed JSON";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- if (intlogger.isDebugEnabled())
- intlogger.debug(jo.toString());
-
- Group gup = null;
- try {
- gup = new Group(jo);
- } catch (InvalidObjectException e) {
- message = e.getMessage();
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- //gup.setFeedid(feedid);
- //sub.setSubscriber(bhdr); // set from X-ATT-DR-ON-BEHALF-OF header
-
- // Check if this group already exists; not an error (yet), just warn
- Group gb2 = Group.getGroupMatching(gup);
- if (gb2 != null) {
- eventlogger.warn("PROV0011 Creating a duplicate Group: "+gup.getName());
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Duplicate Group:"+gup.getName());
- return;
- }
-
-
- // Create GROUPS table entries
- if (doInsert(gup)) {
- // send response
- elr.setResult(HttpServletResponse.SC_CREATED);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_CREATED);
- resp.setContentType(GROUPFULL_CONTENT_TYPE);
- resp.getOutputStream().print(gup.asJSONObject().toString());
- provisioningDataChanged();
- } else {
- // Something went wrong with the INSERT
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/InternalServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/InternalServlet.java
deleted file mode 100644
index e50a478e..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/InternalServlet.java
+++ /dev/null
@@ -1,506 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.FileStore;
-import java.nio.file.FileSystem;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardCopyOption;
-import java.util.Properties;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONArray;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.research.datarouter.provisioning.beans.EventLogRecord;
-import com.att.research.datarouter.provisioning.beans.LogRecord;
-import com.att.research.datarouter.provisioning.beans.Parameters;
-import com.att.research.datarouter.provisioning.eelf.EelfMsgs;
-import com.att.research.datarouter.provisioning.utils.DB;
-import com.att.research.datarouter.provisioning.utils.RLEBitSet;
-import com.att.research.datarouter.provisioning.utils.LogfileLoader;
-
-/**
- *
- * This servlet handles requests to URLs under /internal on the provisioning server.
- * These include:
- *
- *
- *
- * URL Path Summary
- *
- * URL Path
- * Method
- * Purpose
- *
- *
- * /internal/prov
- * GET
- * used to GET a full JSON copy of the provisioning data.
- *
- *
- * /internal/fetchProv
- * GET
- * used to signal to a standby POD that the provisioning data should be fetched from the active POD.
- *
- *
- * /internal/logs
- * GET
- * used to GET an index of log files and individual logs for this provisioning server.
- *
- *
- * POST
- * used to POST log files from the individual nodes to this provisioning server.
- *
- *
- * /internal/api
- * GET
- * used to GET an individual parameter value. The parameter name is specified by the path after /api/.
- *
- *
- * PUT
- * used to set an individual parameter value. The parameter name is specified by the path after /api/.
- *
- *
- * DELETE
- * used to remove an individual parameter value. The parameter name is specified by the path after /api/.
- *
- *
- * POST
- * used to create a new individual parameter value. The parameter name is specified by the path after /api/.
- *
- *
- * /internal/halt
- * GET
- * used to halt the server (must be accessed from 127.0.0.1).
- *
- *
- * /internal/drlogs
- * GET
- * used to get a list of DR log entries available for retrieval.
- * Note: these are the actual data router log entries sent to the provisioning server
- * by the nodes, not the provisioning server's internal logs (access via /internal/logs above).
- * The range is returned as a list of record sequence numbers.
- *
- *
- * POST
- * used to retrieve specific log entries.
- * The sequence numbers of the records to fetch are POST-ed; the records matching the sequence numbers are returned.
- *
- *
- * /internal/route/*
- * *
- * URLs under this path are handled via the {@link com.att.research.datarouter.provisioning.RouteServlet}
- *
- *
- *
- *
- * Authorization to use these URLs is a little different than for other URLs on the provisioning server.
- * For the most part, the IP address that the request comes from should be either:
- *
- *
- * an IP address of a provisioning server, or
- * the IP address of a node (to allow access to /internal/prov), or
- * an IP address from the "special subnet " which is configured with
- * the PROV_SPECIAL_SUBNET parameter.
- *
- *
- * In addition, requests to /internal/halt can ONLY come from localhost (127.0.0.1) on the HTTP port.
- *
- *
- * All DELETE/GET/PUT/POST requests made to /internal/api on this servlet on the standby server are
- * proxied to the active server (using the {@link ProxyServlet}) if it is up and reachable.
- *
- *
- * @author Robert Eby
- * @version $Id: InternalServlet.java,v 1.23 2014/03/24 18:47:10 eby Exp $
- */
-@SuppressWarnings("serial")
-public class InternalServlet extends ProxyServlet {
- private static Integer logseq = new Integer(0); // another piece of info to make log spool file names unique
- //Adding EELF Logger Rally:US664892
- private static EELFLogger eelflogger = EELFManager.getInstance().getLogger("com.att.research.datarouter.provisioning.InternalServlet");
-
- /**
- * Delete a parameter at the address /internal/api/<parameter>.
- * See the Internal API document for details on how this method should be invoked.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doDelete");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized.");
- return;
- }
-
- String path = req.getPathInfo();
- if (path.startsWith("/api/")) {
- if (isProxyOK(req) && isProxyServer()) {
- super.doDelete(req, resp);
- return;
- }
- String key = path.substring(5);
- if (key.length() > 0) {
- Parameters param = Parameters.getParameter(key);
- if (param != null) {
- if (doDelete(param)) {
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- provisioningDataChanged();
- provisioningParametersChanged();
- } else {
- // Something went wrong with the DELETE
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- return;
- }
- }
- }
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Bad URL.");
- }
- /**
- * Get some information (such as a parameter) underneath the /internal/ namespace.
- * See the Internal API document for details on how this method should be invoked.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doGet");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- String path = req.getPathInfo();
- if (path.equals("/halt") && !req.isSecure()) {
- // request to halt the server - can ONLY come from localhost
- String remote = req.getRemoteAddr();
- if (remote.equals("127.0.0.1")) {
- intlogger.info("PROV0009 Request to HALT received.");
- resp.setStatus(HttpServletResponse.SC_OK);
- Main.shutdown();
- } else {
- intlogger.info("PROV0010 Disallowed request to HALT received from "+remote);
- resp.setStatus(HttpServletResponse.SC_FORBIDDEN);
- }
- return;
- }
-
- EventLogRecord elr = new EventLogRecord(req);
- if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized.");
- return;
- }
- if (path.equals("/fetchProv") && !req.isSecure()) {
- // if request came from active_pod or standby_pod and it is not us, reload prov data
- SynchronizerTask s = SynchronizerTask.getSynchronizer();
- s.doFetch();
- resp.setStatus(HttpServletResponse.SC_OK);
- return;
- }
- if (path.equals("/prov")) {
- if (isProxyOK(req) && isProxyServer()) {
- if (super.doGetWithFallback(req, resp))
- return;
- // fall back to returning the local data if the remote is unreachable
- intlogger.info("Active server unavailable; falling back to local copy.");
- }
- Poker p = Poker.getPoker();
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(PROVFULL_CONTENT_TYPE2);
- resp.getOutputStream().print(p.getProvisioningString());
- return;
- }
- if (path.equals("/logs") || path.equals("/logs/")) {
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("application/json");
- resp.getOutputStream().print(generateLogfileList().toString());
- return;
- }
- if (path.startsWith("/logs/")) {
- Properties p = (new DB()).getProperties();
- String logdir = p.getProperty("com.att.research.datarouter.provserver.accesslog.dir");
- String logfile = path.substring(6);
- if (logdir != null && logfile != null && logfile.indexOf('/') < 0) {
- File log = new File(logdir + "/" + logfile);
- if (log.exists() && log.isFile()) {
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("text/plain");
- Path logpath = Paths.get(log.getAbsolutePath());
- Files.copy(logpath, resp.getOutputStream());
- return;
- }
- }
- resp.sendError(HttpServletResponse.SC_NO_CONTENT, "No file.");
- return;
- }
- if (path.startsWith("/api/")) {
- if (isProxyOK(req) && isProxyServer()) {
- super.doGet(req, resp);
- return;
- }
- String key = path.substring(5);
- if (key.length() > 0) {
- Parameters param = Parameters.getParameter(key);
- if (param != null) {
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("text/plain");
- resp.getOutputStream().print(param.getValue() + "\n");
- return;
- }
- }
- }
- if (path.equals("/drlogs") || path.equals("/drlogs/")) {
- // Special POD <=> POD API to determine what log file records are loaded here
- LogfileLoader lfl = LogfileLoader.getLoader();
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("text/plain");
- resp.getOutputStream().print(lfl.getBitSet().toString());
- return;
- }
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Bad URL.");
- }
- /**
- * Modify a parameter at the address /internal/api/<parameter>.
- * See the Internal API document for details on how this method should be invoked.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPut");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized.");
- return;
- }
- String path = req.getPathInfo();
- if (path.startsWith("/api/")) {
- if (isProxyOK(req) && isProxyServer()) {
- super.doPut(req, resp);
- return;
- }
- String key = path.substring(5);
- if (key.length() > 0) {
- Parameters param = Parameters.getParameter(key);
- if (param != null) {
- String t = catValues(req.getParameterValues("val"));
- param.setValue(t);
- if (doUpdate(param)) {
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- provisioningDataChanged();
- provisioningParametersChanged();
- } else {
- // Something went wrong with the UPDATE
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- return;
- }
- }
- }
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Bad URL.");
- }
- /**
- * Create some new information (such as a parameter or log entries) underneath the /internal/ namespace.
- * See the Internal API document for details on how this method should be invoked.
- */
- @SuppressWarnings("resource")
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPost");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));
- EventLogRecord elr = new EventLogRecord(req);
- if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized.");
- return;
- }
-
- String path = req.getPathInfo();
- if (path.startsWith("/api/")) {
- if (isProxyOK(req) && isProxyServer()) {
- super.doPost(req, resp);
- return;
- }
- String key = path.substring(5);
- if (key.length() > 0) {
- Parameters param = Parameters.getParameter(key);
- if (param == null) {
- String t = catValues(req.getParameterValues("val"));
- param = new Parameters(key, t);
- if (doInsert(param)) {
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- provisioningDataChanged();
- provisioningParametersChanged();
- } else {
- // Something went wrong with the INSERT
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- return;
- }
- }
- }
-
- if (path.equals("/logs") || path.equals("/logs/")) {
- String ctype = req.getHeader("Content-Type");
- if (ctype == null || !ctype.equals("text/plain")) {
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- elr.setMessage("Bad media type: "+ctype);
- resp.setStatus(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- return;
- }
- String spooldir = (new DB()).getProperties().getProperty("com.att.research.datarouter.provserver.spooldir");
- String spoolname = String.format("%d-%d-", System.currentTimeMillis(), Thread.currentThread().getId());
- synchronized (logseq) {
- // perhaps unnecessary, but it helps make the name unique
- spoolname += logseq.toString();
- logseq++;
- }
- String encoding = req.getHeader("Content-Encoding");
- if (encoding != null) {
- if (encoding.trim().equals("gzip")) {
- spoolname += ".gz";
- } else {
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- resp.setStatus(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- return;
- }
- }
- // Determine space available -- available space must be at least 5%
- FileSystem fs = (Paths.get(spooldir)).getFileSystem();
- long total = 0;
- long avail = 0;
- for (FileStore store: fs.getFileStores()) {
- total += store.getTotalSpace();
- avail += store.getUsableSpace();
- }
- try { fs.close(); } catch (Exception e) { }
- if (((avail * 100) / total) < 5) {
- elr.setResult(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
- resp.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
- eventlogger.info(elr);
- return;
- }
- Path tmppath = Paths.get(spooldir, spoolname);
- Path donepath = Paths.get(spooldir, "IN."+spoolname);
- Files.copy(req.getInputStream(), Paths.get(spooldir, spoolname), StandardCopyOption.REPLACE_EXISTING);
- Files.move(tmppath, donepath, StandardCopyOption.REPLACE_EXISTING);
- elr.setResult(HttpServletResponse.SC_CREATED);
- resp.setStatus(HttpServletResponse.SC_CREATED);
- eventlogger.info(elr);
- LogfileLoader.getLoader(); // This starts the logfile loader "task"
- return;
- }
-
- if (path.equals("/drlogs") || path.equals("/drlogs/")) {
- // Receive post request and generate log entries
- String ctype = req.getHeader("Content-Type");
- if (ctype == null || !ctype.equals("text/plain")) {
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- elr.setMessage("Bad media type: "+ctype);
- resp.setStatus(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- return;
- }
- InputStream is = req.getInputStream();
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- int ch = 0;
- while ((ch = is.read()) >= 0)
- bos.write(ch);
- RLEBitSet bs = new RLEBitSet(bos.toString()); // The set of records to retrieve
- elr.setResult(HttpServletResponse.SC_OK);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("text/plain");
- LogRecord.printLogRecords(resp.getOutputStream(), bs);
- eventlogger.info(elr);
- return;
- }
-
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Bad URL.");
- eventlogger.info(elr);
- }
-
- private String catValues(String[] v) {
- StringBuilder sb = new StringBuilder();
- if (v != null) {
- String pfx = "";
- for (String s : v) {
- sb.append(pfx);
- sb.append(s);
- pfx = "|";
- }
- }
- return sb.toString();
- }
- private JSONArray generateLogfileList() {
- JSONArray ja = new JSONArray();
- Properties p = (new DB()).getProperties();
- String s = p.getProperty("com.att.research.datarouter.provserver.accesslog.dir");
- if (s != null) {
- String[] dirs = s.split(",");
- for (String dir : dirs) {
- File f = new File(dir);
- String[] list = f.list();
- if (list != null) {
- for (String s2 : list) {
- if (!s2.startsWith("."))
- ja.put(s2);
- }
- }
- }
- }
- return ja;
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/LogServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/LogServlet.java
deleted file mode 100644
index 7ef74d12..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/LogServlet.java
+++ /dev/null
@@ -1,433 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.LOGJSONObject;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.research.datarouter.provisioning.beans.DeliveryRecord;
-import com.att.research.datarouter.provisioning.beans.EventLogRecord;
-import com.att.research.datarouter.provisioning.beans.ExpiryRecord;
-import com.att.research.datarouter.provisioning.beans.LOGJSONable;
-import com.att.research.datarouter.provisioning.beans.PublishRecord;
-import com.att.research.datarouter.provisioning.beans.Subscription;
-import com.att.research.datarouter.provisioning.eelf.EelfMsgs;
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * This servlet handles requests to the <feedLogURL> and <subLogURL>,
- * which are generated by the provisioning server to handle the log query API.
- *
- * @author Robert Eby
- * @version $Id: LogServlet.java,v 1.11 2014/03/28 17:27:02 eby Exp $
- */
-@SuppressWarnings("serial")
-public class LogServlet extends BaseServlet {
- //Adding EELF Logger Rally:US664892
- private static EELFLogger eelflogger = EELFManager.getInstance().getLogger("com.att.research.datarouter.provisioning.LogServlet");
-
- private static final long TWENTYFOUR_HOURS = (24 * 60 * 60 * 1000L);
- private static final String fmt1 = "yyyy-MM-dd'T'HH:mm:ss'Z'";
- private static final String fmt2 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
-
- private boolean isfeedlog;
-
- public abstract class RowHandler {
- private final ServletOutputStream out;
- private final String[] fields;
- public boolean firstrow;
-
- public RowHandler(ServletOutputStream out, String fieldparam, boolean b) {
- this.out = out;
- this.firstrow = b;
- this.fields = (fieldparam != null) ? fieldparam.split(":") : null;
- }
- public void handleRow(ResultSet rs) {
- try {
- LOGJSONable js = buildJSONable(rs);
- LOGJSONObject jo = js.asJSONObject();
- if (fields != null) {
- // filter out unwanted fields
- LOGJSONObject j2 = new LOGJSONObject();
- for (String key : fields) {
- Object v = jo.opt(key);
- if (v != null)
- j2.put(key, v);
- }
- jo = j2;
- }
- String t = firstrow ? "\n" : ",\n";
- t += jo.toString();
- out.print(t);
- firstrow = false;
- } catch (Exception e) {
- // ignore
- }
- }
- public abstract LOGJSONable buildJSONable(ResultSet rs) throws SQLException;
- }
- public class PublishRecordRowHandler extends RowHandler {
- public PublishRecordRowHandler(ServletOutputStream out, String fields, boolean b) {
- super(out, fields, b);
- }
- @Override
- public LOGJSONable buildJSONable(ResultSet rs) throws SQLException {
- return new PublishRecord(rs);
- }
- }
- public class DeliveryRecordRowHandler extends RowHandler {
- public DeliveryRecordRowHandler(ServletOutputStream out, String fields, boolean b) {
- super(out, fields, b);
- }
- @Override
- public LOGJSONable buildJSONable(ResultSet rs) throws SQLException {
- return new DeliveryRecord(rs);
- }
- }
- public class ExpiryRecordRowHandler extends RowHandler {
- public ExpiryRecordRowHandler(ServletOutputStream out, String fields, boolean b) {
- super(out, fields, b);
- }
- @Override
- public LOGJSONable buildJSONable(ResultSet rs) throws SQLException {
- return new ExpiryRecord(rs);
- }
- }
-
- /**
- * This class must be created from either a {@link FeedLogServlet} or a {@link SubLogServlet}.
- * @param isFeedLog boolean to handle those places where a feedlog request is different from
- * a sublog request
- */
- protected LogServlet(boolean isFeedLog) {
- this.isfeedlog = isFeedLog;
- }
-
- /**
- * DELETE a logging URL -- not supported.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doDelete");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- String message = "DELETE not allowed for the logURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
- /**
- * GET a logging URL -- retrieve logging data for a feed or subscription.
- * See the Logging API document for details on how this method should be invoked.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doGet");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- int id = getIdFromPath(req);
- if (id < 0) {
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Missing or bad feed/subscription number.");
- return;
- }
- Map map = buildMapFromRequest(req);
- if (map.get("err") != null) {
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments: "+map.get("err"));
- return;
- }
- // check Accept: header??
-
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(LOGLIST_CONTENT_TYPE);
- @SuppressWarnings("resource")
- ServletOutputStream out = resp.getOutputStream();
- final String fields = req.getParameter("fields");
-
- out.print("[");
- if (isfeedlog) {
- // Handle /feedlog/feedid request
- boolean firstrow = true;
-
- // 1. Collect publish records for this feed
- RowHandler rh = new PublishRecordRowHandler(out, fields, firstrow);
- getPublishRecordsForFeed(id, rh, map);
- firstrow = rh.firstrow;
-
- // 2. Collect delivery records for subscriptions to this feed
- rh = new DeliveryRecordRowHandler(out, fields, firstrow);
- getDeliveryRecordsForFeed(id, rh, map);
- firstrow = rh.firstrow;
-
- // 3. Collect expiry records for subscriptions to this feed
- rh = new ExpiryRecordRowHandler(out, fields, firstrow);
- getExpiryRecordsForFeed(id, rh, map);
- } else {
- // Handle /sublog/subid request
- Subscription sub = Subscription.getSubscriptionById(id);
- if (sub != null) {
- // 1. Collect publish records for the feed this subscription feeds
- RowHandler rh = new PublishRecordRowHandler(out, fields, true);
- getPublishRecordsForFeed(sub.getFeedid(), rh, map);
-
- // 2. Collect delivery records for this subscription
- rh = new DeliveryRecordRowHandler(out, fields, rh.firstrow);
- getDeliveryRecordsForSubscription(id, rh, map);
-
- // 3. Collect expiry records for this subscription
- rh = new ExpiryRecordRowHandler(out, fields, rh.firstrow);
- getExpiryRecordsForSubscription(id, rh, map);
- }
- }
- out.print("\n]");
- }
- /**
- * PUT a logging URL -- not supported.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPut");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- String message = "PUT not allowed for the logURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
- /**
- * POST a logging URL -- not supported.
- */
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPost");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));
- String message = "POST not allowed for the logURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
-
- private Map buildMapFromRequest(HttpServletRequest req) {
- Map map = new HashMap();
- String s = req.getParameter("type");
- if (s != null) {
- if (s.equals("pub") || s.equals("del") || s.equals("exp")) {
- map.put("type", s);
- } else {
- map.put("err", "bad type");
- return map;
- }
- } else
- map.put("type", "all");
- map.put("publishSQL", "");
- map.put("statusSQL", "");
- map.put("resultSQL", "");
- map.put("reasonSQL", "");
-
- s = req.getParameter("publishId");
- if (s != null) {
- if (s.indexOf("'") >= 0) {
- map.put("err", "bad publishId");
- return map;
- }
- map.put("publishSQL", " AND PUBLISH_ID = '"+s+"'");
- }
-
- s = req.getParameter("statusCode");
- if (s != null) {
- String sql = null;
- if (s.equals("success")) {
- sql = " AND STATUS >= 200 AND STATUS < 300";
- } else if (s.equals("redirect")) {
- sql = " AND STATUS >= 300 AND STATUS < 400";
- } else if (s.equals("failure")) {
- sql = " AND STATUS >= 400";
- } else {
- try {
- Integer n = Integer.parseInt(s);
- if ((n >= 100 && n < 600) || (n == -1))
- sql = " AND STATUS = " + n;
- } catch (NumberFormatException e) {
- }
- }
- if (sql == null) {
- map.put("err", "bad statusCode");
- return map;
- }
- map.put("statusSQL", sql);
- map.put("resultSQL", sql.replaceAll("STATUS", "RESULT"));
- }
-
- s = req.getParameter("expiryReason");
- if (s != null) {
- map.put("type", "exp");
- if (s.equals("notRetryable")) {
- map.put("reasonSQL", " AND REASON = 'notRetryable'");
- } else if (s.equals("retriesExhausted")) {
- map.put("reasonSQL", " AND REASON = 'retriesExhausted'");
- } else if (s.equals("diskFull")) {
- map.put("reasonSQL", " AND REASON = 'diskFull'");
- } else if (s.equals("other")) {
- map.put("reasonSQL", " AND REASON = 'other'");
- } else {
- map.put("err", "bad expiryReason");
- return map;
- }
- }
-
- long stime = getTimeFromParam(req.getParameter("start"));
- if (stime < 0) {
- map.put("err", "bad start");
- return map;
- }
- long etime = getTimeFromParam(req.getParameter("end"));
- if (etime < 0) {
- map.put("err", "bad end");
- return map;
- }
- if (stime == 0 && etime == 0) {
- etime = System.currentTimeMillis();
- stime = etime - TWENTYFOUR_HOURS;
- } else if (stime == 0) {
- stime = etime - TWENTYFOUR_HOURS;
- } else if (etime == 0) {
- etime = stime + TWENTYFOUR_HOURS;
- }
- map.put("timeSQL", String.format(" AND EVENT_TIME >= %d AND EVENT_TIME <= %d", stime, etime));
- return map;
- }
- private long getTimeFromParam(final String s) {
- if (s == null)
- return 0;
- try {
- // First, look for an RFC 3339 date
- String fmt = (s.indexOf('.') > 0) ? fmt2 : fmt1;
- SimpleDateFormat sdf = new SimpleDateFormat(fmt);
- Date d = sdf.parse(s);
- return d.getTime();
- } catch (ParseException e) {
- }
- try {
- // Also allow a long (in ms); useful for testing
- long n = Long.parseLong(s);
- return n;
- } catch (NumberFormatException e) {
- }
- intlogger.info("Error parsing time="+s);
- return -1;
- }
-
- private void getPublishRecordsForFeed(int feedid, RowHandler rh, Map map) {
- String type = map.get("type");
- if (type.equals("all") || type.equals("pub")) {
- String sql = "select * from LOG_RECORDS where FEEDID = "+feedid
- + " AND TYPE = 'pub'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("statusSQL");
- getRecordsForSQL(sql, rh);
- }
- }
- private void getDeliveryRecordsForFeed(int feedid, RowHandler rh, Map map) {
- String type = map.get("type");
- if (type.equals("all") || type.equals("del")) {
- String sql = "select * from LOG_RECORDS where FEEDID = "+feedid
- + " AND TYPE = 'del'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("resultSQL");
- getRecordsForSQL(sql, rh);
- }
- }
- private void getDeliveryRecordsForSubscription(int subid, RowHandler rh, Map map) {
- String type = map.get("type");
- if (type.equals("all") || type.equals("del")) {
- String sql = "select * from LOG_RECORDS where DELIVERY_SUBID = "+subid
- + " AND TYPE = 'del'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("resultSQL");
- getRecordsForSQL(sql, rh);
- }
- }
- private void getExpiryRecordsForFeed(int feedid, RowHandler rh, Map map) {
- String type = map.get("type");
- if (type.equals("all") || type.equals("exp")) {
- String st = map.get("statusSQL");
- if (st == null || st.length() == 0) {
- String sql = "select * from LOG_RECORDS where FEEDID = "+feedid
- + " AND TYPE = 'exp'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("reasonSQL");
- getRecordsForSQL(sql, rh);
- }
- }
- }
- private void getExpiryRecordsForSubscription(int subid, RowHandler rh, Map map) {
- String type = map.get("type");
- if (type.equals("all") || type.equals("exp")) {
- String st = map.get("statusSQL");
- if (st == null || st.length() == 0) {
- String sql = "select * from LOG_RECORDS where DELIVERY_SUBID = "+subid
- + " AND TYPE = 'exp'"
- + map.get("timeSQL") + map.get("publishSQL") + map.get("reasonSQL");
- getRecordsForSQL(sql, rh);
- }
- }
- }
- private void getRecordsForSQL(String sql, RowHandler rh) {
- intlogger.debug(sql);
- long start = System.currentTimeMillis();
- DB db = new DB();
- Connection conn = null;
- try {
- conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- while (rs.next()) {
- rh.handleRow(rs);
- }
- rs.close();
- stmt.close();
- } catch (SQLException e) {
- e.printStackTrace();
- } finally {
- if (conn != null)
- db.release(conn);
- }
- intlogger.debug("Time: " + (System.currentTimeMillis()-start) + " ms");
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/Main.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/Main.java
deleted file mode 100644
index 5911ecd8..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/Main.java
+++ /dev/null
@@ -1,245 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.security.Security;
-import java.util.Properties;
-import java.util.Timer;
-
-import org.apache.log4j.Logger;
-import org.eclipse.jetty.server.Connector;
-import org.eclipse.jetty.server.Handler;
-import org.eclipse.jetty.server.NCSARequestLog;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.handler.ContextHandlerCollection;
-import org.eclipse.jetty.server.handler.DefaultHandler;
-import org.eclipse.jetty.server.handler.HandlerCollection;
-import org.eclipse.jetty.server.handler.RequestLogHandler;
-import org.eclipse.jetty.server.nio.SelectChannelConnector;
-import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
-import org.eclipse.jetty.servlet.FilterHolder;
-import org.eclipse.jetty.servlet.FilterMapping;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.eclipse.jetty.util.ssl.SslContextFactory;
-import org.eclipse.jetty.util.thread.QueuedThreadPool;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-import com.att.research.datarouter.provisioning.utils.LogfileLoader;
-import com.att.research.datarouter.provisioning.utils.PurgeLogDirTask;
-import com.att.research.datarouter.provisioning.utils.ThrottleFilter;
-
-/**
- *
- * A main class which may be used to start the provisioning server with an "embedded" Jetty server.
- * Configuration is done via the properties file provserver.properties , which should be in the CLASSPATH.
- * The provisioning server may also be packaged with a web.xml and started as a traditional webapp.
- *
- *
- * Most of the work of the provisioning server is carried out within the eight servlets (configured below)
- * that are used to handle each of the eight types of requests the server may receive.
- * In addition, there are background threads started to perform other tasks:
- *
- *
- * One background Thread runs the {@link LogfileLoader} in order to process incoming logfiles.
- * This Thread is created as a side effect of the first successful POST to the /internal/logs/ servlet.
- * One background Thread runs the {@link SynchronizerTask} which is used to periodically
- * synchronize the database between active and standby servers.
- * One background Thread runs the {@link Poker} which is used to notify the nodes whenever
- * provisioning data changes.
- * One task is run once a day to run {@link PurgeLogDirTask} which purges older logs from the
- * /opt/app/datartr/logs directory.
- *
- *
- * The provisioning server is stopped by issuing a GET to the URL http://127.0.0.1/internal/halt
- * using curl or some other such tool.
- *
- *
- * @author Robert Eby
- * @version $Id: Main.java,v 1.12 2014/03/12 19:45:41 eby Exp $
- */
-public class Main {
- /** The truststore to use if none is specified */
- public static final String DEFAULT_TRUSTSTORE = "/opt/java/jdk/jdk180/jre/lib/security/cacerts";
- public static final String KEYSTORE_TYPE_PROPERTY = "com.att.research.datarouter.provserver.keystore.type";
- public static final String KEYSTORE_PATH_PROPERTY = "com.att.research.datarouter.provserver.keystore.path";
- public static final String KEYSTORE_PASSWORD_PROPERTY = "com.att.research.datarouter.provserver.keystore.password";
- public static final String TRUSTSTORE_PATH_PROPERTY = "com.att.research.datarouter.provserver.truststore.path";
- public static final String TRUSTSTORE_PASSWORD_PROPERTY = "com.att.research.datarouter.provserver.truststore.password";
-
- /** The one and only {@link Server} instance in this JVM */
- private static Server server;
-
- /**
- * Starts the Data Router Provisioning server.
- * @param args not used
- * @throws Exception if Jetty has a problem starting
- */
- public static void main(String[] args) throws Exception {
- Security.setProperty("networkaddress.cache.ttl", "4");
- Logger logger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
-
- // Check DB is accessible and contains the expected tables
- if (! checkDatabase(logger))
- System.exit(1);
-
- logger.info("PROV0000 **** AT&T Data Router Provisioning Server starting....");
-
- // Get properties
- Properties p = (new DB()).getProperties();
- int http_port = Integer.parseInt(p.getProperty("com.att.research.datarouter.provserver.http.port", "8080"));
- int https_port = Integer.parseInt(p.getProperty("com.att.research.datarouter.provserver.https.port", "8443"));
-
- // HTTP connector
- SelectChannelConnector http = new SelectChannelConnector();
- http.setPort(http_port);
- http.setMaxIdleTime(300000);
- http.setRequestHeaderSize(2048);
- http.setAcceptors(2);
- http.setConfidentialPort(https_port);
- http.setLowResourcesConnections(20000);
-
- // HTTPS connector
- SslSelectChannelConnector https = new SslSelectChannelConnector();
- https.setPort(https_port);
- https.setMaxIdleTime(30000);
- https.setRequestHeaderSize(8192);
- https.setAcceptors(2);
-
- // SSL stuff
- SslContextFactory cf = https.getSslContextFactory();
-
- /**Skip SSLv3 Fixes*/
- cf.addExcludeProtocols("SSLv3");
- logger.info("Excluded protocols prov-"+cf.getExcludeProtocols());
- /**End of SSLv3 Fixes*/
-
- cf.setKeyStoreType(p.getProperty(KEYSTORE_TYPE_PROPERTY, "jks"));
- cf.setKeyStorePath(p.getProperty(KEYSTORE_PATH_PROPERTY));
- cf.setKeyStorePassword(p.getProperty(KEYSTORE_PASSWORD_PROPERTY));
- cf.setKeyManagerPassword(p.getProperty("com.att.research.datarouter.provserver.keymanager.password"));
- String ts = p.getProperty(TRUSTSTORE_PATH_PROPERTY);
- if (ts != null && ts.length() > 0) {
- System.out.println("@@ TS -> "+ts);
- cf.setTrustStore(ts);
- cf.setTrustStorePassword(p.getProperty(TRUSTSTORE_PASSWORD_PROPERTY));
- } else {
- cf.setTrustStore(DEFAULT_TRUSTSTORE);
- cf.setTrustStorePassword("changeit");
- }
- cf.setTrustStore("/opt/app/datartr/self_signed/cacerts.jks");
- cf.setTrustStorePassword("changeit");
- cf.setWantClientAuth(true);
-
- // Servlet and Filter configuration
- ServletContextHandler ctxt = new ServletContextHandler(0);
- ctxt.setContextPath("/");
- ctxt.addServlet(new ServletHolder(new FeedServlet()), "/feed/*");
- ctxt.addServlet(new ServletHolder(new FeedLogServlet()), "/feedlog/*");
- ctxt.addServlet(new ServletHolder(new PublishServlet()), "/publish/*");
- ctxt.addServlet(new ServletHolder(new SubscribeServlet()), "/subscribe/*");
- ctxt.addServlet(new ServletHolder(new StatisticsServlet()), "/statistics/*");
- ctxt.addServlet(new ServletHolder(new SubLogServlet()), "/sublog/*");
- ctxt.addServlet(new ServletHolder(new GroupServlet()), "/group/*"); //Provision groups - Rally US708115 -1610
- ctxt.addServlet(new ServletHolder(new SubscriptionServlet()), "/subs/*");
- ctxt.addServlet(new ServletHolder(new InternalServlet()), "/internal/*");
- ctxt.addServlet(new ServletHolder(new RouteServlet()), "/internal/route/*");
- ctxt.addServlet(new ServletHolder(new DRFeedsServlet()), "/");
- ctxt.addFilter (new FilterHolder (new ThrottleFilter()), "/publish/*", FilterMapping.REQUEST);
-
- ContextHandlerCollection contexts = new ContextHandlerCollection();
- contexts.addHandler(ctxt);
-
- // Request log configuration
- NCSARequestLog nrl = new NCSARequestLog();
- nrl.setFilename(p.getProperty("com.att.research.datarouter.provserver.accesslog.dir") + "/request.log.yyyy_mm_dd");
- nrl.setFilenameDateFormat("yyyyMMdd");
- nrl.setRetainDays(90);
- nrl.setAppend(true);
- nrl.setExtended(false);
- nrl.setLogCookies(false);
- nrl.setLogTimeZone("GMT");
-
- RequestLogHandler reqlog = new RequestLogHandler();
- reqlog.setRequestLog(nrl);
-
- // Server's Handler collection
- HandlerCollection hc = new HandlerCollection();
- hc.setHandlers(new Handler[] { contexts, new DefaultHandler() });
- hc.addHandler(reqlog);
-
- // Server's thread pool
- QueuedThreadPool pool = new QueuedThreadPool();
- pool.setMinThreads(10);
- pool.setMaxThreads(200);
- pool.setDetailedDump(false);
-
- // Daemon to clean up the log directory on a daily basis
- Timer rolex = new Timer();
- rolex.scheduleAtFixedRate(new PurgeLogDirTask(), 0, 86400000L); // run once per day
-
- // Start LogfileLoader
- LogfileLoader.getLoader();
-
- // The server itself
- server = new Server();
- server.setThreadPool(pool);
- server.setConnectors(new Connector[] { http, https });
- server.setHandler(hc);
- server.setStopAtShutdown(true);
- server.setSendServerVersion(true);
- server.setSendDateHeader(true);
- server.setGracefulShutdown(5000); // allow 5 seconds for servlets to wrap up
- server.setDumpAfterStart(false);
- server.setDumpBeforeStop(false);
-
- server.start();
- server.join();
- logger.info("PROV0001 **** AT&T Data Router Provisioning Server halted.");
- }
-
- private static boolean checkDatabase(Logger logger) {
- DB db = new DB();
- return db.runRetroFits();
- }
-
- /**
- * Stop the Jetty server.
- */
- public static void shutdown() {
- new Thread() {
- @Override
- public void run() {
- try {
- server.stop();
- Thread.sleep(5000L);
- System.exit(0);
- } catch (Exception e) {
- // ignore
- }
- }
- }.start();
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/Poker.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/Poker.java
deleted file mode 100644
index 13350df0..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/Poker.java
+++ /dev/null
@@ -1,318 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.HttpURLConnection;
-import java.net.InetAddress;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.UnknownHostException;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.TreeSet;
-
-import javax.servlet.ServletException;
-
-import org.apache.log4j.Logger;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-
-import com.att.research.datarouter.provisioning.beans.EgressRoute;
-import com.att.research.datarouter.provisioning.beans.Feed;
-import com.att.research.datarouter.provisioning.beans.IngressRoute;
-import com.att.research.datarouter.provisioning.beans.NetworkRoute;
-import com.att.research.datarouter.provisioning.beans.Parameters;
-import com.att.research.datarouter.provisioning.beans.Subscription;
-import com.att.research.datarouter.provisioning.beans.Group; //Groups feature Rally:US708115 - 1610
-import com.att.research.datarouter.provisioning.utils.*;
-
-/**
- * This class handles the two timers (described in R1 Design Notes), and takes care of issuing
- * the GET to each node of the URL to "poke".
- *
- * @author Robert Eby
- * @version $Id: Poker.java,v 1.11 2014/01/08 16:13:47 eby Exp $
- */
-public class Poker extends TimerTask {
- /** Template used to generate the URL to issue the GET against */
- public static final String POKE_URL_TEMPLATE = "http://%s/internal/fetchProv";
-
-
-
-
- /** This is a singleton -- there is only one Poker object in the server */
- private static Poker p;
-
- /**
- * Get the singleton Poker object.
- * @return the Poker
- */
- public static synchronized Poker getPoker() {
- if (p == null)
- p = new Poker();
- return p;
- }
-
- private long timer1;
- private long timer2;
- private Timer rolex;
- private String this_pod; // DNS name of this machine
- private Logger logger;
- private String provstring;
-
- private Poker() {
- timer1 = timer2 = 0;
- rolex = new Timer();
- logger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- try {
- this_pod = InetAddress.getLocalHost().getHostName();
- } catch (UnknownHostException e) {
- this_pod = "*UNKNOWN*"; // not a major problem
- }
- provstring = buildProvisioningString();
-
- rolex.scheduleAtFixedRate(this, 0L, 1000L); // Run once a second to check the timers
- }
-
- /**
- * This method sets the two timers described in the design notes.
- * @param t1 the first timer controls how long to wait after a provisioning request before poking each node
- * This timer can be reset if it has not "gone off".
- * @param t2 the second timer set the outer bound on how long to wait. It cannot be reset.
- */
- public void setTimers(long t1, long t2) {
- synchronized (this_pod) {
- if (timer1 == 0 || t1 > timer1)
- timer1 = t1;
- if (timer2 == 0)
- timer2 = t2;
- }
- if (logger.isDebugEnabled())
- logger.debug("Poker timers set to " + timer1 + " and " + timer2);
-
-
- }
-
- /**
- * Return the last provisioning string built.
- * @return the last provisioning string built.
- */
- public String getProvisioningString() {
- return provstring;
- }
-
- /**
- * The method to run at the predefined interval (once per second). This method checks
- * to see if either of the two timers has expired, and if so, will rebuild the provisioning
- * string, and poke all the nodes and other PODs. The timers are then reset to 0.
- */
- @Override
- public void run() {
- try {
- if (timer1 > 0) {
- long now = System.currentTimeMillis();
- boolean fire = false;
- synchronized (this_pod) {
- if (now > timer1 || now > timer2) {
- timer1 = timer2 = 0;
- fire = true;
- }
- }
- if (fire) {
- // Rebuild the prov string
- provstring = buildProvisioningString();
-
- // Only the active POD should poke nodes, etc.
- boolean active = SynchronizerTask.getSynchronizer().isActive();
- if (active) {
- // Poke all the DR nodes
- for (String n : BaseServlet.getNodes()) {
- pokeNode(n);
- }
- // Poke the pod that is not us
- for (String n : BaseServlet.getPods()) {
- if (n.length() > 0 && !n.equals(this_pod))
- pokeNode(n);
- }
- }
- }
- }
- } catch (Exception e) {
- logger.warn("PROV0020: Caught exception in Poker: "+e);
- e.printStackTrace();
- }
- }
- private void pokeNode(final String nodename) {
- logger.debug("PROV0012 Poking node " + nodename + " ...");
- Runnable r = new Runnable() {
- @Override
- public void run() {
-
- try {
- String u = String.format(POKE_URL_TEMPLATE, nodename+":"+DB.HTTP_PORT);
- URL url = new URL(u);
- HttpURLConnection conn = (HttpURLConnection) url.openConnection();
- conn.setConnectTimeout(60000); //Fixes for Itrack DATARTR-3, poke timeout
- conn.connect();
- conn.getContentLength(); // Force the GET through
- conn.disconnect();
- } catch (MalformedURLException e) {
- logger.warn("PROV0013 MalformedURLException Error poking node "+nodename+": " + e.getMessage());
- } catch (IOException e) {
- logger.warn("PROV0013 IOException Error poking node "+nodename+": " + e.getMessage());
- }
- }
- };
-// Thread t = new Thread(r);
-// t.start();
- r.run();
- }
- @SuppressWarnings("unused")
- private String buildProvisioningString() {
- StringBuilder sb = new StringBuilder("{\n");
-
- // Append Feeds to the string
- String pfx = "\n";
- sb.append("\"feeds\": [");
- for (Feed f : Feed.getAllFeeds()) {
- sb.append(pfx);
- sb.append(f.asJSONObject().toString());
- pfx = ",\n";
- }
- sb.append("\n],\n");
-
- //Append groups to the string - Rally:US708115 - 1610
- pfx = "\n";
- sb.append("\"groups\": [");
- for (Group s : Group.getAllgroups()) {
- sb.append(pfx);
- sb.append(s.asJSONObject().toString());
- pfx = ",\n";
- }
- sb.append("\n],\n");
-
-
- // Append Subscriptions to the string
- pfx = "\n";
- sb.append("\"subscriptions\": [");
- for (Subscription s : Subscription.getAllSubscriptions()) {
- sb.append(pfx);
- if(s!=null)
- sb.append(s.asJSONObject().toString());
- pfx = ",\n";
- }
- sb.append("\n],\n");
-
- // Append Parameters to the string
- pfx = "\n";
- sb.append("\"parameters\": {");
- Map props = Parameters.getParameters();
- Set ivals = new HashSet();
- String intv = props.get("_INT_VALUES");
- if (intv != null)
- ivals.addAll(Arrays.asList(intv.split("\\|")));
- for (String key : new TreeSet(props.keySet())) {
- String v = props.get(key);
- sb.append(pfx);
- sb.append(" \"").append(key).append("\": ");
- if (ivals.contains(key)) {
- // integer value
- sb.append(v);
- } else if (key.endsWith("S")) {
- // Split and append array of strings
- String[] pp = v.split("\\|");
- String p2 = "";
- sb.append("[");
- for (String t : pp) {
- sb.append(p2).append("\"").append(quote(t)).append("\"");
- p2 = ",";
- }
- sb.append("]");
- } else {
- sb.append("\"").append(quote(v)).append("\"");
- }
- pfx = ",\n";
- }
- sb.append("\n},\n");
-
- // Append Routes to the string
- pfx = "\n";
- sb.append("\"ingress\": [");
- for (IngressRoute in : IngressRoute.getAllIngressRoutes()) {
- sb.append(pfx);
- sb.append(in.asJSONObject().toString());
- pfx = ",\n";
- }
- sb.append("\n],\n");
-
- pfx = "\n";
- sb.append("\"egress\": {");
- for (EgressRoute eg : EgressRoute.getAllEgressRoutes()) {
- sb.append(pfx);
- String t = eg.asJSONObject().toString();
- t = t.substring(1, t.length()-1);
- sb.append(t);
- pfx = ",\n";
- }
- sb.append("\n},\n");
-
- pfx = "\n";
- sb.append("\"routing\": [");
- for (NetworkRoute ne : NetworkRoute.getAllNetworkRoutes()) {
- sb.append(pfx);
- sb.append(ne.asJSONObject().toString());
- pfx = ",\n";
- }
- sb.append("\n]");
- sb.append("\n}");
-
- // Convert to string and verify it is valid JSON
- String provstring = sb.toString();
- try {
- new JSONObject(new JSONTokener(provstring));
- } catch (JSONException e) {
- logger.warn("PROV0016: Possible invalid prov string: "+e);
- }
- return provstring;
- }
- private String quote(String s) {
- StringBuilder sb = new StringBuilder();
- for (char ch : s.toCharArray()) {
- if (ch == '\\' || ch == '"') {
- sb.append('\\');
- }
- sb.append(ch);
- }
- return sb.toString();
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/ProxyServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/ProxyServlet.java
deleted file mode 100644
index b22b018f..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/ProxyServlet.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.security.KeyStore;
-import java.security.KeyStoreException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Properties;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.http.Header;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpRequestBase;
-import org.apache.http.conn.scheme.Scheme;
-import org.apache.http.conn.ssl.SSLSocketFactory;
-import org.apache.http.entity.BasicHttpEntity;
-import org.apache.http.impl.client.AbstractHttpClient;
-import org.apache.http.impl.client.DefaultHttpClient;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-import com.att.research.datarouter.provisioning.utils.URLUtilities;
-
-/**
- * This class is the base class for those servlets that need to proxy their requests from the
- * standby to active server. Its methods perform the proxy function to the active server. If the
- * active server is not reachable, a 503 (SC_SERVICE_UNAVAILABLE) is returned. Only
- * DELETE/GET/PUT/POST are supported.
- *
- * @author Robert Eby
- * @version $Id: ProxyServlet.java,v 1.3 2014/03/24 18:47:10 eby Exp $
- */
-@SuppressWarnings("serial")
-public class ProxyServlet extends BaseServlet {
- private boolean inited = false;
- private Scheme sch;
-
- /**
- * Initialize this servlet, by setting up SSL.
- */
- @SuppressWarnings("deprecation")
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
- try {
- // Set up keystore
- Properties props = (new DB()).getProperties();
- String type = props.getProperty(Main.KEYSTORE_TYPE_PROPERTY, "jks");
- String store = props.getProperty(Main.KEYSTORE_PATH_PROPERTY);
- String pass = props.getProperty(Main.KEYSTORE_PASSWORD_PROPERTY);
- KeyStore keyStore = readStore(store, pass, type);
-
- store = props.getProperty(Main.TRUSTSTORE_PATH_PROPERTY);
- pass = props.getProperty(Main.TRUSTSTORE_PASSWORD_PROPERTY);
- if (store == null || store.length() == 0) {
- store = Main.DEFAULT_TRUSTSTORE;
- pass = "changeit";
- }
- KeyStore trustStore = readStore(store, pass, KeyStore.getDefaultType());
-
- // We are connecting with the node name, but the certificate will have the CNAME
- // So we need to accept a non-matching certificate name
- SSLSocketFactory socketFactory = new SSLSocketFactory(keyStore, "changeit", trustStore);
- socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
- sch = new Scheme("https", 443, socketFactory);
- inited = true;
- } catch (Exception e) {
- e.printStackTrace();
- }
- intlogger.info("ProxyServlet: inited = "+inited);
- }
- private KeyStore readStore(String store, String pass, String type) throws KeyStoreException, FileNotFoundException {
- KeyStore ks = KeyStore.getInstance(type);
- FileInputStream instream = new FileInputStream(new File(store));
- try {
- ks.load(instream, pass.toCharArray());
- } catch (Exception x) {
- System.err.println("READING TRUSTSTORE: "+x);
- } finally {
- try { instream.close(); } catch (Exception ignore) {}
- }
- return ks;
- }
- /**
- * Return true if the requester has NOT set the noproxy CGI variable.
- * If they have, this indicates they want to forcibly turn the proxy off.
- * @param req the HTTP request
- * @return true or false
- */
- protected boolean isProxyOK(final HttpServletRequest req) {
- String t = req.getQueryString();
- if (t != null) {
- t = t.replaceAll("&", "&");
- for (String s : t.split("&")) {
- if (s.equals("noproxy") || s.startsWith("noproxy="))
- return false;
- }
- }
- return true;
- }
- /**
- * Is this the standby server? If it is, the proxy functions can be used.
- * If not, the proxy functions should not be called, and will send a response of 500
- * (Internal Server Error).
- * @return true if this server is the standby (and hence a proxy server).
- */
- public boolean isProxyServer() {
- SynchronizerTask st = SynchronizerTask.getSynchronizer();
- return st.getState() == SynchronizerTask.STANDBY;
- }
- /**
- * Issue a proxy DELETE to the active provisioning server.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- doProxy(req, resp, "DELETE");
- }
- /**
- * Issue a proxy GET to the active provisioning server.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- doProxy(req, resp, "GET");
- }
- /**
- * Issue a proxy PUT to the active provisioning server.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- doProxy(req, resp, "PUT");
- }
- /**
- * Issue a proxy POST to the active provisioning server.
- */
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- doProxy(req, resp, "POST");
- }
- /**
- * Issue a proxy GET to the active provisioning server. Unlike doGet() above,
- * this method will allow the caller to fall back to other code if the remote server is unreachable.
- * @return true if the proxy succeeded
- */
- public boolean doGetWithFallback(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- boolean rv = false;
- if (inited) {
- String url = buildUrl(req);
- intlogger.info("ProxyServlet: proxying with fallback GET "+url);
- AbstractHttpClient httpclient = new DefaultHttpClient();
- HttpRequestBase proxy = new HttpGet(url);
- try {
- httpclient.getConnectionManager().getSchemeRegistry().register(sch);
-
- // Copy request headers and request body
- copyRequestHeaders(req, proxy);
-
- // Execute the request
- HttpResponse pxy_response = httpclient.execute(proxy);
-
- // Get response headers and body
- int code = pxy_response.getStatusLine().getStatusCode();
- resp.setStatus(code);
- copyResponseHeaders(pxy_response, resp);
-
- HttpEntity entity = pxy_response.getEntity();
- if (entity != null) {
- InputStream in = entity.getContent();
- IOUtils.copy(in, resp.getOutputStream());
- in.close();
- }
- rv = true;
- } catch (IOException e) {
- System.err.println("ProxyServlet: "+e);
- e.printStackTrace();
- } finally {
- proxy.releaseConnection();
- httpclient.getConnectionManager().shutdown();
- }
- } else {
- intlogger.warn("ProxyServlet: proxy disabled");
- }
- return rv;
- }
- private void doProxy(HttpServletRequest req, HttpServletResponse resp, final String method) throws IOException {
- if (inited && isProxyServer()) {
- String url = buildUrl(req);
- intlogger.info("ProxyServlet: proxying "+method + " "+url);
- AbstractHttpClient httpclient = new DefaultHttpClient();
- ProxyHttpRequest proxy = new ProxyHttpRequest(method, url);
- try {
- httpclient.getConnectionManager().getSchemeRegistry().register(sch);
-
- // Copy request headers and request body
- copyRequestHeaders(req, proxy);
- if (method.equals("POST") || method.equals("PUT")){
- BasicHttpEntity body = new BasicHttpEntity();
- body.setContent(req.getInputStream());
- body.setContentLength(-1); // -1 = unknown
- proxy.setEntity(body);
- }
-
- // Execute the request
- HttpResponse pxy_response = httpclient.execute(proxy);
-
- // Get response headers and body
- int code = pxy_response.getStatusLine().getStatusCode();
- resp.setStatus(code);
- copyResponseHeaders(pxy_response, resp);
-
- HttpEntity entity = pxy_response.getEntity();
- if (entity != null) {
- InputStream in = entity.getContent();
- IOUtils.copy(in, resp.getOutputStream());
- in.close();
- }
- } catch (IOException e) {
- intlogger.warn("ProxyServlet: "+e);
- resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE);
- e.printStackTrace();
- } finally {
- proxy.releaseConnection();
- httpclient.getConnectionManager().shutdown();
- }
- } else {
- intlogger.warn("ProxyServlet: proxy disabled");
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- }
- }
- private String buildUrl(HttpServletRequest req) {
- StringBuilder sb = new StringBuilder("https://");
- sb.append(URLUtilities.getPeerPodName());
- sb.append(req.getRequestURI());
- String q = req.getQueryString();
- if (q != null)
- sb.append("?").append(q);
- return sb.toString();
- }
- private void copyRequestHeaders(HttpServletRequest from, HttpRequestBase to) {
- @SuppressWarnings("unchecked")
- List list = Collections.list(from.getHeaderNames());
- for (String name : list) {
- // Proxy code will add this one
- if (!name.equalsIgnoreCase("Content-Length"))
- to.addHeader(name, from.getHeader(name));
- }
- }
- private void copyResponseHeaders(HttpResponse from, HttpServletResponse to) {
- for (Header hdr : from.getAllHeaders()) {
- // Don't copy Date: our Jetty will add another Date header
- if (!hdr.getName().equals("Date"))
- to.addHeader(hdr.getName(), hdr.getValue());
- }
- }
-
- public class ProxyHttpRequest extends HttpEntityEnclosingRequestBase {
- private final String method;
-
- public ProxyHttpRequest(final String method, final String uri) {
- super();
- this.method = method;
- setURI(URI.create(uri));
- }
- @Override
- public String getMethod() {
- return method;
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/PublishServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/PublishServlet.java
deleted file mode 100644
index 2a8e2e3c..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/PublishServlet.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.Properties;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.research.datarouter.provisioning.beans.EventLogRecord;
-import com.att.research.datarouter.provisioning.beans.Feed;
-import com.att.research.datarouter.provisioning.beans.IngressRoute;
-import com.att.research.datarouter.provisioning.eelf.EelfMsgs;
-import com.att.research.datarouter.provisioning.utils.*;
-
-/**
- * This servlet handles redirects for the <publishURL> on the provisioning server,
- * which is generated by the provisioning server to handle a particular subscriptions to a feed.
- * See the File Publishing and Delivery API document for details on how these methods
- * should be invoked.
- *
- * @author Robert Eby
- * @version $Id: PublishServlet.java,v 1.8 2014/03/12 19:45:41 eby Exp $
- */
-@SuppressWarnings("serial")
-public class PublishServlet extends BaseServlet {
- private int next_node;
- private String provstring;
- private List irt;
- //Adding EELF Logger Rally:US664892
- private static EELFLogger eelflogger = EELFManager.getInstance().getLogger("com.att.research.datarouter.provisioning.PublishServlet");
-
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
- next_node = 0;
- provstring = "";
- irt = new ArrayList();
-
- }
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doDelete");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- redirect(req, resp);
- }
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doGet");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- redirect(req, resp);
- }
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPut");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- redirect(req, resp);
- }
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPost");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));
- redirect(req, resp);
- }
- private void redirect(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- String[] nodes = getNodes();
- if (nodes == null || nodes.length == 0) {
- resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, "There are no nodes defined in the DR network.");
- } else {
- EventLogRecord elr = new EventLogRecord(req);
- int feedid = checkPath(req);
- if (feedid < 0) {
- String message = (feedid == -1)
- ? "Invalid request - Missing or bad feed number."
- : "Invalid request - Missing file ID.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
-
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- } else {
- // Generate new URL
- String nextnode = getRedirectNode(feedid, req);
- nextnode = nextnode+":"+DB.HTTPS_PORT;
- String newurl = "https://" + nextnode + "/publish" + req.getPathInfo();
- String qs = req.getQueryString();
- if (qs != null)
- newurl += "?" + qs;
-
- // Log redirect in event log
- String message = "Redirected to: "+newurl;
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_MOVED_PERMANENTLY);
- eventlogger.info(elr);
-
- resp.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
- resp.setHeader("Location", newurl);
- }
- }
- }
- private String getRedirectNode(int feedid, HttpServletRequest req) {
- // Check to see if the IRT needs to be updated
- Poker p = Poker.getPoker();
- String s = p.getProvisioningString();
- synchronized (provstring) {
- if (irt == null || (s.length() != provstring.length()) || !s.equals(provstring)) {
- // Provisioning string has changed -- update the IRT
- provstring = s;
- JSONObject jo = new JSONObject(new JSONTokener(provstring));
- JSONArray ja = jo.getJSONArray("ingress");
- List newlist = new ArrayList();
- for (int i = 0; i < ja.length(); i++) {
- IngressRoute iroute = new IngressRoute(ja.getJSONObject(i));
- newlist.add(iroute);
- }
- irt = newlist;
- }
- }
-
- // Look in IRT for next node
- for (IngressRoute route : irt) {
- if (route.matches(feedid, req)) {
- // pick a node at random from the list
- Collection nodes = route.getNodes();
- String[] arr = nodes.toArray(new String[0]);
- long id = System.currentTimeMillis() % arr.length;
- String node = arr[(int) id];
- intlogger.info("Redirecting to "+node+" because of route "+route);
- return node;
- }
- }
-
- // No IRT rule matches, do round robin of all active nodes
- String[] nodes = getNodes();
- if (next_node >= nodes.length) // The list of nodes may have grown/shrunk
- next_node = 0;
- return nodes[next_node++];
- }
- private int checkPath(HttpServletRequest req) {
- String path = req.getPathInfo();
- if (path == null || path.length() < 2)
- return -1;
- path = path.substring(1);
- int ix = path.indexOf('/');
- if (ix < 0 || ix == path.length()-1)
- return -2;
- try {
- int feedid = Integer.parseInt(path.substring(0, ix));
- if (!Feed.isFeedValid(feedid))
- return -1;
- return feedid;
- } catch (NumberFormatException e) {
- return -1;
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/RouteServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/RouteServlet.java
deleted file mode 100644
index 68fd4c71..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/RouteServlet.java
+++ /dev/null
@@ -1,429 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.IOException;
-import java.util.Set;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONObject;
-
-import com.att.research.datarouter.provisioning.beans.Deleteable;
-import com.att.research.datarouter.provisioning.beans.EgressRoute;
-import com.att.research.datarouter.provisioning.beans.EventLogRecord;
-import com.att.research.datarouter.provisioning.beans.IngressRoute;
-import com.att.research.datarouter.provisioning.beans.Insertable;
-import com.att.research.datarouter.provisioning.beans.NetworkRoute;
-import com.att.research.datarouter.provisioning.beans.NodeClass;
-
-/**
- *
- * This servlet handles requests to URLs under /internal/route/ on the provisioning server.
- * This part of the URL tree is used to manipulate the Data Router routing tables.
- * These include:
- *
- *
- *
- * URL Path Summary
- *
- * URL Path
- * Method
- * Purpose
- *
- *
- * /internal/route/
- * GET
- * used to GET a full JSON copy of all three routing tables.
- *
- *
- * /internal/route/ingress/
- * GET
- * used to GET a full JSON copy of the ingress routing table (IRT).
- *
- *
- * POST
- * used to create a new entry in the ingress routing table (IRT).
- *
- * /internal/route/egress/
- * GET
- * used to GET a full JSON copy of the egress routing table (ERT).
- *
- *
- * POST
- * used to create a new entry in the egress routing table (ERT).
- *
- * /internal/route/network/
- * GET
- * used to GET a full JSON copy of the network routing table (NRT).
- *
- *
- * POST
- * used to create a new entry in the network routing table (NRT).
- *
- *
- * /internal/route/ingress/<feed>/<user>/<subnet>
- * DELETE
- * used to DELETE the ingress route corresponding to feed , user and subnet .
- * The / in the subnet specified should be replaced with a !, since / cannot be used in a URL.
- *
- *
- * /internal/route/ingress/<seq>
- * DELETE
- * used to DELETE all ingress routes with the matching seq sequence number.
- *
- *
- * /internal/route/egress/<sub>
- * DELETE
- * used to DELETE the egress route the matching sub subscriber number.
- *
- *
- * /internal/route/network/<fromnode>/<tonode>
- * DELETE
- * used to DELETE the network route corresponding to fromnode
- * and tonode .
- *
- *
- *
- * Authorization to use these URLs is a little different than for other URLs on the provisioning server.
- * For the most part, the IP address that the request comes from should be either:
- *
- *
- * an IP address of a provisioning server, or
- * the IP address of a node, or
- * an IP address from the "special subnet " which is configured with
- * the PROV_SPECIAL_SUBNET parameter.
- *
- *
- * All DELETE/GET/POST requests made to this servlet on the standby server are proxied to the
- * active server (using the {@link ProxyServlet}) if it is up and reachable.
- *
- *
- * @author Robert Eby
- * @version $Id$
- */
-@SuppressWarnings("serial")
-public class RouteServlet extends ProxyServlet {
- /**
- * DELETE route table entries by deleting part of the route table tree.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- EventLogRecord elr = new EventLogRecord(req);
- if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized.");
- return;
- }
- if (isProxyOK(req) && isProxyServer()) {
- super.doDelete(req, resp);
- return;
- }
-
- String path = req.getPathInfo();
- String[] parts = path.substring(1).split("/");
- Deleteable[] d = null;
- if (parts[0].equals("ingress")) {
- if (parts.length == 4) {
- // /internal/route/ingress/
//
- try {
- int feedid = Integer.parseInt(parts[1]);
- IngressRoute er = IngressRoute.getIngressRoute(feedid, parts[2], parts[3].replaceAll("!", "/"));
- if (er == null) {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "The specified ingress route does not exist.");
- return;
- }
- d = new Deleteable[] { er };
- } catch (NumberFormatException e) {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid feed ID in 'delete ingress' command.");
- return;
- }
- } else if (parts.length == 2) {
- // /internal/route/ingress/
- try {
- int seq = Integer.parseInt(parts[1]);
- Set set = IngressRoute.getIngressRoutesForSeq(seq);
- d = set.toArray(new Deleteable[0]);
- } catch (NumberFormatException e) {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid sequence number in 'delete ingress' command.");
- return;
- }
- } else {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid number of arguments in 'delete ingress' command.");
- return;
- }
- } else if (parts[0].equals("egress")) {
- if (parts.length == 2) {
- // /internal/route/egress/
- try {
- int subid = Integer.parseInt(parts[1]);
- EgressRoute er = EgressRoute.getEgressRoute(subid);
- if (er == null) {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "The specified egress route does not exist.");
- return;
- }
- d = new Deleteable[] { er };
- } catch (NumberFormatException e) {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid sub ID in 'delete egress' command.");
- return;
- }
- } else {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid number of arguments in 'delete egress' command.");
- return;
- }
- } else if (parts[0].equals("network")) {
- if (parts.length == 3) {
- // /internal/route/network//
- try {//
- NetworkRoute nr = new NetworkRoute(
- NodeClass.normalizeNodename(parts[1]),
- NodeClass.normalizeNodename(parts[2])
- );
- d = new Deleteable[] { nr };
- } catch (IllegalArgumentException e) {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "The specified network route does not exist.");
- return;
- }
- } else {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Invalid number of arguments in 'delete network' command.");
- return;
- }
- }
- if (d == null) {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Bad URL.");
- return;
- }
- boolean rv = true;
- for (Deleteable dd : d) {
- rv &= doDelete(dd);
- }
- if (rv) {
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- provisioningDataChanged();
- provisioningParametersChanged();
- } else {
- // Something went wrong with the DELETE
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
- /**
- * GET route table entries from the route table tree specified by the URL path.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- EventLogRecord elr = new EventLogRecord(req);
- if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized.");
- return;
- }
- if (isProxyOK(req) && isProxyServer()) {
- super.doGet(req, resp);
- return;
- }
-
- String path = req.getPathInfo();
- if (!path.endsWith("/"))
- path += "/";
- if (!path.equals("/") && !path.equals("/ingress/") && !path.equals("/egress/") && !path.equals("/network/")) {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Bad URL.");
- return;
- }
-
- StringBuilder sb = new StringBuilder("{\n");
- String px2 = "";
- if (path.equals("/") || path.equals("/ingress/")) {
- String pfx = "\n";
- sb.append("\"ingress\": [");
- for (IngressRoute in : IngressRoute.getAllIngressRoutes()) {
- sb.append(pfx);
- sb.append(in.asJSONObject().toString());
- pfx = ",\n";
- }
- sb.append("\n]");
- px2 = ",\n";
- }
-
- if (path.equals("/") || path.equals("/egress/")) {
- String pfx = "\n";
- sb.append(px2);
- sb.append("\"egress\": {");
- for (EgressRoute eg : EgressRoute.getAllEgressRoutes()) {
- JSONObject jx = eg.asJSONObject();
- for (String key : jx.keySet()) {
- sb.append(pfx);
- sb.append(" \"").append(key).append("\": ");
- sb.append("\"").append(jx.getString(key)).append("\"");
- pfx = ",\n";
- }
- }
- sb.append("\n}");
- px2 = ",\n";
- }
-
- if (path.equals("/") || path.equals("/network/")) {
- String pfx = "\n";
- sb.append(px2);
- sb.append("\"routing\": [");
- for (NetworkRoute ne : NetworkRoute.getAllNetworkRoutes()) {
- sb.append(pfx);
- sb.append(ne.asJSONObject().toString());
- pfx = ",\n";
- }
- sb.append("\n]");
- }
- sb.append("}\n");
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType("application/json");
- resp.getOutputStream().print(sb.toString());
- }
- /**
- * PUT on </internal/route/*> -- not supported.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- EventLogRecord elr = new EventLogRecord(req);
- if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized.");
- return;
- }
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Bad URL.");
- }
- /**
- * POST - modify existing route table entries in the route table tree specified by the URL path.
- */
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- EventLogRecord elr = new EventLogRecord(req);
- if (!isAuthorizedForInternal(req)) {
- elr.setMessage("Unauthorized.");
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthorized.");
- return;
- }
- if (isProxyOK(req) && isProxyServer()) {
- super.doPost(req, resp);
- return;
- }
- String path = req.getPathInfo();
- Insertable[] ins = null;
- if (path.startsWith("/ingress/")) {
- // /internal/route/ingress/?feed=%s&user=%s&subnet=%s&nodepatt=%s
- try {
- // Although it probably doesn't make sense, you can install two identical routes in the IRT
- int feedid = Integer.parseInt(req.getParameter("feed"));
- String user = req.getParameter("user");
- if (user == null)
- user = "-";
- String subnet = req.getParameter("subnet");
- if (subnet == null)
- subnet = "-";
- String nodepatt = req.getParameter("nodepatt");
- String t = req.getParameter("seq");
- int seq = (t != null) ? Integer.parseInt(t) : (IngressRoute.getMaxSequence() + 100);
- ins = new Insertable[] { new IngressRoute(seq, feedid, user, subnet, NodeClass.lookupNodeNames(nodepatt)) };
- } catch (Exception e) {
- intlogger.info(e);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments in 'add ingress' command.");
- return;
- }
- } else if (path.startsWith("/egress/")) {
- // /internal/route/egress/?sub=%s&node=%s
- try {
- int subid = Integer.parseInt(req.getParameter("sub"));
- EgressRoute er = EgressRoute.getEgressRoute(subid);
- if (er != null) {
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "An egress route already exists for that subscriber.");
- return;
- }
- String node = NodeClass.normalizeNodename(req.getParameter("node"));
- ins = new Insertable[] { new EgressRoute(subid, node) };
- } catch (Exception e) {
- intlogger.info(e);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments in 'add egress' command.");
- return;
- }
- } else if (path.startsWith("/network/")) {
- // /internal/route/network/?from=%s&to=%s&via=%s
- try {
- String nfrom = req.getParameter("from");
- String nto = req.getParameter("to");
- String nvia = req.getParameter("via");
- if (nfrom == null || nto == null || nvia == null) {
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Missing arguments in 'add network' command.");
- return;
- }
- nfrom = NodeClass.normalizeNodename(nfrom);
- nto = NodeClass.normalizeNodename(nto);
- nvia = NodeClass.normalizeNodename(nvia);
- NetworkRoute nr = new NetworkRoute(nfrom, nto, nvia);
- for (NetworkRoute route : NetworkRoute.getAllNetworkRoutes()) {
- if (route.getFromnode() == nr.getFromnode() && route.getTonode() == nr.getTonode()) {
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Network route table already contains a route for "+nfrom+" and "+nto);
- return;
- }
- }
- ins = new Insertable[] { nr };
- } catch (IllegalArgumentException e) {
- intlogger.info(e);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments in 'add network' command.");
- return;
- }
- }
- if (ins == null) {
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, "Bad URL.");
- return;
- }
- boolean rv = true;
- for (Insertable dd : ins) {
- rv &= doInsert(dd);
- }
- if (rv) {
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- provisioningDataChanged();
- provisioningParametersChanged();
- } else {
- // Something went wrong with the INSERT
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/StatisticsServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/StatisticsServlet.java
deleted file mode 100644
index 1c508b77..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/StatisticsServlet.java
+++ /dev/null
@@ -1,588 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning;
-
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.sql.rowset.CachedRowSet;
import javax.sql.rowset.RowSetProvider;

import org.json.JSONException;
import org.json.LOGJSONObject;

import com.att.research.datarouter.provisioning.beans.EventLogRecord;
import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * This Servlet handles requests to the <Statistics API> and <Statistics consilidated resultset>,
- * @author Manish Singh
- * @version $Id: StatisticsServlet.java,v 1.11 2016/08/10 17:27:02 Manish Exp $
- */
-@SuppressWarnings("serial")
-
-public class StatisticsServlet extends BaseServlet {
-
- private static final long TWENTYFOUR_HOURS = (24 * 60 * 60 * 1000L);
- private static final String fmt1 = "yyyy-MM-dd'T'HH:mm:ss'Z'";
- private static final String fmt2 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
-
-
- /**
- * DELETE a logging URL -- not supported.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- String message = "DELETE not allowed for the logURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
- /**
- * GET a Statistics URL -- retrieve Statistics data for a feed or subscription.
- * See the Statistics API document for details on how this method should be invoked.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
-
- Map map = buildMapFromRequest(req);
- if (map.get("err") != null) {
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments: "+map.get("err"));
- return;
- }
- // check Accept: header??
-
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(LOGLIST_CONTENT_TYPE);
- ServletOutputStream out = resp.getOutputStream();
-
-
- String outputType = "json";
- String feedids = null;
-
- if(req.getParameter("feedid") ==null && req.getParameter("groupid") ==null)
- {
- out.print("Invalid request, Feedid or Group ID is required.");
- }
-
- if(req.getParameter("feedid")!=null && req.getParameter("groupid") == null) {
- map.put("feedids", req.getParameter("feedid").replace("|", ",").toString());
- }
-
- if(req.getParameter("groupid") != null && req.getParameter("feedid") ==null) {
- // String groupid1 = null;
- StringBuffer groupid1 = new StringBuffer();
-
- try {
- System.out.println("feeedidsssssssss");
- groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter("groupid")));
- System.out.println("feeedids"+req.getParameter("groupid"));
-
- map.put("feedids", groupid1.toString());
- System.out.println("groupid1" +groupid1.toString());
-
-
- } catch (NumberFormatException e) {
- e.printStackTrace();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- if(req.getParameter("groupid") != null && req.getParameter("feedid") !=null) {
- StringBuffer groupid1 = new StringBuffer();
-
-
- try {
- System.out.println("both r not null");
- groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter("groupid")));
- System.out.println("feeedids"+req.getParameter("groupid"));
- groupid1.append(",");
- groupid1.append(req.getParameter("feedid").replace("|", ",").toString());
-
- map.put("feedids", groupid1.toString());
-
-
- System.out.println("groupid1" +groupid1.toString());
-
-
- } catch (NumberFormatException e) {
- e.printStackTrace();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
-
-
-
- if(req.getParameter("subid")!=null && req.getParameter("feedid") !=null) {
- StringBuffer subidstr = new StringBuffer();
-// subidstr.append(" and e.DELIVERY_SUBID in(subid)");
-// subidstr.append(req.getParameter("subid").replace("|", ",").toString());
- subidstr.append("and e.DELIVERY_SUBID in(");
-
- subidstr.append(req.getParameter("subid").replace("|", ",").toString());
- subidstr.append(")");
- map.put("subid", subidstr.toString());
- }
- if(req.getParameter("subid")!=null && req.getParameter("groupid") !=null) {
- StringBuffer subidstr = new StringBuffer();
-// subidstr.append(" and e.DELIVERY_SUBID in(subid)");
-// subidstr.append(req.getParameter("subid").replace("|", ",").toString());
- subidstr.append("and e.DELIVERY_SUBID in(");
-
- subidstr.append(req.getParameter("subid").replace("|", ",").toString());
- subidstr.append(")");
- map.put("subid", subidstr.toString());
- }
- if(req.getParameter("type")!=null) {
- map.put("eventType", req.getParameter("type").replace("|", ",").toString());
- }
- if(req.getParameter("output_type")!=null) {
- map.put("output_type", req.getParameter("output_type").toString());
- }
- if(req.getParameter("start_time")!=null) {
- map.put("start_time", req.getParameter("start_time").toString());
- }
- if(req.getParameter("end_time")!=null) {
- map.put("end_time", req.getParameter("end_time").toString());
- }
-
- if(req.getParameter("time")!=null) {
- map.put("start_time", req.getParameter("time").toString());
- map.put("end_time", null);
- }
-
-
-
- if(req.getParameter("output_type") !=null)
- {
- outputType = req.getParameter("output_type");
- }
-
-
- try {
-
- String filterQuery = this.queryGeneretor(map);
- eventlogger.debug("SQL Query for Statistics resultset. "+filterQuery);
-
- ResultSet rs=this.getRecordsForSQL(filterQuery);
-
- if(outputType.equals("csv")) {
- resp.setContentType("application/octet-stream");
- Date date = new Date() ;
- SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-YYYY HH:mm:ss") ;
- resp.setHeader("Content-Disposition", "attachment; filename=\"result:"+dateFormat.format(date)+".csv\"");
- eventlogger.info("Generating CSV file from Statistics resultset");
-
- rsToCSV(rs, out);
- }
- else {
- eventlogger.info("Generating JSON for Statistics resultset");
- this.rsToJson(rs, out);
- }
- }
- catch (IOException e) {
- eventlogger.error("IOException - Generating JSON/CSV:"+e);
- e.printStackTrace();
- }
- catch (JSONException e) {
- eventlogger.error("JSONException - executing SQL query:"+e);
- e.printStackTrace();
- } catch (SQLException e) {
- eventlogger.error("SQLException - executing SQL query:"+e);
- e.printStackTrace();
- } catch (ParseException e) {
- eventlogger.error("ParseException - executing SQL query:"+e);
- e.printStackTrace();
- }
- }
-
-
- /**
- * rsToJson - Converting RS to JSON object
- * @exception IOException, SQLException
- * @param out ServletOutputStream, rs as ResultSet
- */
- public void rsToCSV(ResultSet rs, ServletOutputStream out) throws IOException, SQLException {
- String header = "FEEDNAME,FEEDID,FILES_PUBLISHED,PUBLISH_LENGTH, FILES_DELIVERED, DELIVERED_LENGTH, SUBSCRIBER_URL, SUBID, PUBLISH_TIME,DELIVERY_TIME, AverageDelay\n";
-
- // String header = "FEEDNAME,FEEDID,TYPE,REMOTE_ADDR,DELIVERY_SUBID,REQURI,TOTAL CONTENT LENGTH,NO OF FILE,AVERAGE DELAY\n";
-
- out.write(header.getBytes());
-
- while(rs.next()) {
- StringBuffer line = new StringBuffer();
- line.append(rs.getString("FEEDNAME"));
- line.append(",");
- line.append(rs.getString("FEEDID"));
- line.append(",");
- line.append(rs.getString("FILES_PUBLISHED"));
- line.append(",");
- line.append(rs.getString("PUBLISH_LENGTH"));
- line.append(",");
- line.append(rs.getString("FILES_DELIVERED"));
- line.append(",");
- line.append(rs.getString("DELIVERED_LENGTH"));
- line.append(",");
- line.append(rs.getString("SUBSCRIBER_URL"));
- line.append(",");
- line.append(rs.getString("SUBID"));
- line.append(",");
- line.append(rs.getString("PUBLISH_TIME"));
- line.append(",");
- line.append(rs.getString("DELIVERY_TIME"));
- line.append(",");
- line.append(rs.getString("AverageDelay"));
- line.append(",");
-
- line.append("\n");
- out.write(line.toString().getBytes());
- out.flush();
- }
- }
-
- /**
- * rsToJson - Converting RS to JSON object
- * @exception IOException, SQLException
- * @param out ServletOutputStream, rs as ResultSet
- */
- public void rsToJson(ResultSet rs, ServletOutputStream out) throws IOException, SQLException {
-
- String fields[] = {"FEEDNAME","FEEDID","FILES_PUBLISHED","PUBLISH_LENGTH", "FILES_DELIVERED", "DELIVERED_LENGTH", "SUBSCRIBER_URL", "SUBID", "PUBLISH_TIME","DELIVERY_TIME", "AverageDelay"};
- StringBuffer line = new StringBuffer();
-
- line.append("[\n");
-
- while(rs.next()) {
- LOGJSONObject j2 = new LOGJSONObject();
- for (String key : fields) {
- Object v = rs.getString(key);
- if (v != null)
- j2.put(key.toLowerCase(), v);
- else
- j2.put(key.toLowerCase(), "");
- }
- line = line.append(j2.toString());;
- line.append(",\n");
- }
- line.append("]");
- out.print(line.toString());
- }
-
- /**
- * getFeedIdsByGroupId - Getting FEEDID's by GROUP ID.
- * @exception SQL Query SQLException.
- * @param groupIds
- */
- public StringBuffer getFeedIdsByGroupId(int groupIds) throws SQLException{
-
- DB db = null;
- Connection conn = null;
- PreparedStatement prepareStatement = null;
- ResultSet resultSet=null;
- String sqlGoupid = null;
- StringBuffer feedIds = new StringBuffer();
-
- try {
- db = new DB();
- conn = db.getConnection();
- sqlGoupid= " SELECT FEEDID from FEEDS WHERE GROUPID = ?";
- prepareStatement =conn.prepareStatement(sqlGoupid);
- prepareStatement.setInt(1, groupIds);
- resultSet=prepareStatement.executeQuery();
- while(resultSet.next()){
- feedIds.append(resultSet.getInt("FEEDID"));
- feedIds.append(",");
- }
- feedIds.deleteCharAt(feedIds.length()-1);
- System.out.println("feedIds"+feedIds.toString());
-
- } catch (SQLException e) {
- e.printStackTrace();
- } finally {
- try {
- if(resultSet != null) {
- resultSet.close();
- resultSet = null;
- }
-
- if(prepareStatement != null) {
- prepareStatement.close();
- prepareStatement = null;
- }
-
- if(conn != null){
- db.release(conn);
- }
- } catch(Exception e) {
- e.printStackTrace();
- }
- }
- return feedIds;
- }
-
-
- /**
- * queryGeneretor - Generating sql query
- * @exception SQL Query parse exception.
- * @param Map as key value pare of all user input fields
- */
- public String queryGeneretor(Map map) throws ParseException{
-
- String sql = null;
- String eventType = null;
- String feedids = null;
- String start_time = null;
- String end_time = null;
- String subid=" ";
- if(map.get("eventType") != null){
- eventType=(String) map.get("eventType");
- }
- if(map.get("feedids") != null){
- feedids=(String) map.get("feedids");
- }
- if(map.get("start_time") != null){
- start_time=(String) map.get("start_time");
- }
- if(map.get("end_time") != null){
- end_time=(String) map.get("end_time");
- }
- if("all".equalsIgnoreCase(eventType)){
- eventType="PUB','DEL, EXP, PBF";
- }
- if(map.get("subid") != null){
- subid=(String) map.get("subid");
- }
-
- eventlogger.info("Generating sql query to get Statistics resultset. ");
-
- if(end_time==null && start_time==null ){
-
-
- sql="SELECT (SELECT NAME FROM FEEDS AS f WHERE f.FEEDID in("+feedids+") and f.FEEDID=e.FEEDID) AS FEEDNAME, e.FEEDID as FEEDID, (SELECT COUNT(*) FROM LOG_RECORDS AS c WHERE c.FEEDID in("+feedids+") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("+feedids+") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED, sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL, e.DELIVERY_SUBID as SUBID, e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS e JOIN LOG_RECORDS m ON m.PUBLISH_ID = e.PUBLISH_ID AND e.FEEDID IN ("+feedids+") "+subid+" AND m.STATUS=204 AND e.RESULT=204 group by SUBID";
-
- return sql;
- }else if(start_time!=null && end_time==null ){
-
- long inputTimeInMilli=60000*Long.parseLong(start_time);
- Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
- long currentTimeInMilli=cal.getTimeInMillis();
- long compareTime=currentTimeInMilli-inputTimeInMilli;
-
- sql="SELECT (SELECT NAME FROM FEEDS AS f WHERE f.FEEDID in("+feedids+") and f.FEEDID=e.FEEDID) AS FEEDNAME, e.FEEDID as FEEDID, (SELECT COUNT(*) FROM LOG_RECORDS AS c WHERE c.FEEDID in("+feedids+") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("+feedids+") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED, sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL, e.DELIVERY_SUBID as SUBID, e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS e JOIN LOG_RECORDS m ON m.PUBLISH_ID = e.PUBLISH_ID AND e.FEEDID IN ("+feedids+") "+subid+" AND m.STATUS=204 AND e.RESULT=204 and e.event_time>="+compareTime+" group by SUBID";
-
- return sql;
-
- }else{
- SimpleDateFormat inFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
- Date startDate=inFormat.parse(start_time);
- Date endDate=inFormat.parse(end_time);
-
- long startInMillis=startDate.getTime();
- long endInMillis=endDate.getTime();
-
- {
-
- sql="SELECT (SELECT NAME FROM FEEDS AS f WHERE f.FEEDID in("+feedids+") and f.FEEDID=e.FEEDID) AS FEEDNAME, e.FEEDID as FEEDID, (SELECT COUNT(*) FROM LOG_RECORDS AS c WHERE c.FEEDID in("+feedids+") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("+feedids+") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED, sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL, e.DELIVERY_SUBID as SUBID, e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS e JOIN LOG_RECORDS m ON m.PUBLISH_ID = e.PUBLISH_ID AND e.FEEDID IN ("+feedids+") "+subid+" AND m.STATUS=204 AND e.RESULT=204 and e.event_time between "+startInMillis+" and "+endInMillis+" group by SUBID";
-
- }
- return sql;
- }
- }
-
-
- /**
- * PUT a Statistics URL -- not supported.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- String message = "PUT not allowed for the StatisticsURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
- /**
- * POST a Statistics URL -- not supported.
- */
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- String message = "POST not allowed for the StatisticsURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
-
- private Map buildMapFromRequest(HttpServletRequest req) {
- Map map = new HashMap();
- String s = req.getParameter("type");
- if (s != null) {
- if (s.equals("pub") || s.equals("del") || s.equals("exp")) {
- map.put("type", s);
- } else {
- map.put("err", "bad type");
- return map;
- }
- } else
- map.put("type", "all");
- map.put("publishSQL", "");
- map.put("statusSQL", "");
- map.put("resultSQL", "");
- map.put("reasonSQL", "");
-
- s = req.getParameter("publishId");
- if (s != null) {
- if (s.indexOf("'") >= 0) {
- map.put("err", "bad publishId");
- return map;
- }
- map.put("publishSQL", " AND PUBLISH_ID = '"+s+"'");
- }
-
- s = req.getParameter("statusCode");
- if (s != null) {
- String sql = null;
- if (s.equals("success")) {
- sql = " AND STATUS >= 200 AND STATUS < 300";
- } else if (s.equals("redirect")) {
- sql = " AND STATUS >= 300 AND STATUS < 400";
- } else if (s.equals("failure")) {
- sql = " AND STATUS >= 400";
- } else {
- try {
- Integer n = Integer.parseInt(s);
- if ((n >= 100 && n < 600) || (n == -1))
- sql = " AND STATUS = " + n;
- } catch (NumberFormatException e) {
- }
- }
- if (sql == null) {
- map.put("err", "bad statusCode");
- return map;
- }
- map.put("statusSQL", sql);
- map.put("resultSQL", sql.replaceAll("STATUS", "RESULT"));
- }
-
- s = req.getParameter("expiryReason");
- if (s != null) {
- map.put("type", "exp");
- if (s.equals("notRetryable")) {
- map.put("reasonSQL", " AND REASON = 'notRetryable'");
- } else if (s.equals("retriesExhausted")) {
- map.put("reasonSQL", " AND REASON = 'retriesExhausted'");
- } else if (s.equals("diskFull")) {
- map.put("reasonSQL", " AND REASON = 'diskFull'");
- } else if (s.equals("other")) {
- map.put("reasonSQL", " AND REASON = 'other'");
- } else {
- map.put("err", "bad expiryReason");
- return map;
- }
- }
-
- long stime = getTimeFromParam(req.getParameter("start"));
- if (stime < 0) {
- map.put("err", "bad start");
- return map;
- }
- long etime = getTimeFromParam(req.getParameter("end"));
- if (etime < 0) {
- map.put("err", "bad end");
- return map;
- }
- if (stime == 0 && etime == 0) {
- etime = System.currentTimeMillis();
- stime = etime - TWENTYFOUR_HOURS;
- } else if (stime == 0) {
- stime = etime - TWENTYFOUR_HOURS;
- } else if (etime == 0) {
- etime = stime + TWENTYFOUR_HOURS;
- }
- map.put("timeSQL", String.format(" AND EVENT_TIME >= %d AND EVENT_TIME <= %d", stime, etime));
- return map;
- }
- private long getTimeFromParam(final String s) {
- if (s == null)
- return 0;
- try {
- // First, look for an RFC 3339 date
- String fmt = (s.indexOf('.') > 0) ? fmt2 : fmt1;
- SimpleDateFormat sdf = new SimpleDateFormat(fmt);
- Date d = sdf.parse(s);
- return d.getTime();
- } catch (ParseException e) {
- }
- try {
- // Also allow a long (in ms); useful for testing
- long n = Long.parseLong(s);
- return n;
- } catch (NumberFormatException e) {
- }
- intlogger.info("Error parsing time="+s);
- return -1;
- }
-
-
- private ResultSet getRecordsForSQL(String sql) {
- intlogger.debug(sql);
- long start = System.currentTimeMillis();
- DB db = new DB();
- Connection conn = null;
- ResultSet rs=null;
-
- try {
- conn = db.getConnection();
- Statement stmt = conn.createStatement();
- PreparedStatement pst=conn.prepareStatement(sql);
- rs=pst.executeQuery();
- //this.rsToJson(rs)
- //rs.close();
- stmt.close();
- } catch (SQLException e) {
- e.printStackTrace();
- } finally {
- if (conn != null)
- db.release(conn);
- }
-
- intlogger.debug("Time: " + (System.currentTimeMillis()-start) + " ms");
-
- return rs;
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubLogServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubLogServlet.java
deleted file mode 100644
index 0f196b28..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubLogServlet.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-/**
- * This servlet handles requests to the <subLogURL>,
- * which are generated by the provisioning server to handle the log query API.
- *
- * @author Robert Eby
- * @version $Id: SubLogServlet.java,v 1.1 2013/04/26 21:00:25 eby Exp $
- */
-@SuppressWarnings("serial")
-public class SubLogServlet extends LogServlet {
- public SubLogServlet() {
- super(false);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubscribeServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubscribeServlet.java
deleted file mode 100644
index ea79e9ff..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubscribeServlet.java
+++ /dev/null
@@ -1,288 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.IOException;
-import java.io.InvalidObjectException;
-import java.util.Collection;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONObject;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.research.datarouter.authz.AuthorizationResponse;
-import com.att.research.datarouter.provisioning.beans.EventLogRecord;
-import com.att.research.datarouter.provisioning.beans.Feed;
-import com.att.research.datarouter.provisioning.beans.Subscription;
-import com.att.research.datarouter.provisioning.eelf.EelfMsgs;
-import com.att.research.datarouter.provisioning.utils.JSONUtilities;
-
-/**
- * This servlet handles provisioning for the <subscribeURL> which is generated by the provisioning
- * server to handle the creation and inspection of subscriptions to a specific feed.
- *
- * @author Robert Eby
- * @version $Id$
- */
-@SuppressWarnings("serial")
-public class SubscribeServlet extends ProxyServlet {
-
- //Adding EELF Logger Rally:US664892
- private static EELFLogger eelflogger = EELFManager.getInstance().getLogger("com.att.research.datarouter.provisioning.SubscribeServlet");
-
- /**
- * DELETE on the <subscribeUrl> -- not supported.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doDelete");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_SUBID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- String message = "DELETE not allowed for the subscribeURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
- /**
- * GET on the <subscribeUrl> -- get the list of subscriptions to a feed.
- * See the Subscription Collection Query section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doGet");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_SUBID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doGet(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- int feedid = getIdFromPath(req);
- if (feedid < 0) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Feed feed = Feed.getFeedById(feedid);
- if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
-
- // Display a list of URLs
- Collection list = Subscription.getSubscriptionUrlList(feedid);
- String t = JSONUtilities.createJSONArray(list);
-
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(SUBLIST_CONTENT_TYPE);
- resp.getOutputStream().print(t);
- }
- /**
- * PUT on the <subscribeUrl> -- not supported.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPut");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_SUBID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- String message = "PUT not allowed for the subscribeURL.";
- EventLogRecord elr = new EventLogRecord(req);
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
- }
- /**
- * POST on the <subscribeUrl> -- create a new subscription to a feed.
- * See the Creating a Subscription section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPost");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doPost(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- int feedid = getIdFromPath(req);
- if (feedid < 0) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Feed feed = Feed.getFeedById(feedid);
- if (feed == null || feed.isDeleted()) {
- message = "Missing or bad feed number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
-
- // check content type is SUB_CONTENT_TYPE, version 1.0
- ContentHeader ch = getContentHeader(req);
- String ver = ch.getAttribute("version");
- if (!ch.getType().equals(SUB_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
- intlogger.debug("Content-type is: "+req.getHeader("Content-Type"));
- message = "Incorrect content-type";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, message);
- return;
- }
- JSONObject jo = getJSONfromInput(req);
- if (jo == null) {
- message = "Badly formed JSON";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- if (intlogger.isDebugEnabled())
- intlogger.debug(jo.toString());
- if (++active_subs > max_subs) {
- active_subs--;
- message = "Cannot create subscription; the maximum number of subscriptions has been configured.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_CONFLICT);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_CONFLICT, message);
- return;
- }
- Subscription sub = null;
- try {
- sub = new Subscription(jo);
- } catch (InvalidObjectException e) {
- active_subs--;
- message = e.getMessage();
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- sub.setFeedid(feedid);
- sub.setSubscriber(bhdr); // set from X-ATT-DR-ON-BEHALF-OF header
-
- // Check if this subscription already exists; not an error (yet), just warn
- Subscription sub2 = Subscription.getSubscriptionMatching(sub);
- if (sub2 != null)
- intlogger.warn("PROV0011 Creating a duplicate subscription: new subid="+sub.getSubid()+", old subid="+sub2.getSubid());
-
- // Create SUBSCRIPTIONS table entries
- if (doInsert(sub)) {
- // send response
- elr.setResult(HttpServletResponse.SC_CREATED);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_CREATED);
- resp.setContentType(SUBFULL_CONTENT_TYPE);
- resp.setHeader("Location", sub.getLinks().getSelf());
- resp.getOutputStream().print(sub.asLimitedJSONObject().toString());
-
- provisioningDataChanged();
- } else {
- // Something went wrong with the INSERT
- active_subs--;
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubscriptionServlet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubscriptionServlet.java
deleted file mode 100644
index 0bb47175..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SubscriptionServlet.java
+++ /dev/null
@@ -1,476 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.IOException;
-import java.io.InvalidObjectException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.List;
-import java.util.Vector;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.att.research.datarouter.authz.AuthorizationResponse;
-import com.att.research.datarouter.provisioning.beans.EventLogRecord;
-import com.att.research.datarouter.provisioning.beans.Subscription;
-import com.att.research.datarouter.provisioning.eelf.EelfMsgs;
-
-/**
- * This servlet handles provisioning for the <subscriptionURL> which is generated by the provisioning
- * server to handle the inspection, modification, and deletion of a particular subscription to a feed.
- * It supports DELETE to delete a subscription, GET to retrieve information about the subscription,
- * and PUT to modify the subscription. In DR 3.0, POST is also supported in order to reset the subscription
- * timers for individual subscriptions.
- *
- * @author Robert Eby
- * @version $Id$
- */
-@SuppressWarnings("serial")
-public class SubscriptionServlet extends ProxyServlet {
- public static final String SUBCNTRL_CONTENT_TYPE = "application/vnd.att-dr.subscription-control";
- //Adding EELF Logger Rally:US664892
- private static EELFLogger eelflogger = EELFManager.getInstance().getLogger("com.att.research.datarouter.provisioning.SubscriptionServlet");
-
- /**
- * DELETE on the <subscriptionUrl> -- delete a subscription.
- * See the Deleting a Subscription section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doDelete");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_SUBID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doDelete(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- int subid = getIdFromPath(req);
- if (subid < 0) {
- message = "Missing or bad subscription number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Subscription sub = Subscription.getSubscriptionById(subid);
- if (sub == null) {
- message = "Missing or bad subscription number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
-
- // Delete Subscription
- if (doDelete(sub)) {
- active_subs--;
- // send response
- elr.setResult(HttpServletResponse.SC_NO_CONTENT);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_NO_CONTENT);
- provisioningDataChanged();
- } else {
- // Something went wrong with the DELETE
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
- /**
- * GET on the <subscriptionUrl> -- get information about a subscription.
- * See the Retreiving Information about a Subscription section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doGet");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_SUBID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doGet(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- int subid = getIdFromPath(req);
- if (subid < 0) {
- message = "Missing or bad subscription number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Subscription sub = Subscription.getSubscriptionById(subid);
- if (sub == null) {
- message = "Missing or bad subscription number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
-
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(SUBFULL_CONTENT_TYPE);
- resp.getOutputStream().print(sub.asJSONObject(true).toString());
- }
- /**
- * PUT on the <subscriptionUrl> -- modify a subscription.
- * See the Modifying a Subscription section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doPut(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- setIpAndFqdnForEelf("doPut");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_SUBID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doPut(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- int subid = getIdFromPath(req);
- if (subid < 0) {
- message = "Missing or bad subscription number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- Subscription oldsub = Subscription.getSubscriptionById(subid);
- if (oldsub == null) {
- message = "Missing or bad subscription number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_NOT_FOUND);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_NOT_FOUND, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- // check content type is SUB_CONTENT_TYPE, version 1.0
- ContentHeader ch = getContentHeader(req);
- String ver = ch.getAttribute("version");
- if (!ch.getType().equals(SUB_BASECONTENT_TYPE) || !(ver.equals("1.0") || ver.equals("2.0"))) {
- message = "Incorrect content-type";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, message);
- return;
- }
- JSONObject jo = getJSONfromInput(req);
- if (jo == null) {
- message = "Badly formed JSON";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- if (intlogger.isDebugEnabled())
- intlogger.debug(jo.toString());
- Subscription sub = null;
- try {
- sub = new Subscription(jo);
- } catch (InvalidObjectException e) {
- message = e.getMessage();
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- sub.setSubid(oldsub.getSubid());
- sub.setFeedid(oldsub.getFeedid());
- sub.setSubscriber(bhdr); // set from X-ATT-DR-ON-BEHALF-OF header
-
- String subjectgroup = (req.getHeader("X-ATT-DR-ON-BEHALF-OF-GROUP")); //Adding for group feature:Rally US708115
- if (!oldsub.getSubscriber().equals(sub.getSubscriber()) && subjectgroup == null) {
- message = "This subscriber must be modified by the same subscriber that created it.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
-
- // Update SUBSCRIPTIONS table entries
- if (doUpdate(sub)) {
- // send response
- elr.setResult(HttpServletResponse.SC_OK);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_OK);
- resp.setContentType(SUBFULL_CONTENT_TYPE);
- resp.getOutputStream().print(sub.asLimitedJSONObject().toString());
-
- /**Change Owner ship of Subscriber Adding for group feature:Rally US708115*/
- if (jo.has("changeowner") && subjectgroup != null) {
- Boolean changeowner = (Boolean) jo.get("changeowner");
- if (changeowner != null && changeowner.equals(true)) {
- sub.setSubscriber(req.getHeader(BEHALF_HEADER));
- sub.changeOwnerShip();
- }
- }
- /***End of change ownership*/
-
- provisioningDataChanged();
- } else {
- // Something went wrong with the UPDATE
- elr.setResult(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, DB_PROBLEM_MSG);
- }
- }
- /**
- * POST on the <subscriptionUrl> -- control a subscription.
- * See the Resetting a Subscription's Retry Schedule section in the Provisioning API
- * document for details on how this method should be invoked.
- */
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws IOException {
-// OLD pre-3.0 code
-// String message = "POST not allowed for the subscriptionURL.";
-// EventLogRecord elr = new EventLogRecord(req);
-// elr.setMessage(message);
-// elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
-// eventlogger.info(elr);
-// resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, message);
-
- setIpAndFqdnForEelf("doPost");
- eelflogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));
- EventLogRecord elr = new EventLogRecord(req);
- String message = isAuthorizedForProvisioning(req);
- if (message != null) {
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- if (isProxyServer()) {
- super.doPost(req, resp);
- return;
- }
- String bhdr = req.getHeader(BEHALF_HEADER);
- if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- final int subid = getIdFromPath(req);
- if (subid < 0 || Subscription.getSubscriptionById(subid) == null) {
- message = "Missing or bad subscription number.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- // check content type is SUBCNTRL_CONTENT_TYPE, version 1.0
- ContentHeader ch = getContentHeader(req);
- String ver = ch.getAttribute("version");
- if (!ch.getType().equals(SUBCNTRL_CONTENT_TYPE) || !ver.equals("1.0")) {
- message = "Incorrect content-type";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, message);
- return;
- }
- // Check with the Authorizer
- AuthorizationResponse aresp = authz.decide(req);
- if (! aresp.isAuthorized()) {
- message = "Policy Engine disallows access.";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_FORBIDDEN);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_FORBIDDEN, message);
- return;
- }
- JSONObject jo = getJSONfromInput(req);
- if (jo == null) {
- message = "Badly formed JSON";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- return;
- }
- try {
- // Only the active POD sends notifications
- boolean active = SynchronizerTask.getSynchronizer().isActive();
- boolean b = jo.getBoolean("failed");
- if (active && !b) {
- // Notify all nodes to reset the subscription
- SubscriberNotifyThread t = new SubscriberNotifyThread();
- t.resetSubscription(subid);
- t.start();
- }
- // send response
- elr.setResult(HttpServletResponse.SC_ACCEPTED);
- eventlogger.info(elr);
- resp.setStatus(HttpServletResponse.SC_ACCEPTED);
- } catch (JSONException e) {
- message = "Badly formed JSON";
- elr.setMessage(message);
- elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
- eventlogger.info(elr);
- resp.sendError(HttpServletResponse.SC_BAD_REQUEST, message);
- }
- }
-
- /**
- * A Thread class used to serially send reset notifications to all nodes in the DR network,
- * when a POST is received for a subscription.
- */
- public class SubscriberNotifyThread extends Thread {
- public static final String URL_TEMPLATE = "http://%s/internal/resetSubscription/%d";
- private List urls = new Vector();
-
- public SubscriberNotifyThread() {
- setName("SubscriberNotifyThread");
- }
- public void resetSubscription(int subid) {
- for (String nodename : BaseServlet.getNodes()) {
- String u = String.format(URL_TEMPLATE, nodename, subid);
- urls.add(u);
- }
- }
- public void run() {
- try {
- while (!urls.isEmpty()) {
- String u = urls.remove(0);
- try {
- URL url = new URL(u);
- HttpURLConnection conn = (HttpURLConnection) url.openConnection();
- conn.connect();
- conn.getContentLength(); // Force the GET through
- conn.disconnect();
- } catch (IOException e) {
- intlogger.info("IOException Error accessing URL: "+u+": " + e.getMessage());
- }
- }
- } catch (Exception e) {
- intlogger.warn("Caught exception in SubscriberNotifyThread: "+e);
- e.printStackTrace();
- }
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SynchronizerTask.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SynchronizerTask.java
deleted file mode 100644
index 9cb9b7c0..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/SynchronizerTask.java
+++ /dev/null
@@ -1,614 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardCopyOption;
-import java.security.KeyStore;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.TreeSet;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.conn.scheme.Scheme;
-import org.apache.http.conn.ssl.SSLSocketFactory;
-import org.apache.http.entity.ByteArrayEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.impl.client.AbstractHttpClient;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.log4j.Logger;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-
-import com.att.research.datarouter.provisioning.beans.EgressRoute;
-import com.att.research.datarouter.provisioning.beans.Feed;
-import com.att.research.datarouter.provisioning.beans.IngressRoute;
-import com.att.research.datarouter.provisioning.beans.NetworkRoute;
-import com.att.research.datarouter.provisioning.beans.Parameters;
-import com.att.research.datarouter.provisioning.beans.Subscription;
-import com.att.research.datarouter.provisioning.beans.Syncable;
-import com.att.research.datarouter.provisioning.utils.DB;
-import com.att.research.datarouter.provisioning.utils.RLEBitSet;
-import com.att.research.datarouter.provisioning.utils.LogfileLoader;
-import com.att.research.datarouter.provisioning.utils.URLUtilities;
-import com.att.research.datarouter.provisioning.beans.Group; //Groups feature Rally:US708115 - 1610
-
-/**
- * This class handles synchronization between provisioning servers (PODs). It has three primary functions:
- *
- * Checking DNS once per minute to see which POD the DNS CNAME points to. The CNAME will point to
- * the active (master) POD.
- * On non-master (standby) PODs, fetches provisioning data and logs in order to keep MySQL in sync.
- * Providing information to other parts of the system as to the current role (ACTIVE, STANDBY, UNKNOWN)
- * of this POD.
- *
- * For this to work correctly, the following code needs to be placed at the beginning of main().
- *
- * Security.setProperty("networkaddress.cache.ttl", "10");
- *
- *
- * @author Robert Eby
- * @version $Id: SynchronizerTask.java,v 1.10 2014/03/21 13:50:10 eby Exp $
- */
-public class SynchronizerTask extends TimerTask {
- /** This is a singleton -- there is only one SynchronizerTask object in the server */
- private static SynchronizerTask synctask;
-
- /** This POD is unknown -- not on the list of PODs */
- public static final int UNKNOWN = 0;
- /** This POD is active -- on the list of PODs, and the DNS CNAME points to us */
- public static final int ACTIVE = 1;
- /** This POD is standby -- on the list of PODs, and the DNS CNAME does not point to us */
- public static final int STANDBY = 2;
- private static final String[] stnames = { "UNKNOWN", "ACTIVE", "STANDBY" };
- private static final long ONE_HOUR = 60 * 60 * 1000L;
-
- private final Logger logger;
- private final Timer rolex;
- private final String spooldir;
- private int state;
- private boolean doFetch;
- private long nextsynctime;
- private AbstractHttpClient httpclient = null;
-
- /**
- * Get the singleton SynchronizerTask object.
- * @return the SynchronizerTask
- */
- public static synchronized SynchronizerTask getSynchronizer() {
- if (synctask == null)
- synctask = new SynchronizerTask();
- return synctask;
- }
-
- @SuppressWarnings("deprecation")
- private SynchronizerTask() {
- logger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- rolex = new Timer();
- spooldir = (new DB()).getProperties().getProperty("com.att.research.datarouter.provserver.spooldir");
- state = UNKNOWN;
- doFetch = true; // start off with a fetch
- nextsynctime = 0;
-
- logger.info("PROV5000: Sync task starting, server state is UNKNOWN");
- try {
- Properties props = (new DB()).getProperties();
- String type = props.getProperty(Main.KEYSTORE_TYPE_PROPERTY, "jks");
- String store = props.getProperty(Main.KEYSTORE_PATH_PROPERTY);
- String pass = props.getProperty(Main.KEYSTORE_PASSWORD_PROPERTY);
- KeyStore keyStore = KeyStore.getInstance(type);
- FileInputStream instream = new FileInputStream(new File(store));
- keyStore.load(instream, pass.toCharArray());
- instream.close();
-
- store = props.getProperty(Main.TRUSTSTORE_PATH_PROPERTY);
- pass = props.getProperty(Main.TRUSTSTORE_PASSWORD_PROPERTY);
- KeyStore trustStore = null;
- if (store != null && store.length() > 0) {
- trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
- instream = new FileInputStream(new File(store));
- trustStore.load(instream, pass.toCharArray());
- instream.close();
- }
-
- // We are connecting with the node name, but the certificate will have the CNAME
- // So we need to accept a non-matching certificate name
- String keystorepass = props.getProperty(Main.KEYSTORE_PASSWORD_PROPERTY); //itrack.web.att.com/browse/DATARTR-6 for changing hard coded passphase ref
- AbstractHttpClient hc = new DefaultHttpClient();
- SSLSocketFactory socketFactory =
- (trustStore == null)
- ? new SSLSocketFactory(keyStore, keystorepass)
- : new SSLSocketFactory(keyStore, keystorepass, trustStore);
- socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
- Scheme sch = new Scheme("https", 443, socketFactory);
- hc.getConnectionManager().getSchemeRegistry().register(sch);
- httpclient = hc;
-
- // Run once every 5 seconds to check DNS, etc.
- long interval = 0;
- try {
- String s = props.getProperty("com.att.research.datarouter.provserver.sync_interval", "5000");
- interval = Long.parseLong(s);
- } catch (NumberFormatException e) {
- interval = 5000L;
- }
- rolex.scheduleAtFixedRate(this, 0L, interval);
- } catch (Exception e) {
- logger.warn("PROV5005: Problem starting the synchronizer: "+e);
- }
- }
-
- /**
- * What is the state of this POD?
- * @return one of ACTIVE, STANDBY, UNKNOWN
- */
- public int getState() {
- return state;
- }
-
- /**
- * Is this the active POD?
- * @return true if we are active (the master), false otherwise
- */
- public boolean isActive() {
- return state == ACTIVE;
- }
-
- /**
- * This method is used to signal that another POD (the active POD) has sent us a /fetchProv request,
- * and that we should re-synchronize with the master.
- */
- public void doFetch() {
- doFetch = true;
- }
-
- /**
- * Runs once a minute in order to
- * lookup DNS names,
- * determine the state of this POD,
- * if this is a standby POD, and the fetch flag is set, perform a fetch of state from the active POD.
- * if this is a standby POD, check if there are any new log records to be replicated.
- *
- */
- @Override
- public void run() {
- try {
- state = lookupState();
- if (state == STANDBY) {
- // Only copy provisioning data FROM the active server TO the standby
- if (doFetch || (System.currentTimeMillis() >= nextsynctime)) {
- logger.debug("Initiating a sync...");
- JSONObject jo = readProvisioningJSON();
- if (jo != null) {
- doFetch = false;
- syncFeeds( jo.getJSONArray("feeds"));
- syncSubs( jo.getJSONArray("subscriptions"));
- syncGroups( jo.getJSONArray("groups")); //Rally:US708115 - 1610
- syncParams(jo.getJSONObject("parameters"));
- // The following will not be present in a version=1.0 provfeed
- JSONArray ja = jo.optJSONArray("ingress");
- if (ja != null)
- syncIngressRoutes(ja);
- JSONObject j2 = jo.optJSONObject("egress");
- if (j2 != null)
- syncEgressRoutes( j2);
- ja = jo.optJSONArray("routing");
- if (ja != null)
- syncNetworkRoutes(ja);
- }
- logger.info("PROV5013: Sync completed.");
- nextsynctime = System.currentTimeMillis() + ONE_HOUR;
- }
- } else {
- // Don't do fetches on non-standby PODs
- doFetch = false;
- }
-
- // Fetch DR logs as needed - server to server
- LogfileLoader lfl = LogfileLoader.getLoader();
- if (lfl.isIdle()) {
- // Only fetch new logs if the loader is waiting for them.
- logger.trace("Checking for logs to replicate...");
- RLEBitSet local = lfl.getBitSet();
- RLEBitSet remote = readRemoteLoglist();
- remote.andNot(local);
- if (!remote.isEmpty()) {
- logger.debug(" Replicating logs: "+remote);
- replicateDRLogs(remote);
- }
- }
- } catch (Exception e) {
- logger.warn("PROV0020: Caught exception in SynchronizerTask: "+e);
- e.printStackTrace();
- }
- }
-
- /**
- * This method is used to lookup the CNAME that points to the active server.
- * It returns 0 (UNKNOWN), 1(ACTIVE), or 2 (STANDBY) to indicate the state of this server.
- * @return the current state
- */
- private int lookupState() {
- int newstate = UNKNOWN;
- try {
- InetAddress myaddr = InetAddress.getLocalHost();
- if (logger.isTraceEnabled())
- logger.trace("My address: "+myaddr);
- String this_pod = myaddr.getHostName();
- Set pods = new TreeSet(Arrays.asList(BaseServlet.getPods()));
- if (pods.contains(this_pod)) {
- InetAddress pserver = InetAddress.getByName(BaseServlet.active_prov_name);
- newstate = myaddr.equals(pserver) ? ACTIVE : STANDBY;
- if (logger.isDebugEnabled() && System.currentTimeMillis() >= next_msg) {
- logger.debug("Active POD = "+pserver+", Current state is "+stnames[newstate]);
- next_msg = System.currentTimeMillis() + (5 * 60 * 1000L);
- }
- } else {
- logger.warn("PROV5003: My name ("+this_pod+") is missing from the list of provisioning servers.");
- }
- } catch (UnknownHostException e) {
- logger.warn("PROV5002: Cannot determine the name of this provisioning server.");
- }
-
- if (newstate != state)
- logger.info(String.format("PROV5001: Server state changed from %s to %s", stnames[state], stnames[newstate]));
- return newstate;
- }
- private static long next_msg = 0; // only display the "Current state" msg every 5 mins.
- /** Synchronize the Feeds in the JSONArray, with the Feeds in the DB. */
- private void syncFeeds(JSONArray ja) {
- Collection coll = new ArrayList();
- for (int n = 0; n < ja.length(); n++) {
- try {
- Feed f = new Feed(ja.getJSONObject(n));
- coll.add(f);
- } catch (Exception e) {
- logger.warn("PROV5004: Invalid object in feed: "+ja.optJSONObject(n));
- }
- }
- if (sync(coll, Feed.getAllFeeds()))
- BaseServlet.provisioningDataChanged();
- }
- /** Synchronize the Subscriptions in the JSONArray, with the Subscriptions in the DB. */
- private void syncSubs(JSONArray ja) {
- Collection coll = new ArrayList();
- for (int n = 0; n < ja.length(); n++) {
- try {
- //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
- JSONObject j = ja.getJSONObject(n);
- j.put("sync", "true");
- Subscription s = new Subscription(j);
- coll.add(s);
- } catch (Exception e) {
- logger.warn("PROV5004: Invalid object in subscription: "+ja.optJSONObject(n));
- }
- }
- if (sync(coll, Subscription.getAllSubscriptions()))
- BaseServlet.provisioningDataChanged();
- }
-
- /** Rally:US708115 - Synchronize the Groups in the JSONArray, with the Groups in the DB. */
- private void syncGroups(JSONArray ja) {
- Collection coll = new ArrayList();
- for (int n = 0; n < ja.length(); n++) {
- try {
- Group g = new Group(ja.getJSONObject(n));
- coll.add(g);
- } catch (Exception e) {
- logger.warn("PROV5004: Invalid object in subscription: "+ja.optJSONObject(n));
- }
- }
- if (sync(coll, Group.getAllgroups()))
- BaseServlet.provisioningDataChanged();
- }
-
-
- /** Synchronize the Parameters in the JSONObject, with the Parameters in the DB. */
- private void syncParams(JSONObject jo) {
- Collection coll = new ArrayList();
- for (String k : jo.keySet()) {
- String v = "";
- try {
- v = jo.getString(k);
- } catch (JSONException e) {
- try {
- v = ""+jo.getInt(k);
- } catch (JSONException e1) {
- JSONArray ja = jo.getJSONArray(k);
- for (int i = 0; i < ja.length(); i++) {
- if (i > 0)
- v += "|";
- v += ja.getString(i);
- }
- }
- }
- coll.add(new Parameters(k, v));
- }
- if (sync(coll, Parameters.getParameterCollection())) {
- BaseServlet.provisioningDataChanged();
- BaseServlet.provisioningParametersChanged();
- }
- }
- private void syncIngressRoutes(JSONArray ja) {
- Collection coll = new ArrayList();
- for (int n = 0; n < ja.length(); n++) {
- try {
- IngressRoute in = new IngressRoute(ja.getJSONObject(n));
- coll.add(in);
- } catch (NumberFormatException e) {
- logger.warn("PROV5004: Invalid object in ingress routes: "+ja.optJSONObject(n));
- }
- }
- if (sync(coll, IngressRoute.getAllIngressRoutes()))
- BaseServlet.provisioningDataChanged();
- }
- private void syncEgressRoutes(JSONObject jo) {
- Collection coll = new ArrayList();
- for (String key : jo.keySet()) {
- try {
- int sub = Integer.parseInt(key);
- String node = jo.getString(key);
- EgressRoute er = new EgressRoute(sub, node);
- coll.add(er);
- } catch (NumberFormatException e) {
- logger.warn("PROV5004: Invalid subid in egress routes: "+key);
- } catch (IllegalArgumentException e) {
- logger.warn("PROV5004: Invalid node name in egress routes: "+key);
- }
- }
- if (sync(coll, EgressRoute.getAllEgressRoutes()))
- BaseServlet.provisioningDataChanged();
- }
- private void syncNetworkRoutes(JSONArray ja) {
- Collection coll = new ArrayList();
- for (int n = 0; n < ja.length(); n++) {
- try {
- NetworkRoute nr = new NetworkRoute(ja.getJSONObject(n));
- coll.add(nr);
- } catch (JSONException e) {
- logger.warn("PROV5004: Invalid object in network routes: "+ja.optJSONObject(n));
- }
- }
- if (sync(coll, NetworkRoute.getAllNetworkRoutes()))
- BaseServlet.provisioningDataChanged();
- }
- private boolean sync(Collection extends Syncable> newc, Collection extends Syncable> oldc) {
- boolean changes = false;
- try {
- Map newmap = getMap(newc);
- Map oldmap = getMap(oldc);
- Set union = new TreeSet(newmap.keySet());
- union.addAll(oldmap.keySet());
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- for (String n : union) {
- Syncable newobj = newmap.get(n);
- Syncable oldobj = oldmap.get(n);
- if (oldobj == null) {
- if (logger.isDebugEnabled())
- logger.debug(" Inserting record: "+newobj);
- newobj.doInsert(conn);
- changes = true;
- } else if (newobj == null) {
- if (logger.isDebugEnabled())
- logger.debug(" Deleting record: "+oldobj);
- oldobj.doDelete(conn);
- changes = true;
- } else if (!newobj.equals(oldobj)) {
- if (logger.isDebugEnabled())
- logger.debug(" Updating record: "+newobj);
- newobj.doUpdate(conn);
-
- /**Rally US708115
- * Change Ownership of FEED - 1610, Syncronised with secondary DB.
- * */
- checkChnageOwner(newobj, oldobj);
-
- changes = true;
- }
- }
- db.release(conn);
- } catch (SQLException e) {
- logger.warn("PROV5009: problem during sync, exception: "+e);
- e.printStackTrace();
- }
- return changes;
- }
- private Map getMap(Collection extends Syncable> c) {
- Map map = new HashMap();
- for (Syncable v : c) {
- map.put(v.getKey(), v);
- }
- return map;
- }
-
-
- /**Change owner of FEED/SUBSCRIPTION*/
- /**Rally US708115
- * Change Ownership of FEED - 1610
- *
- * */
- private void checkChnageOwner(Syncable newobj, Syncable oldobj) {
- if(newobj instanceof Feed) {
- Feed oldfeed = (Feed) oldobj;
- Feed newfeed = (Feed) newobj;
-
- if(!oldfeed.getPublisher().equals(newfeed.getPublisher())){
- logger.info("PROV5013 - Previous publisher: "+oldfeed.getPublisher() +": New publisher-"+newfeed.getPublisher());
- oldfeed.setPublisher(newfeed.getPublisher());
- oldfeed.changeOwnerShip();
- }
- }
- else if(newobj instanceof Subscription) {
- Subscription oldsub = (Subscription) oldobj;
- Subscription newsub = (Subscription) newobj;
-
- if(!oldsub.getSubscriber().equals(newsub.getSubscriber())){
- logger.info("PROV5013 - Previous subscriber: "+oldsub.getSubscriber() +": New subscriber-"+newsub.getSubscriber());
- oldsub.setSubscriber(newsub.getSubscriber());
- oldsub.changeOwnerShip();
- }
- }
-
- }
-
- /**
- * Issue a GET on the peer POD's /internal/prov/ URL to get a copy of its provisioning data.
- * @return the provisioning data (as a JONObject)
- */
- private synchronized JSONObject readProvisioningJSON() {
- String url = URLUtilities.generatePeerProvURL();
- HttpGet get = new HttpGet(url);
- try {
- HttpResponse response = httpclient.execute(get);
- int code = response.getStatusLine().getStatusCode();
- if (code != HttpServletResponse.SC_OK) {
- logger.warn("PROV5010: readProvisioningJSON failed, bad error code: "+code);
- return null;
- }
- HttpEntity entity = response.getEntity();
- String ctype = entity.getContentType().getValue().trim();
- if (!ctype.equals(BaseServlet.PROVFULL_CONTENT_TYPE1) && !ctype.equals(BaseServlet.PROVFULL_CONTENT_TYPE2)) {
- logger.warn("PROV5011: readProvisioningJSON failed, bad content type: "+ctype);
- return null;
- }
- return new JSONObject(new JSONTokener(entity.getContent()));
- } catch (Exception e) {
- logger.warn("PROV5012: readProvisioningJSON failed, exception: "+e);
- return null;
- } finally {
- get.releaseConnection();
- }
- }
- /**
- * Issue a GET on the peer POD's /internal/drlogs/ URL to get an RELBitSet representing the
- * log records available in the remote database.
- * @return the bitset
- */
- private RLEBitSet readRemoteLoglist() {
- RLEBitSet bs = new RLEBitSet();
- String url = URLUtilities.generatePeerLogsURL();
-
- //Fixing if only one Prov is configured, not to give exception to fill logs, return empty bitset.
- if(url.equals("")) {
- return bs;
- }
- //End of fix.
-
- HttpGet get = new HttpGet(url);
- try {
- HttpResponse response = httpclient.execute(get);
- int code = response.getStatusLine().getStatusCode();
- if (code != HttpServletResponse.SC_OK) {
- logger.warn("PROV5010: readRemoteLoglist failed, bad error code: "+code);
- return bs;
- }
- HttpEntity entity = response.getEntity();
- String ctype = entity.getContentType().getValue().trim();
- if (!ctype.equals("text/plain")) {
- logger.warn("PROV5011: readRemoteLoglist failed, bad content type: "+ctype);
- return bs;
- }
- InputStream is = entity.getContent();
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- int ch = 0;
- while ((ch = is.read()) >= 0)
- bos.write(ch);
- bs.set(bos.toString());
- is.close();
- } catch (Exception e) {
- logger.warn("PROV5012: readRemoteLoglist failed, exception: "+e);
- return bs;
- } finally {
- get.releaseConnection();
- }
- return bs;
- }
- /**
- * Issue a POST on the peer POD's /internal/drlogs/ URL to fetch log records available
- * in the remote database that we wish to copy to the local database.
- * @param bs the bitset (an RELBitSet) of log records to fetch
- */
- private void replicateDRLogs(RLEBitSet bs) {
- String url = URLUtilities.generatePeerLogsURL();
- HttpPost post = new HttpPost(url);
- try {
- String t = bs.toString();
- HttpEntity body = new ByteArrayEntity(t.getBytes(), ContentType.create("text/plain"));
- post.setEntity(body);
- if (logger.isDebugEnabled())
- logger.debug("Requesting records: "+t);
-
- HttpResponse response = httpclient.execute(post);
- int code = response.getStatusLine().getStatusCode();
- if (code != HttpServletResponse.SC_OK) {
- logger.warn("PROV5010: replicateDRLogs failed, bad error code: "+code);
- return;
- }
- HttpEntity entity = response.getEntity();
- String ctype = entity.getContentType().getValue().trim();
- if (!ctype.equals("text/plain")) {
- logger.warn("PROV5011: replicateDRLogs failed, bad content type: "+ctype);
- return;
- }
-
- String spoolname = "" + System.currentTimeMillis();
- Path tmppath = Paths.get(spooldir, spoolname);
- Path donepath = Paths.get(spooldir, "IN."+spoolname);
- Files.copy(entity.getContent(), Paths.get(spooldir, spoolname), StandardCopyOption.REPLACE_EXISTING);
- Files.move(tmppath, donepath, StandardCopyOption.REPLACE_EXISTING);
- logger.info("Approximately "+bs.cardinality()+" records replicated.");
- } catch (Exception e) {
- logger.warn("PROV5012: replicateDRLogs failed, exception: "+e);
- } finally {
- post.releaseConnection();
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/BaseLogRecord.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/BaseLogRecord.java
deleted file mode 100644
index 327f95f2..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/BaseLogRecord.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import org.json.LOGJSONObject;
-
-/**
- * Define the common fields used by the three types of records generated by DR nodes.
- *
- * @author Robert Eby
- * @version $Id: BaseLogRecord.java,v 1.10 2013/10/29 16:57:57 eby Exp $
- */
-public class BaseLogRecord implements LOGJSONable, Loadable {
- protected static final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
-
- private long eventTime;
- private String publishId;
- private int feedid;
- private String requestUri;
- private String method;
- private String contentType;
- private long contentLength;
-
- protected BaseLogRecord(String[] pp) throws ParseException {
-// This throws exceptions occasionally - don't know why.
-// Date d = null;
-// synchronized (sdf) {
-// d = sdf.parse(pp[0]);
-// }
- Date d = parseDate(pp[0]);
- this.eventTime = d.getTime();
- this.publishId = pp[2];
- this.feedid = Integer.parseInt(pp[3]);
- if (pp[1].equals("DLX")) {
- this.requestUri = "";
- this.method = "GET"; // Note: we need a valid value in this field, even though unused
- this.contentType = "";
- this.contentLength = Long.parseLong(pp[5]);
- } else if (pp[1].equals("PUB") || pp[1].equals("LOG") || pp[1].equals("PBF")) {
- this.requestUri = pp[4];
- this.method = pp[5];
- this.contentType = pp[6];
- this.contentLength = Long.parseLong(pp[7]);
- } else {
- this.requestUri = pp[5];
- this.method = pp[6];
- this.contentType = pp[7];
- this.contentLength = Long.parseLong(pp[8]);
- }
- }
- protected BaseLogRecord(ResultSet rs) throws SQLException {
- this.eventTime = rs.getLong("EVENT_TIME");
- this.publishId = rs.getString("PUBLISH_ID");
- this.feedid = rs.getInt("FEEDID");
- this.requestUri = rs.getString("REQURI");
- this.method = rs.getString("METHOD");
- this.contentType = rs.getString("CONTENT_TYPE");
- this.contentLength = rs.getLong("CONTENT_LENGTH");
- }
- protected Date parseDate(final String s) throws ParseException {
- int[] n = new int[7];
- int p = 0;
- for (int i = 0; i < s.length(); i++) {
- char c = s.charAt(i);
- if (c < '0' || c > '9') {
- p++;
- } else {
- if (p > n.length)
- throw new ParseException("parseDate()", 0);
- n[p] = (n[p] * 10) + (c - '0');
- }
- }
- if (p != 7)
- throw new ParseException("parseDate()", 1);
- Calendar cal = new GregorianCalendar();
- cal.set(Calendar.YEAR, n[0]);
- cal.set(Calendar.MONTH, n[1]-1);
- cal.set(Calendar.DAY_OF_MONTH, n[2]);
- cal.set(Calendar.HOUR_OF_DAY, n[3]);
- cal.set(Calendar.MINUTE, n[4]);
- cal.set(Calendar.SECOND, n[5]);
- cal.set(Calendar.MILLISECOND, n[6]);
- return cal.getTime();
- }
- public long getEventTime() {
- return eventTime;
- }
- public void setEventTime(long eventTime) {
- this.eventTime = eventTime;
- }
- public String getPublishId() {
- return publishId;
- }
- public void setPublishId(String publishId) {
- this.publishId = publishId;
- }
- public int getFeedid() {
- return feedid;
- }
- public void setFeedid(int feedid) {
- this.feedid = feedid;
- }
- public String getRequestUri() {
- return requestUri;
- }
- public void setRequestUri(String requestUri) {
- this.requestUri = requestUri;
- }
- public String getMethod() {
- return method;
- }
- public void setMethod(String method) {
- this.method = method;
- }
- public String getContentType() {
- return contentType;
- }
- public void setContentType(String contentType) {
- this.contentType = contentType;
- }
- public long getContentLength() {
- return contentLength;
- }
- public void setContentLength(long contentLength) {
- this.contentLength = contentLength;
- }
- @Override
- public LOGJSONObject asJSONObject() {
- LOGJSONObject jo = new LOGJSONObject();
- String t = "";
- synchronized (sdf) {
- t = sdf.format(eventTime);
- }
- jo.put("date", t);
- jo.put("publishId", publishId);
- jo.put("requestURI", requestUri);
- jo.put("method", method);
- if (method.equals("PUT")) {
- jo.put("contentType", contentType);
- jo.put("contentLength", contentLength);
- }
- return jo;
- }
- @Override
- public void load(PreparedStatement ps) throws SQLException {
- ps.setLong (2, getEventTime());
- ps.setString(3, getPublishId());
- ps.setInt (4, getFeedid());
- ps.setString(5, getRequestUri());
- ps.setString(6, getMethod());
- ps.setString(7, getContentType());
- ps.setLong (8, getContentLength());
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Deleteable.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Deleteable.java
deleted file mode 100644
index c16bdbc1..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Deleteable.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.Connection;
-
-/**
- * An object that can be DELETE-ed from the database.
- * @author Robert Eby
- * @version $Id: Deleteable.java,v 1.2 2013/05/29 14:44:36 eby Exp $
- */
-public interface Deleteable {
- /**
- * Delete this object in the DB.
- * @param c the JDBC Connection to use
- * @return true if the DELETE succeeded, false otherwise
- */
- public boolean doDelete(Connection c);
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/DeliveryExtraRecord.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/DeliveryExtraRecord.java
deleted file mode 100644
index 1a1cb569..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/DeliveryExtraRecord.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Types;
-import java.text.ParseException;
-
-/**
- * The representation of a Delivery Extra (DLX) Record, as retrieved from the DB.
- * @author Robert Eby
- * @version $Id: DeliveryExtraRecord.java,v 1.1 2013/10/28 18:06:52 eby Exp $
- */
-public class DeliveryExtraRecord extends BaseLogRecord {
- private int subid;
- private long contentLength2;
-
- public DeliveryExtraRecord(String[] pp) throws ParseException {
- super(pp);
- this.subid = Integer.parseInt(pp[4]);
- this.contentLength2 = Long.parseLong(pp[6]);
- }
- public DeliveryExtraRecord(ResultSet rs) throws SQLException {
- super(rs);
- // Note: because this record should be "rare" these fields are mapped to unconventional fields in the DB
- this.subid = rs.getInt("DELIVERY_SUBID");
- this.contentLength2 = rs.getInt("CONTENT_LENGTH_2");
- }
- @Override
- public void load(PreparedStatement ps) throws SQLException {
- ps.setString(1, "dlx"); // field 1: type
- super.load(ps); // loads fields 2-8
- ps.setNull( 9, Types.VARCHAR);
- ps.setNull(10, Types.VARCHAR);
- ps.setNull(11, Types.VARCHAR);
- ps.setNull(12, Types.INTEGER);
- ps.setInt (13, subid);
- ps.setNull(14, Types.VARCHAR);
- ps.setNull(15, Types.INTEGER);
- ps.setNull(16, Types.INTEGER);
- ps.setNull(17, Types.VARCHAR);
- ps.setLong(19, contentLength2);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/DeliveryRecord.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/DeliveryRecord.java
deleted file mode 100644
index b4791d48..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/DeliveryRecord.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Types;
-import java.text.ParseException;
-import java.util.LinkedHashMap;
-
-import org.json.LOGJSONObject;
-
-/**
- * The representation of a Delivery Record, as retrieved from the DB.
- * @author Robert Eby
- * @version $Id: DeliveryRecord.java,v 1.9 2014/03/12 19:45:41 eby Exp $
- */
-public class DeliveryRecord extends BaseLogRecord {
- private int subid;
- private String fileid;
- private int result;
- private String user;
-
- public DeliveryRecord(String[] pp) throws ParseException {
- super(pp);
- String fileid = pp[5];
- if (fileid.lastIndexOf('/') >= 0)
- fileid = fileid.substring(fileid.lastIndexOf('/')+1);
- this.subid = Integer.parseInt(pp[4]);
- this.fileid = fileid;
- this.result = Integer.parseInt(pp[10]);
- this.user = pp[9];
- if (this.user != null && this.user.length() > 50)
- this.user = this.user.substring(0, 50);
- }
- public DeliveryRecord(ResultSet rs) throws SQLException {
- super(rs);
- this.subid = rs.getInt("DELIVERY_SUBID");
- this.fileid = rs.getString("DELIVERY_FILEID");
- this.result = rs.getInt("RESULT");
- this.user = rs.getString("USER");
- }
- public int getSubid() {
- return subid;
- }
- public void setSubid(int subid) {
- this.subid = subid;
- }
- public String getFileid() {
- return fileid;
- }
- public void setFileid(String fileid) {
- this.fileid = fileid;
- }
- public int getResult() {
- return result;
- }
- public void setResult(int result) {
- this.result = result;
- }
- public String getUser() {
- return user;
- }
- public void setUser(String user) {
- this.user = user;
- }
-
-
- public LOGJSONObject reOrderObject(LOGJSONObject jo) {
- LinkedHashMap logrecordObj = new LinkedHashMap();
-
- logrecordObj.put("statusCode", jo.get("statusCode"));
- logrecordObj.put("deliveryId", jo.get("deliveryId"));
- logrecordObj.put("publishId", jo.get("publishId"));
- logrecordObj.put("requestURI", jo.get("requestURI"));
- //logrecordObj.put("sourceIP", jo.get("sourceIP"));
- logrecordObj.put("method", jo.get("method"));
- logrecordObj.put("contentType", jo.get("contentType"));
- //logrecordObj.put("endpointId", jo.get("endpointId"));
- logrecordObj.put("type", jo.get("type"));
- logrecordObj.put("date", jo.get("date"));
- logrecordObj.put("contentLength", jo.get("contentLength"));
-
-
- LOGJSONObject newjo = new LOGJSONObject(logrecordObj);
- return newjo;
- }
-
- @Override
- public LOGJSONObject asJSONObject() {
- LOGJSONObject jo = super.asJSONObject();
- jo.put("type", "del");
- jo.put("deliveryId", user);
- jo.put("statusCode", result);
-
- LOGJSONObject newjo = this.reOrderObject(jo);
- return newjo;
- }
- @Override
- public void load(PreparedStatement ps) throws SQLException {
- ps.setString(1, "del"); // field 1: type
- super.load(ps); // loads fields 2-8
- ps.setNull (9, Types.VARCHAR);
- ps.setNull (10, Types.VARCHAR);
- ps.setString(11, getUser());
- ps.setNull (12, Types.INTEGER);
- ps.setInt (13, getSubid());
- ps.setString(14, getFileid());
- ps.setInt (15, getResult());
- ps.setNull (16, Types.INTEGER);
- ps.setNull (17, Types.VARCHAR);
- ps.setNull (19, Types.BIGINT);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/EgressRoute.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/EgressRoute.java
deleted file mode 100644
index 94b59ce4..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/EgressRoute.java
+++ /dev/null
@@ -1,227 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.log4j.Logger;
-import org.json.JSONObject;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * The representation of one route in the Egress Route Table.
- *
- * @author Robert P. Eby
- * @version $Id: EgressRoute.java,v 1.3 2013/12/16 20:30:23 eby Exp $
- */
-public class EgressRoute extends NodeClass implements Comparable {
- private static Logger intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- private final int subid;
- private final int nodeid;
-
- /**
- * Get a set of all Egress Routes in the DB. The set is sorted according to the natural sorting order
- * of the routes (based on the subscription ID in each route).
- * @return the sorted set
- */
- public static SortedSet getAllEgressRoutes() {
- SortedSet set = new TreeSet();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select SUBID, NODEID from EGRESS_ROUTES");
- while (rs.next()) {
- int subid = rs.getInt("SUBID");
- int nodeid = rs.getInt("NODEID");
- set.add(new EgressRoute(subid, nodeid));
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return set;
- }
- /**
- * Get a single Egress Route for the subscription sub .
- * @param sub the subscription to lookup
- * @return an EgressRoute, or null if there is no route for this subscription
- */
- public static EgressRoute getEgressRoute(int sub) {
- EgressRoute v = null;
- PreparedStatement ps = null;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- String sql = "select NODEID from EGRESS_ROUTES where SUBID = ?";
- ps = conn.prepareStatement(sql);
- ps.setInt(1, sub);
- ResultSet rs = ps.executeQuery();
- if (rs.next()) {
- int node = rs.getInt("NODEID");
- v = new EgressRoute(sub, node);
- }
- rs.close();
- ps.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return v;
- }
-
- public EgressRoute(int subid, int nodeid) throws IllegalArgumentException {
- this.subid = subid;
- this.nodeid = nodeid;
-// Note: unlike for Feeds, it subscriptions can be removed from the tables, so it is
-// possible that an orphan ERT entry can exist if a sub is removed.
-// if (Subscription.getSubscriptionById(subid) == null)
-// throw new IllegalArgumentException("No such subscription: "+subid);
- }
-
- public EgressRoute(int subid, String node) throws IllegalArgumentException {
- this(subid, lookupNodeName(node));
- }
-
- @Override
- public boolean doDelete(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- String sql = "delete from EGRESS_ROUTES where SUBID = ?";
- ps = c.prepareStatement(sql);
- ps.setInt(1, subid);
- ps.execute();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0007 doDelete: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
- @Override
- public boolean doInsert(Connection c) {
- boolean rv = false;
- PreparedStatement ps = null;
- try {
- // Create the NETWORK_ROUTES row
- String sql = "insert into EGRESS_ROUTES (SUBID, NODEID) values (?, ?)";
- ps = c.prepareStatement(sql);
- ps.setInt(1, this.subid);
- ps.setInt(2, this.nodeid);
- ps.execute();
- ps.close();
- rv = true;
- } catch (SQLException e) {
- intlogger.warn("PROV0005 doInsert: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
- @Override
- public boolean doUpdate(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- String sql = "update EGRESS_ROUTES set NODEID = ? where SUBID = ?";
- ps = c.prepareStatement(sql);
- ps.setInt(1, nodeid);
- ps.setInt(2, subid);
- ps.executeUpdate();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put(""+subid, lookupNodeID(nodeid));
- return jo;
- }
-
- @Override
- public String getKey() {
- return ""+subid;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof EgressRoute))
- return false;
- EgressRoute on = (EgressRoute)obj;
- return (subid == on.subid) && (nodeid == on.nodeid);
- }
-
- @Override
- public int compareTo(EgressRoute o) {
- return this.subid - o.subid;
- }
-
- @Override
- public String toString() {
- return String.format("EGRESS: sub=%d, node=%d", subid, nodeid);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/EventLogRecord.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/EventLogRecord.java
deleted file mode 100644
index adf45d49..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/EventLogRecord.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.security.cert.X509Certificate;
-
-import javax.servlet.http.HttpServletRequest;
-
-import com.att.research.datarouter.provisioning.BaseServlet;
-
-/**
- * This class is used to log provisioning server events. Each event consists of a who
- * (who made the provisioning request including the IP address, the X-ATT-DR-ON-BEHALF-OF
- * header value, and the client certificate), a what (what request was made; the method
- * and servlet involved), and a how (how the request was handled; the result code and
- * message returned to the client). EventLogRecords are logged using log4j at the INFO level.
- *
- * @author Robert Eby
- * @version $Id: EventLogRecord.java,v 1.1 2013/04/26 21:00:25 eby Exp $
- */
-public class EventLogRecord {
- private final String ipaddr; // Who
- private final String behalfof;
- private final String clientSubject;
- private final String method; // What
- private final String servlet;
- private int result; // How
- private String message;
-
- public EventLogRecord(HttpServletRequest request) {
- // Who is making the request
- this.ipaddr = request.getRemoteAddr();
- String s = request.getHeader(BaseServlet.BEHALF_HEADER);
- this.behalfof = (s != null) ? s : "";
- X509Certificate certs[] = (X509Certificate[]) request.getAttribute(BaseServlet.CERT_ATTRIBUTE);
- this.clientSubject = (certs != null && certs.length > 0)
- ? certs[0].getSubjectX500Principal().getName() : "";
-
- // What is the request
- this.method = request.getMethod();
- this.servlet = request.getServletPath();
-
- // How was it dealt with
- this.result = -1;
- this.message = "";
- }
- public void setResult(int result) {
- this.result = result;
- }
- public void setMessage(String message) {
- this.message = message;
- }
- @Override
- public String toString() {
- return String.format(
- "%s %s \"%s\" %s %s %d \"%s\"",
- ipaddr, behalfof, clientSubject,
- method, servlet,
- result, message
- );
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/ExpiryRecord.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/ExpiryRecord.java
deleted file mode 100644
index 1db5417b..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/ExpiryRecord.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Types;
-import java.text.ParseException;
-import java.util.LinkedHashMap;
-
-import org.json.LOGJSONObject;
-
-/**
- * The representation of a Expiry Record, as retrieved from the DB.
- * @author Robert Eby
- * @version $Id: ExpiryRecord.java,v 1.4 2013/10/28 18:06:52 eby Exp $
- */
-public class ExpiryRecord extends BaseLogRecord {
- private int subid;
- private String fileid;
- private int attempts;
- private String reason;
-
- public ExpiryRecord(String[] pp) throws ParseException {
- super(pp);
- String fileid = pp[5];
- if (fileid.lastIndexOf('/') >= 0)
- fileid = fileid.substring(fileid.lastIndexOf('/')+1);
- this.subid = Integer.parseInt(pp[4]);
- this.fileid = fileid;
- this.attempts = Integer.parseInt(pp[10]);
- this.reason = pp[9];
- if (!reason.equals("notRetryable") && !reason.equals("retriesExhausted") && !reason.equals("diskFull"))
- this.reason = "other";
- }
- public ExpiryRecord(ResultSet rs) throws SQLException {
- super(rs);
- this.subid = rs.getInt("DELIVERY_SUBID");
- this.fileid = rs.getString("DELIVERY_FILEID");
- this.attempts = rs.getInt("ATTEMPTS");
- this.reason = rs.getString("REASON");
- }
-
- public int getSubid() {
- return subid;
- }
-
- public void setSubid(int subid) {
- this.subid = subid;
- }
-
- public String getFileid() {
- return fileid;
- }
-
- public void setFileid(String fileid) {
- this.fileid = fileid;
- }
-
- public int getAttempts() {
- return attempts;
- }
-
- public void setAttempts(int attempts) {
- this.attempts = attempts;
- }
-
- public String getReason() {
- return reason;
- }
-
- public void setReason(String reason) {
- this.reason = reason;
- }
-
- public LOGJSONObject reOrderObject(LOGJSONObject jo) {
- LinkedHashMap logrecordObj = new LinkedHashMap();
-
- logrecordObj.put("expiryReason", jo.get("expiryReason"));
- logrecordObj.put("publishId", jo.get("publishId"));
- logrecordObj.put("attempts", jo.get("attempts"));
- logrecordObj.put("requestURI", jo.get("requestURI"));
- logrecordObj.put("method", jo.get("method"));
- logrecordObj.put("contentType", jo.get("contentType"));
- logrecordObj.put("type", jo.get("type"));
- logrecordObj.put("date", jo.get("date"));
- logrecordObj.put("contentLength", jo.get("contentLength"));
-
- LOGJSONObject newjo = new LOGJSONObject(logrecordObj);
- return newjo;
- }
-
- @Override
- public LOGJSONObject asJSONObject() {
- LOGJSONObject jo = super.asJSONObject();
- jo.put("type", "exp");
- jo.put("expiryReason", reason);
- jo.put("attempts", attempts);
-
- LOGJSONObject newjo = this.reOrderObject(jo);
- return newjo;
- }
- @Override
- public void load(PreparedStatement ps) throws SQLException {
- ps.setString(1, "exp"); // field 1: type
- super.load(ps); // loads fields 2-8
- ps.setNull (9, Types.VARCHAR);
- ps.setNull (10, Types.VARCHAR);
- ps.setNull (11, Types.VARCHAR);
- ps.setNull (12, Types.INTEGER);
- ps.setInt (13, getSubid());
- ps.setString(14, getFileid());
- ps.setNull (15, Types.INTEGER);
- ps.setInt (16, getAttempts());
- ps.setString(17, getReason());
- ps.setNull (19, Types.BIGINT);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Feed.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Feed.java
deleted file mode 100644
index 4ee5ab96..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Feed.java
+++ /dev/null
@@ -1,760 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.io.InvalidObjectException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.log4j.Logger;
-import org.json.JSONArray;
-import org.json.JSONObject;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-import com.att.research.datarouter.provisioning.utils.JSONUtilities;
-import com.att.research.datarouter.provisioning.utils.URLUtilities;
-
-/**
- * The representation of a Feed. Feeds can be retrieved from the DB, or stored/updated in the DB.
- * @author Robert Eby
- * @version $Id: Feed.java,v 1.13 2013/10/28 18:06:52 eby Exp $
- */
-public class Feed extends Syncable {
- private static Logger intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- private static int next_feedid = getMaxFeedID() + 1;
-
- private int feedid;
- private int groupid; //New field is added - Groups feature Rally:US708115 - 1610
- private String name;
- private String version;
- private String description;
- private String business_description; // New field is added - Groups feature Rally:US708102 - 1610
- private FeedAuthorization authorization;
- private String publisher;
- private FeedLinks links;
- private boolean deleted;
- private boolean suspended;
- private Date last_mod;
- private Date created_date;
-
- /**
- * Check if a feed ID is valid.
- * @param id the Feed ID
- * @return true if it is valid
- */
- @SuppressWarnings("resource")
- public static boolean isFeedValid(int id) {
- int count = 0;
- try {
- DB db = new DB();
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select COUNT(*) from FEEDS where FEEDID = " + id);
- if (rs.next()) {
- count = rs.getInt(1);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return count != 0;
- }
- /**
- * Get a specific feed from the DB, based upon its ID.
- * @param id the Feed ID
- * @return the Feed object, or null if it does not exist
- */
- public static Feed getFeedById(int id) {
- String sql = "select * from FEEDS where FEEDID = " + id;
- return getFeedBySQL(sql);
- }
- /**
- * Get a specific feed from the DB, based upon its name and version.
- * @param name the name of the Feed
- * @param version the version of the Feed
- * @return the Feed object, or null if it does not exist
- */
- public static Feed getFeedByNameVersion(String name, String version) {
- name = name.replaceAll("'", "''");
- version = version.replaceAll("'", "''");
- String sql = "select * from FEEDS where NAME = '" + name + "' and VERSION ='" + version + "'";
- return getFeedBySQL(sql);
- }
- /**
- * Return a count of the number of active feeds in the DB.
- * @return the count
- */
- public static int countActiveFeeds() {
- int count = 0;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select count(*) from FEEDS where DELETED = 0");
- if (rs.next()) {
- count = rs.getInt(1);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- intlogger.info("countActiveFeeds: "+e.getMessage());
- e.printStackTrace();
- }
- return count;
- }
- public static int getMaxFeedID() {
- int max = 0;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select MAX(feedid) from FEEDS");
- if (rs.next()) {
- max = rs.getInt(1);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- intlogger.info("getMaxFeedID: "+e.getMessage());
- e.printStackTrace();
- }
- return max;
- }
- public static Collection getAllFeeds() {
- Map map = new HashMap();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select * from FEEDS");
- while (rs.next()) {
- Feed feed = new Feed(rs);
- map.put(feed.getFeedid(), feed);
- }
- rs.close();
-
- String sql = "select * from FEED_ENDPOINT_IDS";
- rs = stmt.executeQuery(sql);
- while (rs.next()) {
- int id = rs.getInt("FEEDID");
- Feed feed = map.get(id);
- if (feed != null) {
- FeedEndpointID epi = new FeedEndpointID(rs);
- Collection ecoll = feed.getAuthorization().getEndpoint_ids();
- ecoll.add(epi);
- }
- }
- rs.close();
-
- sql = "select * from FEED_ENDPOINT_ADDRS";
- rs = stmt.executeQuery(sql);
- while (rs.next()) {
- int id = rs.getInt("FEEDID");
- Feed feed = map.get(id);
- if (feed != null) {
- Collection acoll = feed.getAuthorization().getEndpoint_addrs();
- acoll.add(rs.getString("ADDR"));
- }
- }
- rs.close();
-
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return map.values();
- }
- public static List getFilteredFeedUrlList(final String name, final String val) {
- List list = new ArrayList();
- String sql = "select SELF_LINK from FEEDS where DELETED = 0";
- if (name.equals("name")) {
- sql += " and NAME = ?";
- } else if (name.equals("publ")) {
- sql += " and PUBLISHER = ?";
- } else if (name.equals("subs")) {
- sql = "select distinct FEEDS.SELF_LINK from FEEDS, SUBSCRIPTIONS " +
- "where DELETED = 0 " +
- "and FEEDS.FEEDID = SUBSCRIPTIONS.FEEDID " +
- "and SUBSCRIPTIONS.SUBSCRIBER = ?";
- }
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(sql);
- if (sql.indexOf('?') >= 0)
- ps.setString(1, val);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- String t = rs.getString(1);
- list.add(t.trim());
- }
- rs.close();
- ps.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return list;
- }
- @SuppressWarnings("resource")
- private static Feed getFeedBySQL(String sql) {
- Feed feed = null;
- try {
- DB db = new DB();
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- if (rs.next()) {
- feed = new Feed(rs);
- rs.close();
-
- sql = "select * from FEED_ENDPOINT_IDS where FEEDID = " + feed.feedid;
- rs = stmt.executeQuery(sql);
- Collection ecoll = feed.getAuthorization().getEndpoint_ids();
- while (rs.next()) {
- FeedEndpointID epi = new FeedEndpointID(rs);
- ecoll.add(epi);
- }
- rs.close();
-
- sql = "select * from FEED_ENDPOINT_ADDRS where FEEDID = " + feed.feedid;
- rs = stmt.executeQuery(sql);
- Collection acoll = feed.getAuthorization().getEndpoint_addrs();
- while (rs.next()) {
- acoll.add(rs.getString("ADDR"));
- }
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return feed;
- }
-
- public Feed() {
- this("", "", "","");
- }
-
- public Feed(String name, String version, String desc,String business_description) {
- this.feedid = -1;
- this.groupid = -1; //New field is added - Groups feature Rally:US708115 - 1610
- this.name = name;
- this.version = version;
- this.description = desc;
- this.business_description=business_description; // New field is added - Groups feature Rally:US708102 - 1610
- this.authorization = new FeedAuthorization();
- this.publisher = "";
- this.links = new FeedLinks();
- this.deleted = false;
- this.suspended = false;
- this.last_mod = new Date();
- this.created_date = new Date();
- }
- public Feed(ResultSet rs) throws SQLException {
- this.feedid = rs.getInt("FEEDID");
- this.groupid = rs.getInt("GROUPID"); //New field is added - Groups feature Rally:US708115 - 1610
- this.name = rs.getString("NAME");
- this.version = rs.getString("VERSION");
- this.description = rs.getString("DESCRIPTION");
- this.business_description=rs.getString("BUSINESS_DESCRIPTION"); // New field is added - Groups feature Rally:US708102 - 1610
- this.authorization = new FeedAuthorization();
- this.authorization.setClassification(rs.getString("AUTH_CLASS"));
- this.publisher = rs.getString("PUBLISHER");
- this.links = new FeedLinks();
- this.links.setSelf(rs.getString("SELF_LINK"));
- this.links.setPublish(rs.getString("PUBLISH_LINK"));
- this.links.setSubscribe(rs.getString("SUBSCRIBE_LINK"));
- this.links.setLog(rs.getString("LOG_LINK"));
- this.deleted = rs.getBoolean("DELETED");
- this.suspended = rs.getBoolean("SUSPENDED");
- this.last_mod = rs.getDate("LAST_MOD");
- this.created_date = rs.getTimestamp("CREATED_DATE");
- }
- public Feed(JSONObject jo) throws InvalidObjectException {
- this("", "", "","");
- try {
- // The JSONObject is assumed to contain a vnd.att-dr.feed representation
- this.feedid = jo.optInt("feedid", -1);
- this.groupid = jo.optInt("groupid"); //New field is added - Groups feature Rally:US708115 - 1610
- this.name = jo.getString("name");
- if (name.length() > 255)
- throw new InvalidObjectException("name field is too long");
- this.version = jo.getString("version");
- if (version.length() > 20)
- throw new InvalidObjectException("version field is too long");
- this.description = jo.optString("description");
- this.business_description = jo.optString("business_description"); // New field is added - Groups feature Rally:US708102 - 1610
- if (description.length() > 1000)
- throw new InvalidObjectException("technical description field is too long");
-
- if (business_description.length() > 1000) // New field is added - Groups feature Rally:US708102 - 1610
- throw new InvalidObjectException("business description field is too long");
-
- this.authorization = new FeedAuthorization();
- JSONObject jauth = jo.getJSONObject("authorization");
- this.authorization.setClassification(jauth.getString("classification"));
- if (this.authorization.getClassification().length() > 32)
- throw new InvalidObjectException("classification field is too long");
- JSONArray ja = jauth.getJSONArray("endpoint_ids");
- for (int i = 0; i < ja.length(); i++) {
- JSONObject id = ja.getJSONObject(i);
- FeedEndpointID fid = new FeedEndpointID(id.getString("id"), id.getString("password"));
- if (fid.getId().length() > 20)
- throw new InvalidObjectException("id field is too long ("+fid.getId()+")");
- if (fid.getPassword().length() > 32)
- throw new InvalidObjectException("password field is too long ("+fid.getPassword()+")");
- this.authorization.getEndpoint_ids().add(fid);
- }
- if (this.authorization.getEndpoint_ids().size() < 1)
- throw new InvalidObjectException("need to specify at least one endpoint_id");
- ja = jauth.getJSONArray("endpoint_addrs");
- for (int i = 0; i < ja.length(); i++) {
- String addr = ja.getString(i);
- if (!JSONUtilities.validIPAddrOrSubnet(addr))
- throw new InvalidObjectException("bad IP addr or subnet mask: "+addr);
- this.authorization.getEndpoint_addrs().add(addr);
- }
-
- this.publisher = jo.optString("publisher", "");
- this.deleted = jo.optBoolean("deleted", false);
- this.suspended = jo.optBoolean("suspend", false);
- JSONObject jol = jo.optJSONObject("links");
- this.links = (jol == null) ? (new FeedLinks()) : (new FeedLinks(jol));
- } catch (InvalidObjectException e) {
- throw e;
- } catch (Exception e) {
- throw new InvalidObjectException("invalid JSON: "+e.getMessage());
- }
- }
- public int getFeedid() {
- return feedid;
- }
- public void setFeedid(int feedid) {
- this.feedid = feedid;
-
- // Create link URLs
- FeedLinks fl = getLinks();
- fl.setSelf(URLUtilities.generateFeedURL(feedid));
- fl.setPublish(URLUtilities.generatePublishURL(feedid));
- fl.setSubscribe(URLUtilities.generateSubscribeURL(feedid));
- fl.setLog(URLUtilities.generateFeedLogURL(feedid));
- }
-
- //new getter setters for groups- Rally:US708115 - 1610
- public int getGroupid() {
- return groupid;
- }
-
- public void setGroupid(int groupid) {
- this.groupid = groupid;
- }
-
- public String getName() {
- return name;
- }
- public void setName(String name) {
- this.name = name;
- }
- public String getVersion() {
- return version;
- }
- public void setVersion(String version) {
- this.version = version;
- }
- public String getDescription() {
- return description;
- }
- public void setDescription(String description) {
- this.description = description;
- }
- // New field is added - Groups feature Rally:US708102 - 1610
- public String getBusiness_description() {
- return business_description;
- }
-
- public void setBusiness_description(String business_description) {
- this.business_description = business_description;
- }
-
- public FeedAuthorization getAuthorization() {
- return authorization;
- }
- public void setAuthorization(FeedAuthorization authorization) {
- this.authorization = authorization;
- }
- public String getPublisher() {
- return publisher;
- }
- public void setPublisher(String publisher) {
- if (publisher != null) {
- if (publisher.length() > 8)
- publisher = publisher.substring(0, 8);
- this.publisher = publisher;
- }
- }
- public FeedLinks getLinks() {
- return links;
- }
- public void setLinks(FeedLinks links) {
- this.links = links;
- }
-
- public boolean isDeleted() {
- return deleted;
- }
-
- public void setDeleted(boolean deleted) {
- this.deleted = deleted;
- }
-
- public boolean isSuspended() {
- return suspended;
- }
-
- public void setSuspended(boolean suspended) {
- this.suspended = suspended;
- }
-
- public Date getLast_mod() {
- return last_mod;
- }
-
- public Date getCreated_date() {
- return created_date;
- }
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("feedid", feedid);
- jo.put("groupid", groupid); //New field is added - Groups feature Rally:US708115 - 1610
- jo.put("name", name);
- jo.put("version", version);
- jo.put("description", description);
- jo.put("business_description", business_description); // New field is added - Groups feature Rally:US708102 - 1610
- jo.put("authorization", authorization.asJSONObject());
- jo.put("publisher", publisher);
- jo.put("links", links.asJSONObject());
- jo.put("deleted", deleted);
- jo.put("suspend", suspended);
- jo.put("last_mod", last_mod.getTime());
- jo.put("created_date", created_date.getTime());
- return jo;
- }
- public JSONObject asLimitedJSONObject() {
- JSONObject jo = asJSONObject();
- jo.remove("deleted");
- jo.remove("feedid");
- jo.remove("last_mod");
- jo.remove("created_date");
- return jo;
- }
- public JSONObject asJSONObject(boolean hidepasswords) {
- JSONObject jo = asJSONObject();
- if (hidepasswords) {
- jo.remove("feedid"); // we no longer hide passwords, however we do hide these
- jo.remove("deleted");
- jo.remove("last_mod");
- jo.remove("created_date");
- }
- return jo;
- }
- @Override
- public boolean doDelete(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- String sql = "delete from FEEDS where FEEDID = ?";
- ps = c.prepareStatement(sql);
- ps.setInt(1, feedid);
- ps.execute();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0007 doDelete: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
- @Override
- public synchronized boolean doInsert(Connection c) {
- boolean rv = true;
-// PreparedStatement ps = null;
- try {
- if (feedid == -1) {
-// // Get the next feedid
-// String sql = "insert into FEEDS_UNIQUEID (FEEDID) values (0)";
-// ps = c.prepareStatement(sql, new String[] { "FEEDID" });
-// ps.execute();
-// ResultSet rs = ps.getGeneratedKeys();
-// rs.first();
-// setFeedid(rs.getInt(1));
- // No feed ID assigned yet, so assign the next available one
- setFeedid(next_feedid++);
- }
- // In case we insert a feed from synchronization
- if (feedid > next_feedid)
- next_feedid = feedid+1;
-
- // Create FEED_ENDPOINT_IDS rows
- FeedAuthorization auth = getAuthorization();
- String sql = "insert into FEED_ENDPOINT_IDS values (?, ?, ?)";
- PreparedStatement ps2 = c.prepareStatement(sql);
- for (FeedEndpointID fid : auth.getEndpoint_ids()) {
- ps2.setInt(1, feedid);
- ps2.setString(2, fid.getId());
- ps2.setString(3, fid.getPassword());
- ps2.executeUpdate();
- }
- ps2.close();
-
- // Create FEED_ENDPOINT_ADDRS rows
- sql = "insert into FEED_ENDPOINT_ADDRS values (?, ?)";
- ps2 = c.prepareStatement(sql);
- for (String t : auth.getEndpoint_addrs()) {
- ps2.setInt(1, feedid);
- ps2.setString(2, t);
- ps2.executeUpdate();
- }
- ps2.close();
-
- // Finally, create the FEEDS row
- sql = "insert into FEEDS (FEEDID, NAME, VERSION, DESCRIPTION, AUTH_CLASS, PUBLISHER, SELF_LINK, PUBLISH_LINK, SUBSCRIBE_LINK, LOG_LINK, DELETED, SUSPENDED,BUSINESS_DESCRIPTION, GROUPID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?)";
- ps2 = c.prepareStatement(sql);
- ps2.setInt(1, feedid);
- ps2.setString(2, getName());
- ps2.setString(3, getVersion());
- ps2.setString(4, getDescription());
- ps2.setString(5, getAuthorization().getClassification());
- ps2.setString(6, getPublisher());
- ps2.setString(7, getLinks().getSelf());
- ps2.setString(8, getLinks().getPublish());
- ps2.setString(9, getLinks().getSubscribe());
- ps2.setString(10, getLinks().getLog());
- ps2.setBoolean(11, isDeleted());
- ps2.setBoolean(12, isSuspended());
- ps2.setString(13,getBusiness_description()); // New field is added - Groups feature Rally:US708102 - 1610
- ps2.setInt(14,groupid); //New field is added - Groups feature Rally:US708115 - 1610
- ps2.executeUpdate();
- ps2.close();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0005 doInsert: "+e.getMessage());
- e.printStackTrace();
-// } finally {
-// try {
-// ps.close();
-// } catch (SQLException e) {
-// e.printStackTrace();
-// }
- }
- return rv;
- }
- @Override
- public boolean doUpdate(Connection c) {
- boolean rv = true;
- Feed oldobj = getFeedById(feedid);
- PreparedStatement ps = null;
- try {
- Set newset = getAuthorization().getEndpoint_ids();
- Set oldset = oldobj.getAuthorization().getEndpoint_ids();
-
- // Insert new FEED_ENDPOINT_IDS rows
- String sql = "insert into FEED_ENDPOINT_IDS values (?, ?, ?)";
- ps = c.prepareStatement(sql);
- for (FeedEndpointID fid : newset) {
- if (!oldset.contains(fid)) {
- ps.setInt(1, feedid);
- ps.setString(2, fid.getId());
- ps.setString(3, fid.getPassword());
- ps.executeUpdate();
- }
- }
- ps.close();
-
- // Delete old FEED_ENDPOINT_IDS rows
- sql = "delete from FEED_ENDPOINT_IDS where FEEDID = ? AND USERID = ? AND PASSWORD = ?";
- ps = c.prepareStatement(sql);
- for (FeedEndpointID fid : oldset) {
- if (!newset.contains(fid)) {
- ps.setInt(1, feedid);
- ps.setString(2, fid.getId());
- ps.setString(3, fid.getPassword());
- ps.executeUpdate();
- }
- }
- ps.close();
-
- // Insert new FEED_ENDPOINT_ADDRS rows
- Set newset2 = getAuthorization().getEndpoint_addrs();
- Set oldset2 = oldobj.getAuthorization().getEndpoint_addrs();
- sql = "insert into FEED_ENDPOINT_ADDRS values (?, ?)";
- ps = c.prepareStatement(sql);
- for (String t : newset2) {
- if (!oldset2.contains(t)) {
- ps.setInt(1, feedid);
- ps.setString(2, t);
- ps.executeUpdate();
- }
- }
- ps.close();
-
- // Delete old FEED_ENDPOINT_ADDRS rows
- sql = "delete from FEED_ENDPOINT_ADDRS where FEEDID = ? AND ADDR = ?";
- ps = c.prepareStatement(sql);
- for (String t : oldset2) {
- if (!newset2.contains(t)) {
- ps.setInt(1, feedid);
- ps.setString(2, t);
- ps.executeUpdate();
- }
- }
- ps.close();
-
- // Finally, update the FEEDS row
- sql = "update FEEDS set DESCRIPTION = ?, AUTH_CLASS = ?, DELETED = ?, SUSPENDED = ?, BUSINESS_DESCRIPTION=?, GROUPID=? where FEEDID = ?";
- ps = c.prepareStatement(sql);
- ps.setString(1, getDescription());
- ps.setString(2, getAuthorization().getClassification());
- ps.setInt(3, deleted ? 1 : 0);
- ps.setInt(4, suspended ? 1 : 0);
- ps.setString(5, getBusiness_description()); // New field is added - Groups feature Rally:US708102 - 1610
- ps.setInt(6, groupid); //New field is added - Groups feature Rally:US708115 - 1610
- ps.setInt(7, feedid);
- ps.executeUpdate();
- ps.close();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- if (ps != null)
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
- /**Rally US708115
- * Change Ownership of FEED - 1610
- * */
- public boolean changeOwnerShip() {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
-
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection c = db.getConnection();
- String sql = "update FEEDS set PUBLISHER = ? where FEEDID = ?";
- ps = c.prepareStatement(sql);
- ps.setString(1, this.publisher);
- ps.setInt(2, feedid);
- ps.execute();
- ps.close();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
-
- @Override
- public String getKey() {
- return ""+getFeedid();
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof Feed))
- return false;
- Feed of = (Feed) obj;
- if (feedid != of.feedid)
- return false;
- if (groupid != of.groupid) //New field is added - Groups feature Rally:US708115 - 1610
- return false;
- if (!name.equals(of.name))
- return false;
- if (!version.equals(of.version))
- return false;
- if (!description.equals(of.description))
- return false;
- if (!business_description.equals(of.business_description)) // New field is added - Groups feature Rally:US708102 - 1610
- return false;
- if (!publisher.equals(of.publisher))
- return false;
- if (!authorization.equals(of.authorization))
- return false;
- if (!links.equals(of.links))
- return false;
- if (deleted != of.deleted)
- return false;
- if (suspended != of.suspended)
- return false;
- return true;
- }
-
- @Override
- public String toString() {
- return "FEED: feedid=" + feedid + ", name=" + name + ", version=" + version;
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedAuthorization.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedAuthorization.java
deleted file mode 100644
index 5701ce9f..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedAuthorization.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-
-/**
- * The representation of a Feed authorization. This encapsulates the authorization information about a feed.
- * @author Robert Eby
- * @version $Id: FeedAuthorization.java,v 1.2 2013/06/20 14:11:05 eby Exp $
- */
-public class FeedAuthorization implements JSONable {
- private String classification;
- private Set endpoint_ids;
- private Set endpoint_addrs;
-
- public FeedAuthorization() {
- this.classification = "";
- this.endpoint_ids = new HashSet();
- this.endpoint_addrs = new HashSet();
- }
- public String getClassification() {
- return classification;
- }
- public void setClassification(String classification) {
- this.classification = classification;
- }
- public Set getEndpoint_ids() {
- return endpoint_ids;
- }
- public void setEndpoint_ids(Set endpoint_ids) {
- this.endpoint_ids = endpoint_ids;
- }
- public Set getEndpoint_addrs() {
- return endpoint_addrs;
- }
- public void setEndpoint_addrs(Set endpoint_addrs) {
- this.endpoint_addrs = endpoint_addrs;
- }
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("classification", classification);
- JSONArray ja = new JSONArray();
- for (FeedEndpointID eid : endpoint_ids) {
- ja.put(eid.asJSONObject());
- }
- jo.put("endpoint_ids", ja);
- ja = new JSONArray();
- for (String t : endpoint_addrs) {
- ja.put(t);
- }
- jo.put("endpoint_addrs", ja);
- return jo;
- }
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof FeedAuthorization))
- return false;
- FeedAuthorization of = (FeedAuthorization) obj;
- if (!classification.equals(of.classification))
- return false;
- if (!endpoint_ids.equals(of.endpoint_ids))
- return false;
- if (!endpoint_addrs.equals(of.endpoint_addrs))
- return false;
- return true;
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedEndpointID.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedEndpointID.java
deleted file mode 100644
index f009c64c..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedEndpointID.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.json.JSONObject;
-
-/**
- * The representation of a Feed endpoint. This contains a login/password pair.
- * @author Robert Eby
- * @version $Id: FeedEndpointID.java,v 1.1 2013/04/26 21:00:26 eby Exp $
- */
-public class FeedEndpointID implements JSONable {
- private String id;
- private String password;
-
- public FeedEndpointID() {
- this("", "");
- }
- public FeedEndpointID(String id, String password) {
- this.id = id;
- this.password = password;
- }
- public FeedEndpointID(ResultSet rs) throws SQLException {
- this.id = rs.getString("USERID");
- this.password = rs.getString("PASSWORD");
- }
-
- public String getId() {
- return id;
- }
-
- public void setId(String id) {
- this.id = id;
- }
-
- public String getPassword() {
- return password;
- }
-
- public void setPassword(String password) {
- this.password = password;
- }
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("id", id);
- jo.put("password", password);
- return jo;
- }
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof FeedEndpointID))
- return false;
- FeedEndpointID f2 = (FeedEndpointID) obj;
- return id.equals(f2.id) && password.equals(f2.password);
- }
- @Override
- public int hashCode() {
- return (id + ":" + password).hashCode();
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedLinks.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedLinks.java
deleted file mode 100644
index ccce9c4b..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/FeedLinks.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.io.InvalidObjectException;
-
-import org.json.JSONObject;
-
-/**
- * The URLs associated with a Feed.
- * @author Robert Eby
- * @version $Id: FeedLinks.java,v 1.3 2013/07/05 13:48:05 eby Exp $
- */
-public class FeedLinks implements JSONable {
- private String self;
- private String publish;
- private String subscribe;
- private String log;
-
- public FeedLinks() {
- self = publish = subscribe = log = null;
- }
-
- public FeedLinks(JSONObject jo) throws InvalidObjectException {
- this();
- self = jo.getString("self");
- publish = jo.getString("publish");
- subscribe = jo.getString("subscribe");
- log = jo.getString("log");
- }
-
- public String getSelf() {
- return self;
- }
- public void setSelf(String self) {
- this.self = self;
- }
- public String getPublish() {
- return publish;
- }
- public void setPublish(String publish) {
- this.publish = publish;
- }
- public String getSubscribe() {
- return subscribe;
- }
- public void setSubscribe(String subscribe) {
- this.subscribe = subscribe;
- }
- public String getLog() {
- return log;
- }
- public void setLog(String log) {
- this.log = log;
- }
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("self", self);
- jo.put("publish", publish);
- jo.put("subscribe", subscribe);
- jo.put("log", log);
- return jo;
- }
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof FeedLinks))
- return false;
- FeedLinks of = (FeedLinks) obj;
- if (!self.equals(of.self))
- return false;
- if (!publish.equals(of.publish))
- return false;
- if (!subscribe.equals(of.subscribe))
- return false;
- if (!log.equals(of.log))
- return false;
- return true;
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Group.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Group.java
deleted file mode 100644
index 3f55b00a..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Group.java
+++ /dev/null
@@ -1,417 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.io.InvalidObjectException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-import org.json.JSONObject;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-import com.att.research.datarouter.provisioning.utils.URLUtilities;
-
-/**
- * The representation of a Subscription. Subscriptions can be retrieved from the DB, or stored/updated in the DB.
- * @author vikram
- * @version $Id: Group.java,v 1.0 2016/07/19
- */
-public class Group extends Syncable {
- private static Logger intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- private static int next_groupid = getMaxGroupID() + 1;
-
- private int groupid;
- private String authid;
- private String name;
- private String description;
- private String classification;
- private String members;
- private Date last_mod;
-
-
- public static Group getGroupMatching(Group gup) {
- String sql = String.format(
- "select * from GROUPS where NAME = \"%s\"",
- gup.getName()
- );
- List list = getGroupsForSQL(sql);
- return list.size() > 0 ? list.get(0) : null;
- }
-
- public static Group getGroupMatching(Group gup, int groupid) {
- String sql = String.format(
- "select * from GROUPS where NAME = \"%s\" and GROUPID != %d ",
- gup.getName(),
- gup.getGroupid()
- );
- List list = getGroupsForSQL(sql);
- return list.size() > 0 ? list.get(0) : null;
- }
-
- public static Group getGroupById(int id) {
- String sql = "select * from GROUPS where GROUPID = " + id;
- List list = getGroupsForSQL(sql);
- return list.size() > 0 ? list.get(0) : null;
- }
-
- public static Group getGroupByAuthId(String id) {
- String sql = "select * from GROUPS where AUTHID = '" + id +"'";
- List list = getGroupsForSQL(sql);
- return list.size() > 0 ? list.get(0) : null;
- }
-
- public static Collection getAllgroups() {
- return getGroupsForSQL("select * from GROUPS");
- }
- private static List getGroupsForSQL(String sql) {
- List list = new ArrayList();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- while (rs.next()) {
- Group group = new Group(rs);
- list.add(group);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return list;
- }
- public static int getMaxGroupID() {
- int max = 0;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select MAX(groupid) from GROUPS");
- if (rs.next()) {
- max = rs.getInt(1);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- intlogger.info("getMaxSubID: "+e.getMessage());
- e.printStackTrace();
- }
- return max;
- }
- public static Collection getGroupsByClassfication(String classfication) {
- List list = new ArrayList();
- String sql = "select * from GROUPS where classification = '"+classfication+"'";
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- while (rs.next()) {
- int groupid = rs.getInt("groupid");
- //list.add(URLUtilities.generateSubscriptionURL(groupid));
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return list;
- }
- /**
- * Return a count of the number of active subscriptions in the DB.
- * @return the count
- */
- public static int countActiveSubscriptions() {
- int count = 0;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select count(*) from SUBSCRIPTIONS");
- if (rs.next()) {
- count = rs.getInt(1);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- intlogger.warn("PROV0008 countActiveSubscriptions: "+e.getMessage());
- e.printStackTrace();
- }
- return count;
- }
-
- public Group() {
- this("", "", "");
- }
- public Group(String name, String desc, String members) {
- this.groupid = -1;
- this.authid = "";
- this.name = name;
- this.description = desc;
- this.members = members;
- this.classification = "";
- this.last_mod = new Date();
- }
-
-
- public Group(ResultSet rs) throws SQLException {
- this.groupid = rs.getInt("GROUPID");
- this.authid = rs.getString("AUTHID");
- this.name = rs.getString("NAME");
- this.description = rs.getString("DESCRIPTION");
- this.classification = rs.getString("CLASSIFICATION");
- this.members = rs.getString("MEMBERS");
- this.last_mod = rs.getDate("LAST_MOD");
- }
-
-
-
- public Group(JSONObject jo) throws InvalidObjectException {
- this("", "", "");
- try {
- // The JSONObject is assumed to contain a vnd.att-dr.group representation
- this.groupid = jo.optInt("groupid", -1);
- String gname = jo.getString("name");
- String gdescription = jo.getString("description");
-
- this.authid = jo.getString("authid");
- this.name = gname;
- this.description = gdescription;
- this.classification = jo.getString("classification");
- this.members = jo.getString("members");
-
- if (gname.length() > 50)
- throw new InvalidObjectException("Group name is too long");
- if (gdescription.length() > 256)
- throw new InvalidObjectException("Group Description is too long");
- } catch (InvalidObjectException e) {
- throw e;
- } catch (Exception e) {
- throw new InvalidObjectException("invalid JSON: "+e.getMessage());
- }
- }
- public int getGroupid() {
- return groupid;
- }
-
- public static Logger getIntlogger() {
- return intlogger;
- }
- public void setGroupid(int groupid) {
- this.groupid = groupid;
- }
-
- public static void setIntlogger(Logger intlogger) {
- Group.intlogger = intlogger;
- }
- public static int getNext_groupid() {
- return next_groupid;
- }
- public static void setNext_groupid(int next_groupid) {
- Group.next_groupid = next_groupid;
- }
- public String getAuthid() {
- return authid;
- }
- public void setAuthid(String authid) {
- this.authid = authid;
- }
- public String getName() {
- return name;
- }
- public void setName(String name) {
- this.name = name;
- }
- public String getDescription() {
- return description;
- }
- public void setDescription(String description) {
- this.description = description;
- }
- public String getClassification() {
- return classification;
- }
- public void setClassification(String classification) {
- this.classification = classification;
- }
- public String getMembers() {
- return members;
- }
- public void setMembers(String members) {
- this.members = members;
- }
- public Date getLast_mod() {
- return last_mod;
- }
- public void setLast_mod(Date last_mod) {
- this.last_mod = last_mod;
- }
-
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("groupid", groupid);
- jo.put("authid", authid);
- jo.put("name", name);
- jo.put("description", description);
- jo.put("classification", classification);
- jo.put("members", members);
- jo.put("last_mod", last_mod.getTime());
- return jo;
- }
- @Override
- public boolean doInsert(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- if (groupid == -1) {
- // No feed ID assigned yet, so assign the next available one
- setGroupid(next_groupid++);
- }
- // In case we insert a gropup from synchronization
- if (groupid > next_groupid)
- next_groupid = groupid+1;
-
-
- // Create the GROUPS row
- String sql = "insert into GROUPS (GROUPID, AUTHID, NAME, DESCRIPTION, CLASSIFICATION, MEMBERS) values (?, ?, ?, ?, ?, ?)";
- ps = c.prepareStatement(sql, new String[] { "GROUPID" });
- ps.setInt(1, groupid);
- ps.setString(2, authid);
- ps.setString(3, name);
- ps.setString(4, description);
- ps.setString(5, classification);
- ps.setString(6, members);
- ps.execute();
- ps.close();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0005 doInsert: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
- @Override
- public boolean doUpdate(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- String sql = "update GROUPS set AUTHID = ?, NAME = ?, DESCRIPTION = ?, CLASSIFICATION = ? , MEMBERS = ? where GROUPID = ?";
- ps = c.prepareStatement(sql);
- ps.setString(1, authid);
- ps.setString(2, name);
- ps.setString(3, description);
- ps.setString(4, classification);
- ps.setString(5, members);
- ps.setInt(6, groupid);
- ps.executeUpdate();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
- @Override
- public boolean doDelete(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- String sql = "delete from GROUPS where GROUPID = ?";
- ps = c.prepareStatement(sql);
- ps.setInt(1, groupid);
- ps.execute();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0007 doDelete: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
- @Override
- public String getKey() {
- return ""+getGroupid();
- }
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof Group))
- return false;
- Group os = (Group) obj;
- if (groupid != os.groupid)
- return false;
- if (authid != os.authid)
- return false;
- if (!name.equals(os.name))
- return false;
- if (description != os.description)
- return false;
- if (!classification.equals(os.classification))
- return false;
- if (!members.equals(os.members))
- return false;
-
- return true;
- }
-
- @Override
- public String toString() {
- return "GROUP: groupid=" + groupid;
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/IngressRoute.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/IngressRoute.java
deleted file mode 100644
index a9ea9bc7..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/IngressRoute.java
+++ /dev/null
@@ -1,542 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.log4j.Logger;
-import org.json.JSONArray;
-import org.json.JSONObject;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * The representation of one route in the Ingress Route Table.
- *
- * @author Robert P. Eby
- * @version $Id: IngressRoute.java,v 1.3 2013/12/16 20:30:23 eby Exp $
- */
-public class IngressRoute extends NodeClass implements Comparable {
- private static Logger intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- private final int seq;
- private final int feedid;
- private final String userid;
- private final String subnet;
- private int nodelist;
- private SortedSet nodes;
-
- /**
- * Get all IngressRoutes in the database, sorted in order according to their sequence field.
- * @return a sorted set of IngressRoutes
- */
- public static SortedSet getAllIngressRoutes() {
- return getAllIngressRoutesForSQL("select SEQUENCE, FEEDID, USERID, SUBNET, NODESET from INGRESS_ROUTES");
- }
- /**
- * Get all IngressRoutes in the database with a particular sequence number.
- * @param seq the sequence number
- * @return a set of IngressRoutes
- */
- public static Set getIngressRoutesForSeq(int seq) {
- return getAllIngressRoutesForSQL("select SEQUENCE, FEEDID, USERID, SUBNET, NODESET from INGRESS_ROUTES where SEQUENCE = "+seq);
- }
- private static SortedSet getAllIngressRoutesForSQL(String sql) {
- SortedSet set = new TreeSet();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- while (rs.next()) {
- int seq = rs.getInt("SEQUENCE");
- int feedid = rs.getInt("FEEDID");
- String user = rs.getString("USERID");
- String subnet = rs.getString("SUBNET");
- int nodeset = rs.getInt("NODESET");
- set.add(new IngressRoute(seq, feedid, user, subnet, nodeset));
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return set;
- }
-
- /**
- * Get the maximum node set ID in use in the DB.
- * @return the integer value of the maximum
- */
- public static int getMaxNodeSetID() {
- return getMax("select max(SETID) as MAX from NODESETS");
- }
- /**
- * Get the maximum node sequence number in use in the DB.
- * @return the integer value of the maximum
- */
- public static int getMaxSequence() {
- return getMax("select max(SEQUENCE) as MAX from INGRESS_ROUTES");
- }
- private static int getMax(String sql) {
- int rv = 0;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- if (rs.next()) {
- rv = rs.getInt("MAX");
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return rv;
- }
-
- /**
- * Get an Ingress Route for a particular feed ID, user, and subnet
- * @param feedid the Feed ID to look for
- * @param user the user name to look for
- * @param subnet the subnet to look for
- * @return the Ingress Route, or null of there is none
- */
- public static IngressRoute getIngressRoute(int feedid, String user, String subnet) {
- IngressRoute v = null;
- PreparedStatement ps = null;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- String sql = "select SEQUENCE, NODESET from INGRESS_ROUTES where FEEDID = ? AND USERID = ? and SUBNET = ?";
- ps = conn.prepareStatement(sql);
- ps.setInt(1, feedid);
- ps.setString(2, user);
- ps.setString(3, subnet);
- ResultSet rs = ps.executeQuery();
- if (rs.next()) {
- int seq = rs.getInt("SEQUENCE");
- int nodeset = rs.getInt("NODESET");
- v = new IngressRoute(seq, feedid, user, subnet, nodeset);
- }
- rs.close();
- ps.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return v;
- }
-
- /**
- * Get a collection of all Ingress Routes with a particular sequence number.
- * @param seq the sequence number to look for
- * @return the collection (may be empty).
- */
- public static Collection getIngressRoute(int seq) {
- Collection rv = new ArrayList();
- PreparedStatement ps = null;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- String sql = "select FEEDID, USERID, SUBNET, NODESET from INGRESS_ROUTES where SEQUENCE = ?";
- ps = conn.prepareStatement(sql);
- ps.setInt(1, seq);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- int feedid = rs.getInt("FEEDID");
- String user = rs.getString("USERID");
- String subnet = rs.getString("SUBNET");
- int nodeset = rs.getInt("NODESET");
- rv.add(new IngressRoute(seq, feedid, user, subnet, nodeset));
- }
- rs.close();
- ps.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
- public IngressRoute(int seq, int feedid, String user, String subnet, Collection nodes)
- throws IllegalArgumentException
- {
- this(seq, feedid, user, subnet);
- this.nodelist = -1;
- this.nodes = new TreeSet(nodes);
- }
-
- public IngressRoute(int seq, int feedid, String user, String subnet, int nodeset)
- throws IllegalArgumentException
- {
- this(seq, feedid, user, subnet);
- this.nodelist = nodeset;
- this.nodes = new TreeSet(readNodes());
- }
-
- private IngressRoute(int seq, int feedid, String user, String subnet)
- throws IllegalArgumentException
- {
- this.seq = seq;
- this.feedid = feedid;
- this.userid = (user == null) ? "-" : user;
- this.subnet = (subnet == null) ? "-" : subnet;
- this.nodelist = -1;
- this.nodes = null;
- if (Feed.getFeedById(feedid) == null)
- throw new IllegalArgumentException("No such feed: "+feedid);
- if (!this.subnet.equals("-")) {
- SubnetMatcher sm = new SubnetMatcher(subnet);
- if (!sm.isValid())
- throw new IllegalArgumentException("Invalid subnet: "+subnet);
- }
- }
-
- public IngressRoute(JSONObject jo) {
- this.seq = jo.optInt("seq");
- this.feedid = jo.optInt("feedid");
- String t = jo.optString("user");
- this.userid = t.equals("") ? "-" : t;
- t = jo.optString("subnet");
- this.subnet = t.equals("") ? "-" : t;
- this.nodelist = -1;
- this.nodes = new TreeSet();
- JSONArray ja = jo.getJSONArray("node");
- for (int i = 0; i < ja.length(); i++)
- this.nodes.add(ja.getString(i));
- }
- /**
- * Does this particular IngressRoute match a request, represented by feedid and req?
- * To match, feedid must match the feed ID in the route, the user in the route
- * (if specified) must match the user in the request, and the subnet in the route (if specified)
- * must match the subnet from the request.
- * @param feedid the feedid for this request
- * @param req the remainder of the request
- * @return true if a match, false otherwise
- */
- public boolean matches(int feedid, HttpServletRequest req) {
- // Check feedid
- if (this.feedid != feedid)
- return false;
-
- // Get user from request and compare
- // Note: we don't check the password; the node will do that
- if (userid.length() > 0 && !userid.equals("-")) {
- String credentials = req.getHeader("Authorization");
- if (credentials == null || !credentials.startsWith("Basic "))
- return false;
- String t = new String(Base64.decodeBase64(credentials.substring(6)));
- int ix = t.indexOf(':');
- if (ix >= 0)
- t = t.substring(0, ix);
- if (!t.equals(this.userid))
- return false;
- }
-
- // If this route has a subnet, match it against the requester's IP addr
- if (subnet.length() > 0 && !subnet.equals("-")) {
- try {
- InetAddress inet = InetAddress.getByName(req.getRemoteAddr());
- SubnetMatcher sm = new SubnetMatcher(subnet);
- return sm.matches(inet.getAddress());
- } catch (UnknownHostException e) {
- return false;
- }
- }
- return true;
- }
-
- /**
- * Compare IP addresses as byte arrays to a subnet specified as a CIDR.
- * Taken from com.att.research.datarouter.node.SubnetMatcher and modified somewhat.
- */
- public class SubnetMatcher {
- private byte[] sn;
- private int len;
- private int mask;
- private boolean valid;
-
- /**
- * Construct a subnet matcher given a CIDR
- * @param subnet The CIDR to match
- */
- public SubnetMatcher(String subnet) {
- int i = subnet.lastIndexOf('/');
- if (i == -1) {
- try {
- sn = InetAddress.getByName(subnet).getAddress();
- len = sn.length;
- valid = true;
- } catch (UnknownHostException e) {
- len = 0;
- valid = false;
- }
- mask = 0;
- } else {
- int n = Integer.parseInt(subnet.substring(i + 1));
- try {
- sn = InetAddress.getByName(subnet.substring(0, i)).getAddress();
- valid = true;
- } catch (UnknownHostException e) {
- valid = false;
- }
- len = n / 8;
- mask = ((0xff00) >> (n % 8)) & 0xff;
- }
- }
- public boolean isValid() {
- return valid;
- }
- /**
- * Is the IP address in the CIDR?
- * @param addr the IP address as bytes in network byte order
- * @return true if the IP address matches.
- */
- public boolean matches(byte[] addr) {
- if (!valid || addr.length != sn.length) {
- return false;
- }
- for (int i = 0; i < len; i++) {
- if (addr[i] != sn[i]) {
- return false;
- }
- }
- if (mask != 0 && ((addr[len] ^ sn[len]) & mask) != 0) {
- return false;
- }
- return true;
- }
- }
-
- /**
- * Get the list of node names for this route.
- * @return the list
- */
- public SortedSet getNodes() {
- return this.nodes;
- }
-
- private Collection readNodes() {
- Collection set = new TreeSet();
- PreparedStatement ps = null;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- String sql = "select NODEID from NODESETS where SETID = ?";
- ps = conn.prepareStatement(sql);
- ps.setInt(1, nodelist);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- int id = rs.getInt("NODEID");
- set.add(lookupNodeID(id));
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return set;
- }
-
- /**
- * Delete the IRT route having this IngressRoutes feed ID, user ID, and subnet from the database.
- * @return true if the delete succeeded
- */
- @Override
- public boolean doDelete(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- ps = c.prepareStatement("delete from INGRESS_ROUTES where FEEDID = ? and USERID = ? and SUBNET = ?");
- ps.setInt(1, feedid);
- ps.setString(2, userid);
- ps.setString(3, subnet);
- ps.execute();
- ps.close();
-
- ps = c.prepareStatement("delete from NODESETS where SETID = ?");
- ps.setInt(1, nodelist);
- ps.execute();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0007 doDelete: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
- @SuppressWarnings("resource")
- @Override
- public boolean doInsert(Connection c) {
- boolean rv = false;
- PreparedStatement ps = null;
- try {
- // Create the NODESETS rows & set nodelist
- int set = getMaxNodeSetID() + 1;
- this.nodelist = set;
- for (String node : nodes) {
- int id = lookupNodeName(node);
- ps = c.prepareStatement("insert into NODESETS (SETID, NODEID) values (?,?)");
- ps.setInt(1, this.nodelist);
- ps.setInt(2, id);
- ps.execute();
- ps.close();
- }
-
- // Create the INGRESS_ROUTES row
- ps = c.prepareStatement("insert into INGRESS_ROUTES (SEQUENCE, FEEDID, USERID, SUBNET, NODESET) values (?, ?, ?, ?, ?)");
- ps.setInt(1, this.seq);
- ps.setInt(2, this.feedid);
- ps.setString(3, this.userid);
- ps.setString(4, this.subnet);
- ps.setInt(5, this.nodelist);
- ps.execute();
- ps.close();
- rv = true;
- } catch (SQLException e) {
- intlogger.warn("PROV0005 doInsert: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
- @Override
- public boolean doUpdate(Connection c) {
- return doDelete(c) && doInsert(c);
- }
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("feedid", feedid);
- // Note: for user and subnet, null, "", and "-" are equivalent
- if (userid != null && !userid.equals("-") && !userid.equals(""))
- jo.put("user", userid);
- if (subnet != null && !subnet.equals("-") && !subnet.equals(""))
- jo.put("subnet", subnet);
- jo.put("seq", seq);
- jo.put("node", nodes);
- return jo;
- }
-
- @Override
- public String getKey() {
- return String.format("%d/%s/%s/%d", feedid, (userid == null)?"":userid, (subnet == null)?"":subnet, seq);
- }
-
- @Override
- public int hashCode() {
- return toString().hashCode();
- }
-
- @Override
- public boolean equals(Object obj) {
- try {
- if (!(obj instanceof IngressRoute))
- return false;
- return this.compareTo((IngressRoute) obj) == 0;
- } catch (NullPointerException e) {
- return false;
- }
- }
-
- @Override
- public int compareTo(IngressRoute in) {
- if (in == null)
- throw new NullPointerException();
- int n = this.feedid - in.feedid;
- if (n != 0)
- return n;
- n = this.seq - in.seq;
- if (n != 0)
- return n;
- n = this.userid.compareTo(in.userid);
- if (n != 0)
- return n;
- n = this.subnet.compareTo(in.subnet);
- if (n != 0)
- return n;
- return this.nodes.equals(in.nodes) ? 0 : 1;
- }
-
- @Override
- public String toString() {
- return String.format("INGRESS: feed=%d, userid=%s, subnet=%s, seq=%d", feedid, (userid == null)?"":userid, (subnet == null)?"":subnet, seq);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Insertable.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Insertable.java
deleted file mode 100644
index 6604ab41..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Insertable.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.Connection;
-
-/**
- * An object that can be INSERT-ed into the database.
- * @author Robert Eby
- * @version $Id: Insertable.java,v 1.2 2013/05/29 14:44:36 eby Exp $
- */
-public interface Insertable {
- /**
- * Insert this object into the DB.
- * @param c the JDBC Connection to use
- * @return true if the INSERT succeeded, false otherwise
- */
- public boolean doInsert(Connection c);
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/JSONable.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/JSONable.java
deleted file mode 100644
index cbea9ad0..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/JSONable.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import org.json.JSONObject;
-
-/**
- * An object that can be represented as a {@link JSONObject}.
- * @author Robert Eby
- * @version $Id: JSONable.java,v 1.1 2013/04/26 21:00:26 eby Exp $
- */
-public interface JSONable {
- /**
- * Get a JSONObject representing this object.
- * @return the JSONObject
- */
- public JSONObject asJSONObject();
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/LOGJSONable.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/LOGJSONable.java
deleted file mode 100644
index 93cdfaa9..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/LOGJSONable.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import org.json.LOGJSONObject;
-
-/**
- * An object that can be represented as a {@link JSONObject}.
- * @author Robert Eby
- * @version $Id: JSONable.java,v 1.1 2013/04/26 21:00:26 eby Exp $
- */
-public interface LOGJSONable {
- /**
- * Get a JSONObject representing this object.
- * @return the JSONObject
- */
- public LOGJSONObject asJSONObject();
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Loadable.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Loadable.java
deleted file mode 100644
index 3676f450..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Loadable.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-import com.att.research.datarouter.provisioning.utils.LogfileLoader;
-
-/**
- * This interface is used by bean classes that can be loaded into the LOG_RECORDS table using the
- * PreparedStatement at {@link LogfileLoader}.INSERT_SQL.
- *
- * @author Robert Eby
- * @version $Id: Loadable.java,v 1.2 2013/08/06 13:28:33 eby Exp $
- */
-public interface Loadable {
- /**
- * Load the 18 fields in the PreparedStatement ps . The fields are:
- *
- * type (String)
- * event_time (long)
- * publish ID (String)
- * feed ID (int)
- * request URI (String)
- * method (String)
- * content type (String)
- * content length (long)
- * feed File ID (String)
- * remote address (String)
- * user (String)
- * status (int)
- * delivery subscriber id (int)
- * delivery File ID (String)
- * result (int)
- * attempts (int)
- * reason (String)
- * record ID (long)
- *
- * @param ps the PreparedStatement to load
- */
- public void load(PreparedStatement ps) throws SQLException;
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/LogRecord.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/LogRecord.java
deleted file mode 100644
index 1ddc5094..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/LogRecord.java
+++ /dev/null
@@ -1,235 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.sql.Types;
-import java.text.ParseException;
-import java.util.Iterator;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-import com.att.research.datarouter.provisioning.utils.RLEBitSet;
-
-/**
- * The representation of a Log Record, as retrieved from the DB. Since this record format is only used
- * to replicate between provisioning servers, it is very bare-bones; e.g. there are no field setters and only 1 getter.
- * @author Robert Eby
- * @version $Id: LogRecord.java,v 1.7 2014/03/12 19:45:41 eby Exp $
- */
-public class LogRecord extends BaseLogRecord {
- /**
- * Print all log records whose RECORD_IDs are in the bit set provided.
- * @param os the {@link OutputStream} to print the records on
- * @param bs the {@link RLEBitSet} listing the record IDs to print
- * @throws IOException
- */
- public static void printLogRecords(OutputStream os, RLEBitSet bs) throws IOException {
- final String sql = "select * from LOG_RECORDS where RECORD_ID >= ? AND RECORD_ID <= ?";
- DB db = new DB();
- Connection conn = null;
- try {
- conn = db.getConnection();
- Statement stmt = conn.createStatement();
- Iterator iter = bs.getRangeIterator();
- PreparedStatement ps = conn.prepareStatement(sql);
- while (iter.hasNext()) {
- Long[] n = iter.next();
- ps.setLong(1, n[0]);
- ps.setLong(2, n[1]);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- LogRecord lr = new LogRecord(rs);
- os.write(lr.toString().getBytes());
- }
- rs.close();
- ps.clearParameters();
- }
- ps.close();
- stmt.close();
- } catch (SQLException e) {
- e.printStackTrace();
- } finally {
- if (conn != null)
- db.release(conn);
- }
- }
-
- private final String type;
- private final String feedFileid;
- private final String remoteAddr;
- private final String user;
- private final int status;
- private final int subid;
- private final String fileid;
- private final int result;
- private final int attempts;
- private final String reason;
- private final long record_id;
- private final long clength2;
-
- public LogRecord(ResultSet rs) throws SQLException {
- super(rs);
- this.type = rs.getString("TYPE");
- this.feedFileid = rs.getString("FEED_FILEID");
- this.remoteAddr = rs.getString("REMOTE_ADDR");
- this.user = rs.getString("USER");
- this.status = rs.getInt("STATUS");
-
- this.subid = rs.getInt("DELIVERY_SUBID");
- this.fileid = rs.getString("DELIVERY_FILEID");
- this.result = rs.getInt("RESULT");
-
- this.attempts = rs.getInt("ATTEMPTS");
- this.reason = rs.getString("REASON");
-
- this.record_id = rs.getLong("RECORD_ID");
- this.clength2 = rs.getLong("CONTENT_LENGTH_2");
- }
- public LogRecord(String[] pp) throws ParseException {
- super(pp);
- this.type = pp[8];
- this.feedFileid = pp[9];
- this.remoteAddr = pp[10];
- this.user = pp[11];
- this.status = Integer.parseInt(pp[12]);
-
- this.subid = Integer.parseInt(pp[13]);
- this.fileid = pp[14];
- this.result = Integer.parseInt(pp[15]);
-
- this.attempts = Integer.parseInt(pp[16]);
- this.reason = pp[17];
-
- this.record_id = Long.parseLong(pp[18]);
- this.clength2 = (pp.length == 20) ? Long.parseLong(pp[19]) : 0;
- }
-
- public long getRecordId() {
- return record_id;
- }
-
- @Override
- public String toString() {
- return
- sdf.format(getEventTime()) + "|"
- + "LOG|"
- + getPublishId() + "|"
- + getFeedid() + "|"
- + getRequestUri() + "|"
- + getMethod() + "|"
- + getContentType() + "|"
- + getContentLength() + "|"
- + type + "|"
- + feedFileid + "|"
- + remoteAddr + "|"
- + user + "|"
- + status + "|"
- + subid + "|"
- + fileid + "|"
- + result + "|"
- + attempts + "|"
- + reason + "|"
- + record_id + "|"
- + clength2
- + "\n";
- }
-
- @Override
- public void load(PreparedStatement ps) throws SQLException {
- ps.setString(1, type);
- super.load(ps); // loads fields 2-8
- if (type.equals("pub")) {
- ps.setString(9, feedFileid);
- ps.setString(10, remoteAddr);
- ps.setString(11, user);
- ps.setInt (12, status);
- ps.setNull (13, Types.INTEGER);
- ps.setNull (14, Types.VARCHAR);
- ps.setNull (15, Types.INTEGER);
- ps.setNull (16, Types.INTEGER);
- ps.setNull (17, Types.VARCHAR);
- ps.setLong (18, record_id);
- ps.setNull (19, Types.BIGINT);
- } else if (type.equals("del")) {
- ps.setNull (9, Types.VARCHAR);
- ps.setNull (10, Types.VARCHAR);
- ps.setString(11, user);
- ps.setNull (12, Types.INTEGER);
- ps.setInt (13, subid);
- ps.setString(14, fileid);
- ps.setInt (15, result);
- ps.setNull (16, Types.INTEGER);
- ps.setNull (17, Types.VARCHAR);
- ps.setLong (18, record_id);
- ps.setNull (19, Types.BIGINT);
- } else if (type.equals("exp")) {
- ps.setNull (9, Types.VARCHAR);
- ps.setNull (10, Types.VARCHAR);
- ps.setNull (11, Types.VARCHAR);
- ps.setNull (12, Types.INTEGER);
- ps.setInt (13, subid);
- ps.setString(14, fileid);
- ps.setNull (15, Types.INTEGER);
- ps.setInt (16, attempts);
- ps.setString(17, reason);
- ps.setLong (18, record_id);
- ps.setNull (19, Types.BIGINT);
- } else if (type.equals("pbf")) {
- ps.setString( 9, feedFileid);
- ps.setString(10, remoteAddr);
- ps.setString(11, user);
- ps.setNull (12, Types.INTEGER);
- ps.setNull (13, Types.INTEGER);
- ps.setNull (14, Types.VARCHAR);
- ps.setNull (15, Types.INTEGER);
- ps.setNull (16, Types.INTEGER);
- ps.setNull (17, Types.VARCHAR);
- ps.setLong (18, record_id);
- ps.setLong (19, clength2);
- } else if (type.equals("dlx")) {
- ps.setNull ( 9, Types.VARCHAR);
- ps.setNull (10, Types.VARCHAR);
- ps.setNull (11, Types.VARCHAR);
- ps.setNull (12, Types.INTEGER);
- ps.setInt (13, subid);
- ps.setNull (14, Types.VARCHAR);
- ps.setNull (15, Types.INTEGER);
- ps.setNull (16, Types.INTEGER);
- ps.setNull (17, Types.VARCHAR);
- ps.setLong (18, record_id);
- ps.setLong (19, clength2);
- }
- }
-
- public static void main(String[] a) throws IOException {
- LogRecord.printLogRecords(System.out, new RLEBitSet(a[0]));
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/NetworkRoute.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/NetworkRoute.java
deleted file mode 100644
index 59f21928..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/NetworkRoute.java
+++ /dev/null
@@ -1,230 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.log4j.Logger;
-import org.json.JSONObject;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * The representation of one route in the Network Route Table.
- *
- * @author Robert P. Eby
- * @version $Id: NetworkRoute.java,v 1.2 2013/12/16 20:30:23 eby Exp $
- */
-public class NetworkRoute extends NodeClass implements Comparable {
- private static Logger intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- private final int fromnode;
- private final int tonode;
- private final int vianode;
-
- /**
- * Get a set of all Network Routes in the DB. The set is sorted according to the natural sorting order
- * of the routes (based on the from and to node names in each route).
- * @return the sorted set
- */
- public static SortedSet getAllNetworkRoutes() {
- SortedSet set = new TreeSet();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select FROMNODE, TONODE, VIANODE from NETWORK_ROUTES");
- while (rs.next()) {
- int fromnode = rs.getInt("FROMNODE");
- int tonode = rs.getInt("TONODE");
- int vianode = rs.getInt("VIANODE");
- set.add(new NetworkRoute(fromnode, tonode, vianode));
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return set;
- }
-
- public NetworkRoute(String fromnode, String tonode) throws IllegalArgumentException {
- this.fromnode = lookupNodeName(fromnode);
- this.tonode = lookupNodeName(tonode);
- this.vianode = -1;
- }
-
- public NetworkRoute(String fromnode, String tonode, String vianode) throws IllegalArgumentException {
- this.fromnode = lookupNodeName(fromnode);
- this.tonode = lookupNodeName(tonode);
- this.vianode = lookupNodeName(vianode);
- }
-
- public NetworkRoute(JSONObject jo) throws IllegalArgumentException {
- this.fromnode = lookupNodeName(jo.getString("from"));
- this.tonode = lookupNodeName(jo.getString("to"));
- this.vianode = lookupNodeName(jo.getString("via"));
- }
-
- public NetworkRoute(int fromnode, int tonode, int vianode) throws IllegalArgumentException {
- this.fromnode = fromnode;
- this.tonode = tonode;
- this.vianode = vianode;
- }
-
- public int getFromnode() {
- return fromnode;
- }
-
- public int getTonode() {
- return tonode;
- }
-
- public int getVianode() {
- return vianode;
- }
-
- @Override
- public boolean doDelete(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- String sql = "delete from NETWORK_ROUTES where FROMNODE = ? AND TONODE = ?";
- ps = c.prepareStatement(sql);
- ps.setInt(1, fromnode);
- ps.setInt(2, tonode);
- ps.execute();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0007 doDelete: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
- @Override
- public boolean doInsert(Connection c) {
- boolean rv = false;
- if (this.vianode >= 0) {
- PreparedStatement ps = null;
- try {
- // Create the NETWORK_ROUTES row
- String sql = "insert into NETWORK_ROUTES (FROMNODE, TONODE, VIANODE) values (?, ?, ?)";
- ps = c.prepareStatement(sql);
- ps.setInt(1, this.fromnode);
- ps.setInt(2, this.tonode);
- ps.setInt(3, this.vianode);
- ps.execute();
- ps.close();
- rv = true;
- } catch (SQLException e) {
- intlogger.warn("PROV0005 doInsert: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- }
- return rv;
- }
-
- @Override
- public boolean doUpdate(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- String sql = "update NETWORK_ROUTES set VIANODE = ? where FROMNODE = ? and TONODE = ?";
- ps = c.prepareStatement(sql);
- ps.setInt(1, vianode);
- ps.setInt(2, fromnode);
- ps.setInt(3, tonode);
- ps.executeUpdate();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("from", lookupNodeID(fromnode));
- jo.put("to", lookupNodeID(tonode));
- jo.put("via", lookupNodeID(vianode));
- return jo;
- }
-
- @Override
- public String getKey() {
- return lookupNodeID(fromnode)+":"+lookupNodeID(tonode);
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof NetworkRoute))
- return false;
- NetworkRoute on = (NetworkRoute)obj;
- return (fromnode == on.fromnode) && (tonode == on.tonode) && (vianode == on.vianode);
- }
-
- @Override
- public int compareTo(NetworkRoute o) {
- if (this.fromnode == o.fromnode) {
- if (this.tonode == o.tonode)
- return this.vianode - o.vianode;
- return this.tonode - o.tonode;
- }
- return this.fromnode - o.fromnode;
- }
-
- @Override
- public String toString() {
- return String.format("NETWORK: from=%d, to=%d, via=%d", fromnode, tonode, vianode);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/NodeClass.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/NodeClass.java
deleted file mode 100644
index 321885b4..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/NodeClass.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-import org.apache.log4j.Logger;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * This class is used to aid in the mapping of node names from/to node IDs.
- *
- * @author Robert P. Eby
- * @version $Id: NodeClass.java,v 1.2 2014/01/15 16:08:43 eby Exp $
- */
-public abstract class NodeClass extends Syncable {
- private static Map map;
-
- public NodeClass() {
- // init on first use
- if (map == null) {
- reload();
- }
- }
-
- /**
- * Add nodes to the NODES table, when the NODES parameter value is changed.
- * Nodes are only added to the table, they are never deleted. The node name is normalized
- * to contain the domain (if missing).
- * @param nodes a pipe separated list of the current nodes
- */
- public static void setNodes(String[] nodes) {
- if (map == null)
- reload();
- int nextid = 0;
- for (Integer n : map.values()) {
- if (n >= nextid)
- nextid = n+1;
- }
- // take | separated list, add domain if needed.
- Logger intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- for (String node : nodes) {
- node = normalizeNodename(node);
- if (!map.containsKey(node)) {
- intlogger.info("..adding "+node+" to NODES with index "+nextid);
- map.put(node, nextid);
- PreparedStatement ps = null;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- ps = conn.prepareStatement("insert into NODES (NODEID, NAME, ACTIVE) values (?, ?, 1)");
- ps.setInt(1, nextid);
- ps.setString(2, node);
- ps.execute();
- ps.close();
- db.release(conn);
- } catch (SQLException e) {
- intlogger.warn("PROV0005 doInsert: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- nextid++;
- }
- }
- }
-
- public static void reload() {
- Map m = new HashMap();
- PreparedStatement ps = null;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- String sql = "select NODEID, NAME from NODES";
- ps = conn.prepareStatement(sql);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- int id = rs.getInt("NODEID");
- String name = rs.getString("NAME");
- m.put(name, id);
- }
- rs.close();
- ps.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- map = m;
- }
-
- public static Integer lookupNodeName(final String name) throws IllegalArgumentException {
- Integer n = map.get(name);
- if (n == null)
- throw new IllegalArgumentException("Invalid node name: "+name);
- return n;
- }
-
- public static Collection lookupNodeNames(String patt) throws IllegalArgumentException {
- Collection coll = new TreeSet();
- final Set keyset = map.keySet();
- for (String s : patt.toLowerCase().split(",")) {
- if (s.endsWith("*")) {
- s = s.substring(0, s.length()-1);
- for (String s2 : keyset) {
- if (s2.startsWith(s))
- coll.add(s2);
- }
- } else if (keyset.contains(s)) {
- coll.add(s);
- } else if (keyset.contains(normalizeNodename(s))) {
- coll.add(normalizeNodename(s));
- } else {
- throw new IllegalArgumentException("Invalid node name: "+s);
- }
- }
- return coll;
- }
-
- protected String lookupNodeID(int n) {
- for (String s : map.keySet()) {
- if (map.get(s) == n)
- return s;
- }
- return null;
- }
-
- public static String normalizeNodename(String s) {
- if (s != null && s.indexOf('.') <= 0) {
- Parameters p = Parameters.getParameter(Parameters.PROV_DOMAIN);
- if (p != null) {
- String domain = p.getValue();
- s += "." + domain;
- }
- }
- return s.toLowerCase();
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Parameters.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Parameters.java
deleted file mode 100644
index 1cb4bcad..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Parameters.java
+++ /dev/null
@@ -1,257 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-import org.json.JSONObject;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * Methods to provide access to Provisioning parameters in the DB.
- * This class also provides constants of the standard parameters used by the Data Router.
- * @author Robert Eby
- * @version $Id: Parameters.java,v 1.11 2014/03/12 19:45:41 eby Exp $
- */
-public class Parameters extends Syncable {
- public static final String PROV_REQUIRE_SECURE = "PROV_REQUIRE_SECURE";
- public static final String PROV_REQUIRE_CERT = "PROV_REQUIRE_CERT";
- public static final String PROV_AUTH_ADDRESSES = "PROV_AUTH_ADDRESSES";
- public static final String PROV_AUTH_SUBJECTS = "PROV_AUTH_SUBJECTS";
- public static final String PROV_NAME = "PROV_NAME";
- public static final String PROV_ACTIVE_NAME = "PROV_ACTIVE_NAME";
- public static final String PROV_DOMAIN = "PROV_DOMAIN";
- public static final String PROV_MAXFEED_COUNT = "PROV_MAXFEED_COUNT";
- public static final String PROV_MAXSUB_COUNT = "PROV_MAXSUB_COUNT";
- public static final String PROV_POKETIMER1 = "PROV_POKETIMER1";
- public static final String PROV_POKETIMER2 = "PROV_POKETIMER2";
- public static final String PROV_SPECIAL_SUBNET = "PROV_SPECIAL_SUBNET";
- public static final String PROV_LOG_RETENTION = "PROV_LOG_RETENTION";
- public static final String NODES = "NODES";
- public static final String ACTIVE_POD = "ACTIVE_POD";
- public static final String STANDBY_POD = "STANDBY_POD";
- public static final String LOGROLL_INTERVAL = "LOGROLL_INTERVAL";
- public static final String DELIVERY_INIT_RETRY_INTERVAL = "DELIVERY_INIT_RETRY_INTERVAL";
- public static final String DELIVERY_MAX_RETRY_INTERVAL = "DELIVERY_MAX_RETRY_INTERVAL";
- public static final String DELIVERY_RETRY_RATIO = "DELIVERY_RETRY_RATIO";
- public static final String DELIVERY_MAX_AGE = "DELIVERY_MAX_AGE";
- public static final String THROTTLE_FILTER = "THROTTLE_FILTER";
- public static final String STATIC_ROUTING_NODES = "STATIC_ROUTING_NODES"; //Adding new param for static Routing - Rally:US664862-1610
-
- private static Logger intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
-
- private String keyname;
- private String value;
-
- /**
- * Get all parameters in the DB as a Map.
- * @return the Map of keynames/values from the DB.
- */
- public static Map getParameters() {
- Map props = new HashMap();
- for (Parameters p : getParameterCollection()) {
- props.put(p.getKeyname(), p.getValue());
- }
- return props;
- }
- public static Collection getParameterCollection() {
- Collection coll = new ArrayList();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- String sql = "select * from PARAMETERS";
- ResultSet rs = stmt.executeQuery(sql);
- while (rs.next()) {
- Parameters p = new Parameters(rs);
- coll.add(p);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return coll;
- }
- /**
- * Get a specific parameter value from the DB.
- * @param k the key to lookup
- * @return the value, or null if non-existant
- */
- public static Parameters getParameter(String k) {
- Parameters v = null;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- String sql = "select KEYNAME, VALUE from PARAMETERS where KEYNAME = \"" + k + "\"";
- ResultSet rs = stmt.executeQuery(sql);
- if (rs.next()) {
- v = new Parameters(rs);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return v;
- }
-
- public Parameters() {
- this("", "");
- }
- public Parameters(String k, String v) {
- this.keyname = k;
- this.value = v;
- }
- public Parameters(ResultSet rs) throws SQLException {
- this.keyname = rs.getString("KEYNAME");
- this.value = rs.getString("VALUE");
- }
- public String getKeyname() {
- return keyname;
- }
- public void setKeyname(String keyname) {
- this.keyname = keyname;
- }
- public String getValue() {
- return value;
- }
- public void setValue(String value) {
- this.value = value;
- }
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("keyname", keyname);
- jo.put("value", value);
- return jo;
- }
- @Override
- public boolean doInsert(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- // Create the SUBSCRIPTIONS row
- String sql = "insert into PARAMETERS values (?, ?)";
- ps = c.prepareStatement(sql);
- ps.setString(1, getKeyname());
- ps.setString(2, getValue());
- ps.execute();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0005 doInsert: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
- @Override
- public boolean doUpdate(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- // Update the PARAMETERS row
- String sql = "update PARAMETERS set VALUE = ? where KEYNAME = ?";
- ps = c.prepareStatement(sql);
- ps.setString(1, getValue());
- ps.setString(2, getKeyname());
- ps.executeUpdate();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
- @Override
- public boolean doDelete(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- // Create the SUBSCRIPTIONS row
- String sql = "delete from PARAMETERS where KEYNAME = ?";
- ps = c.prepareStatement(sql);
- ps.setString(1, getKeyname());
- ps.execute();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0007 doDelete: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
- @Override
- public String getKey() {
- return getKeyname();
- }
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof Parameters))
- return false;
- Parameters of = (Parameters) obj;
- if (!keyname.equals(of.keyname))
- return false;
- if (!value.equals(of.value))
- return false;
- return true;
- }
-
- @Override
- public String toString() {
- return "PARAM: keyname=" + keyname + ", value=" + value;
- }
-}
-
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/PubFailRecord.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/PubFailRecord.java
deleted file mode 100644
index 1fe1473b..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/PubFailRecord.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Types;
-import java.text.ParseException;
-
-/**
- * The representation of a Publish Failure (PBF) Record, as retrieved from the DB.
- * @author Robert Eby
- * @version $Id: PubFailRecord.java,v 1.1 2013/10/28 18:06:53 eby Exp $
- */
-public class PubFailRecord extends BaseLogRecord {
- private long contentLengthReceived;
- private String sourceIP;
- private String user;
- private String error;
-
- public PubFailRecord(String[] pp) throws ParseException {
- super(pp);
- this.contentLengthReceived = Long.parseLong(pp[8]);
- this.sourceIP = pp[9];
- this.user = pp[10];
- this.error = pp[11];
- }
- public PubFailRecord(ResultSet rs) throws SQLException {
- super(rs);
- // Note: because this record should be "rare" these fields are mapped to unconventional fields in the DB
- this.contentLengthReceived = rs.getLong("CONTENT_LENGTH_2");
- this.sourceIP = rs.getString("REMOTE_ADDR");
- this.user = rs.getString("USER");
- this.error = rs.getString("FEED_FILEID");
- }
- public long getContentLengthReceived() {
- return contentLengthReceived;
- }
- public String getSourceIP() {
- return sourceIP;
- }
- public String getUser() {
- return user;
- }
- public String getError() {
- return error;
- }
- @Override
- public void load(PreparedStatement ps) throws SQLException {
- ps.setString(1, "pbf"); // field 1: type
- super.load(ps); // loads fields 2-8
- ps.setString( 9, getError());
- ps.setString(10, getSourceIP());
- ps.setString(11, getUser());
- ps.setNull (12, Types.INTEGER);
- ps.setNull (13, Types.INTEGER);
- ps.setNull (14, Types.VARCHAR);
- ps.setNull (15, Types.INTEGER);
- ps.setNull (16, Types.INTEGER);
- ps.setNull (17, Types.VARCHAR);
- ps.setLong (19, getContentLengthReceived());
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/PublishRecord.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/PublishRecord.java
deleted file mode 100644
index a844c768..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/PublishRecord.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Types;
-import java.text.ParseException;
-import java.util.LinkedHashMap;
-
-import org.json.LOGJSONObject;
-
-/**
- * The representation of a Publish Record, as retrieved from the DB.
- * @author Robert Eby
- * @version $Id: PublishRecord.java,v 1.6 2013/10/28 18:06:53 eby Exp $
- */
-public class PublishRecord extends BaseLogRecord {
- private String feedFileid;
- private String remoteAddr;
- private String user;
- private int status;
-
- public PublishRecord(String[] pp) throws ParseException {
- super(pp);
-// This is too slow!
-// Matcher m = Pattern.compile(".*/publish/(\\d+)/(.*)$").matcher(pp[4]);
-// if (!m.matches())
-// throw new ParseException("bad pattern", 0);
-// this.feedFileid = m.group(2);
- int ix = pp[4].indexOf("/publish/");
- if (ix < 0)
- throw new ParseException("bad pattern", 0);
- ix = pp[4].indexOf('/', ix+9);
- if (ix < 0)
- throw new ParseException("bad pattern", 0);
- this.feedFileid = pp[4].substring(ix+1);
- this.remoteAddr = pp[8];
- this.user = pp[9];
- this.status = Integer.parseInt(pp[10]);
- }
- public PublishRecord(ResultSet rs) throws SQLException {
- super(rs);
- this.feedFileid = rs.getString("FEED_FILEID");
- this.remoteAddr = rs.getString("REMOTE_ADDR");
- this.user = rs.getString("USER");
- this.status = rs.getInt("STATUS");
- }
- public String getFeedFileid() {
- return feedFileid;
- }
-
- public void setFeedFileid(String feedFileid) {
- this.feedFileid = feedFileid;
- }
-
- public String getRemoteAddr() {
- return remoteAddr;
- }
-
- public void setRemoteAddr(String remoteAddr) {
- this.remoteAddr = remoteAddr;
- }
-
- public String getUser() {
- return user;
- }
-
- public void setUser(String user) {
- this.user = user;
- }
-
- public int getStatus() {
- return status;
- }
-
- public void setStatus(int status) {
- this.status = status;
- }
-
-
- public LOGJSONObject reOrderObject(LOGJSONObject jo) {
- LinkedHashMap logrecordObj = new LinkedHashMap();
-
-
- logrecordObj.put("statusCode", jo.get("statusCode"));
- logrecordObj.put("publishId", jo.get("publishId"));
- logrecordObj.put("requestURI", jo.get("requestURI"));
- logrecordObj.put("sourceIP", jo.get("sourceIP"));
- logrecordObj.put("method", jo.get("method"));
- logrecordObj.put("contentType", jo.get("contentType"));
- logrecordObj.put("endpointId", jo.get("endpointId"));
- logrecordObj.put("type", jo.get("type"));
- logrecordObj.put("date", jo.get("date"));
- logrecordObj.put("contentLength", jo.get("contentLength"));
-
- LOGJSONObject newjo = new LOGJSONObject(logrecordObj);
- return newjo;
- }
-
- @Override
- public LOGJSONObject asJSONObject() {
- LOGJSONObject jo = super.asJSONObject();
- jo.put("type", "pub");
-// jo.put("feedFileid", feedFileid);
-// jo.put("remoteAddr", remoteAddr);
-// jo.put("user", user);
- jo.put("sourceIP", remoteAddr);
- jo.put("endpointId", user);
- jo.put("statusCode", status);
-
- LOGJSONObject newjo = this.reOrderObject(jo);
-
- return newjo;
- }
- @Override
- public void load(PreparedStatement ps) throws SQLException {
- ps.setString(1, "pub"); // field 1: type
- super.load(ps); // loads fields 2-8
- ps.setString( 9, getFeedFileid());
- ps.setString(10, getRemoteAddr());
- ps.setString(11, getUser());
- ps.setInt (12, getStatus());
- ps.setNull (13, Types.INTEGER);
- ps.setNull (14, Types.VARCHAR);
- ps.setNull (15, Types.INTEGER);
- ps.setNull (16, Types.INTEGER);
- ps.setNull (17, Types.VARCHAR);
- ps.setNull (19, Types.BIGINT);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/SubDelivery.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/SubDelivery.java
deleted file mode 100644
index 66e44af5..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/SubDelivery.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-
-import org.json.JSONObject;
-
-/**
- * The representation of Subscription delivery information. This includes the URL to deliver to,
- * login and password, and whether to use the "HTTP 100-continue" feature for this subscription.
- * @author Robert Eby
- * @version $Id: SubDelivery.java,v 1.2 2013/06/20 14:11:05 eby Exp $
- */
-public class SubDelivery implements JSONable {
- private String url;
- private String user;
- private String password;
- private boolean use100;
-
- public SubDelivery() {
- this("", "", "", false);
- }
- public SubDelivery(String url, String user, String password, boolean use100) {
- this.url = url;
- this.user = user;
- this.password = password;
- this.use100 = use100;
- }
- public SubDelivery(ResultSet rs) throws SQLException {
- this.url = rs.getString("DELIVERY_URL");
- this.user = rs.getString("DELIVERY_USER");
- this.password = rs.getString("DELIVERY_PASSWORD");
- this.use100 = rs.getBoolean("DELIVERY_USE100");
-
- }
- public String getUrl() {
- return url;
- }
- public void setUrl(String url) {
- this.url = url;
- }
- public String getUser() {
- return user;
- }
- public void setUser(String user) {
- this.user = user;
- }
- public String getPassword() {
- return password;
- }
- public void setPassword(String password) {
- this.password = password;
- }
-
- public boolean isUse100() {
- return use100;
- }
- public void setUse100(boolean use100) {
- this.use100 = use100;
- }
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("url", url);
- jo.put("user", user);
- jo.put("password", password);
- jo.put("use100", use100);
- return jo;
- }
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof SubDelivery))
- return false;
- SubDelivery os = (SubDelivery) obj;
- if (!url.equals(os.url))
- return false;
- if (!user.equals(os.user))
- return false;
- if (!password.equals(os.password))
- return false;
- if (use100 != os.use100)
- return false;
- return true;
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/SubLinks.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/SubLinks.java
deleted file mode 100644
index 27128d86..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/SubLinks.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.io.InvalidObjectException;
-
-import org.json.JSONObject;
-
-/**
- * The URLs associated with a Subscription.
- * @author Robert Eby
- * @version $Id: SubLinks.java,v 1.3 2013/07/05 13:48:05 eby Exp $
- */
-public class SubLinks implements JSONable {
- private String self;
- private String feed;
- private String log;
-
- public SubLinks() {
- self = feed = log = null;
- }
- public SubLinks(JSONObject jo) throws InvalidObjectException {
- this();
- self = jo.getString("self");
- feed = jo.getString("feed");
- log = jo.getString("log");
- }
- public SubLinks(String self, String feed, String log) {
- this.self = self;
- this.feed = feed;
- this.log = log;
- }
- public String getSelf() {
- return self;
- }
- public void setSelf(String self) {
- this.self = self;
- }
- public String getFeed() {
- return feed;
- }
- public void setFeed(String feed) {
- this.feed = feed;
- }
- public String getLog() {
- return log;
- }
- public void setLog(String log) {
- this.log = log;
- }
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("self", self);
- jo.put("feed", feed);
- jo.put("log", log);
- return jo;
- }
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof SubLinks))
- return false;
- SubLinks os = (SubLinks) obj;
- if (!self.equals(os.self))
- return false;
- if (!feed.equals(os.feed))
- return false;
- if (!log.equals(os.log))
- return false;
- return true;
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Subscription.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Subscription.java
deleted file mode 100644
index 7ab10a45..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Subscription.java
+++ /dev/null
@@ -1,511 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.io.InvalidObjectException;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-import org.json.JSONObject;
-import java.util.Properties;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-import com.att.research.datarouter.provisioning.utils.URLUtilities;
-
-/**
- * The representation of a Subscription. Subscriptions can be retrieved from the DB, or stored/updated in the DB.
- * @author Robert Eby
- * @version $Id: Subscription.java,v 1.9 2013/10/28 18:06:53 eby Exp $
- */
-public class Subscription extends Syncable {
- private static Logger intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- private static int next_subid = getMaxSubID() + 1;
-
- private int subid;
- private int feedid;
- private int groupid; //New field is added - Groups feature Rally:US708115 - 1610
- private SubDelivery delivery;
- private boolean metadataOnly;
- private String subscriber;
- private SubLinks links;
- private boolean suspended;
- private Date last_mod;
- private Date created_date;
-
- public static Subscription getSubscriptionMatching(Subscription sub) {
- SubDelivery deli = sub.getDelivery();
- String sql = String.format(
- "select * from SUBSCRIPTIONS where FEEDID = %d and DELIVERY_URL = \"%s\" and DELIVERY_USER = \"%s\" and DELIVERY_PASSWORD = \"%s\" and DELIVERY_USE100 = %d and METADATA_ONLY = %d",
- sub.getFeedid(),
- deli.getUrl(),
- deli.getUser(),
- deli.getPassword(),
- deli.isUse100() ? 1 : 0,
- sub.isMetadataOnly() ? 1 : 0
- );
- List list = getSubscriptionsForSQL(sql);
- return list.size() > 0 ? list.get(0) : null;
- }
- public static Subscription getSubscriptionById(int id) {
- String sql = "select * from SUBSCRIPTIONS where SUBID = " + id;
- List list = getSubscriptionsForSQL(sql);
- return list.size() > 0 ? list.get(0) : null;
- }
- public static Collection getAllSubscriptions() {
- return getSubscriptionsForSQL("select * from SUBSCRIPTIONS");
- }
- private static List getSubscriptionsForSQL(String sql) {
- List list = new ArrayList();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- while (rs.next()) {
- Subscription sub = new Subscription(rs);
- list.add(sub);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return list;
- }
- public static int getMaxSubID() {
- int max = 0;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select MAX(subid) from SUBSCRIPTIONS");
- if (rs.next()) {
- max = rs.getInt(1);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- intlogger.info("getMaxSubID: "+e.getMessage());
- e.printStackTrace();
- }
- return max;
- }
- public static Collection getSubscriptionUrlList(int feedid) {
- List list = new ArrayList();
- String sql = "select SUBID from SUBSCRIPTIONS where FEEDID = "+feedid;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery(sql);
- while (rs.next()) {
- int subid = rs.getInt("SUBID");
- list.add(URLUtilities.generateSubscriptionURL(subid));
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- return list;
- }
- /**
- * Return a count of the number of active subscriptions in the DB.
- * @return the count
- */
- public static int countActiveSubscriptions() {
- int count = 0;
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("select count(*) from SUBSCRIPTIONS");
- if (rs.next()) {
- count = rs.getInt(1);
- }
- rs.close();
- stmt.close();
- db.release(conn);
- } catch (SQLException e) {
- intlogger.warn("PROV0008 countActiveSubscriptions: "+e.getMessage());
- e.printStackTrace();
- }
- return count;
- }
-
- public Subscription() {
- this("", "", "");
- }
- public Subscription(String url, String user, String password) {
- this.subid = -1;
- this.feedid = -1;
- this.groupid = -1; //New field is added - Groups feature Rally:US708115 - 1610
- this.delivery = new SubDelivery(url, user, password, false);
- this.metadataOnly = false;
- this.subscriber = "";
- this.links = new SubLinks();
- this.suspended = false;
- this.last_mod = new Date();
- this.created_date = new Date();
- }
- public Subscription(ResultSet rs) throws SQLException {
- this.subid = rs.getInt("SUBID");
- this.feedid = rs.getInt("FEEDID");
- this.groupid = rs.getInt("GROUPID"); //New field is added - Groups feature Rally:US708115 - 1610
- this.delivery = new SubDelivery(rs);
- this.metadataOnly = rs.getBoolean("METADATA_ONLY");
- this.subscriber = rs.getString("SUBSCRIBER");
- this.links = new SubLinks(rs.getString("SELF_LINK"), URLUtilities.generateFeedURL(feedid), rs.getString("LOG_LINK"));
- this.suspended = rs.getBoolean("SUSPENDED");
- this.last_mod = rs.getDate("LAST_MOD");
- this.created_date = rs.getDate("CREATED_DATE");
- }
- public Subscription(JSONObject jo) throws InvalidObjectException {
- this("", "", "");
- try {
- // The JSONObject is assumed to contain a vnd.att-dr.subscription representation
- this.subid = jo.optInt("subid", -1);
- this.feedid = jo.optInt("feedid", -1);
- this.groupid = jo.optInt("groupid", -1); //New field is added - Groups feature Rally:US708115 - 1610
-
- JSONObject jdeli = jo.getJSONObject("delivery");
- String url = jdeli.getString("url");
- String user = jdeli.getString("user");
- String password = jdeli.getString("password");
- boolean use100 = jdeli.getBoolean("use100");
-
-
- //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
- Properties p = (new DB()).getProperties();
- if(p.get("com.att.research.datarouter.provserver.https.relaxation").toString().equals("false") && !jo.has("sync")) {
- if (!url.startsWith("https://"))
- throw new InvalidObjectException("delivery URL is not HTTPS");
- }
-
- if (url.length() > 256)
- throw new InvalidObjectException("delivery url field is too long");
- if (user.length() > 20)
- throw new InvalidObjectException("delivery user field is too long");
- if (password.length() > 32)
- throw new InvalidObjectException("delivery password field is too long");
- this.delivery = new SubDelivery(url, user, password, use100);
-
- this.metadataOnly = jo.getBoolean("metadataOnly");
- this.suspended = jo.optBoolean("suspend", false);
-
- this.subscriber = jo.optString("subscriber", "");
- JSONObject jol = jo.optJSONObject("links");
- this.links = (jol == null) ? (new SubLinks()) : (new SubLinks(jol));
- } catch (InvalidObjectException e) {
- throw e;
- } catch (Exception e) {
- throw new InvalidObjectException("invalid JSON: "+e.getMessage());
- }
- }
- public int getSubid() {
- return subid;
- }
- public void setSubid(int subid) {
- this.subid = subid;
-
- // Create link URLs
- SubLinks sl = getLinks();
- sl.setSelf(URLUtilities.generateSubscriptionURL(subid));
- sl.setLog(URLUtilities.generateSubLogURL(subid));
- }
- public int getFeedid() {
- return feedid;
- }
- public void setFeedid(int feedid) {
- this.feedid = feedid;
-
- // Create link URLs
- SubLinks sl = getLinks();
- sl.setFeed(URLUtilities.generateFeedURL(feedid));
- }
-
- //New getter setters for Groups feature Rally:US708115 - 1610
- public int getGroupid() {
- return groupid;
- }
- public void setGroupid(int groupid) {
- this.groupid = groupid;
- }
-
- public SubDelivery getDelivery() {
- return delivery;
- }
- public void setDelivery(SubDelivery delivery) {
- this.delivery = delivery;
- }
- public boolean isMetadataOnly() {
- return metadataOnly;
- }
- public void setMetadataOnly(boolean metadataOnly) {
- this.metadataOnly = metadataOnly;
- }
- public boolean isSuspended() {
- return suspended;
- }
- public void setSuspended(boolean suspended) {
- this.suspended = suspended;
- }
- public String getSubscriber() {
- return subscriber;
- }
- public void setSubscriber(String subscriber) {
- if (subscriber != null) {
- if (subscriber.length() > 8)
- subscriber = subscriber.substring(0, 8);
- this.subscriber = subscriber;
- }
- }
- public SubLinks getLinks() {
- return links;
- }
- public void setLinks(SubLinks links) {
- this.links = links;
- }
-
- @Override
- public JSONObject asJSONObject() {
- JSONObject jo = new JSONObject();
- jo.put("subid", subid);
- jo.put("feedid", feedid);
- jo.put("groupid", groupid); //New field is added - Groups feature Rally:US708115 - 1610
- jo.put("delivery", delivery.asJSONObject());
- jo.put("metadataOnly", metadataOnly);
- jo.put("subscriber", subscriber);
- jo.put("links", links.asJSONObject());
- jo.put("suspend", suspended);
- jo.put("last_mod", last_mod.getTime());
- jo.put("created_date", created_date.getTime());
- return jo;
- }
- public JSONObject asLimitedJSONObject() {
- JSONObject jo = asJSONObject();
- jo.remove("subid");
- jo.remove("feedid");
- jo.remove("last_mod");
- return jo;
- }
- public JSONObject asJSONObject(boolean hidepasswords) {
- JSONObject jo = asJSONObject();
- if (hidepasswords) {
- jo.remove("subid"); // we no longer hide passwords, however we do hide these
- jo.remove("feedid");
- jo.remove("last_mod");
- jo.remove("created_date");
- }
- return jo;
- }
- @Override
- public boolean doInsert(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- if (subid == -1) {
- // No feed ID assigned yet, so assign the next available one
- setSubid(next_subid++);
- }
- // In case we insert a feed from synchronization
- if (subid > next_subid)
- next_subid = subid+1;
-
- // Create the SUBSCRIPTIONS row
- String sql = "insert into SUBSCRIPTIONS (SUBID, FEEDID, DELIVERY_URL, DELIVERY_USER, DELIVERY_PASSWORD, DELIVERY_USE100, METADATA_ONLY, SUBSCRIBER, SUSPENDED, GROUPID) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
- ps = c.prepareStatement(sql, new String[] { "SUBID" });
- ps.setInt(1, subid);
- ps.setInt(2, feedid);
- ps.setString(3, getDelivery().getUrl());
- ps.setString(4, getDelivery().getUser());
- ps.setString(5, getDelivery().getPassword());
- ps.setInt(6, getDelivery().isUse100()?1:0);
- ps.setInt(7, isMetadataOnly()?1:0);
- ps.setString(8, getSubscriber());
- ps.setBoolean(9, isSuspended());
- ps.setInt(10, groupid); //New field is added - Groups feature Rally:US708115 - 1610
- ps.execute();
- ps.close();
-// ResultSet rs = ps.getGeneratedKeys();
-// rs.first();
-// setSubid(rs.getInt(1)); // side effect - sets the link URLs
-// ps.close();
-
- // Update the row to set the URLs
- sql = "update SUBSCRIPTIONS set SELF_LINK = ?, LOG_LINK = ? where SUBID = ?";
- ps = c.prepareStatement(sql);
- ps.setString(1, getLinks().getSelf());
- ps.setString(2, getLinks().getLog());
- ps.setInt(3, subid);
- ps.execute();
- ps.close();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0005 doInsert: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
- @Override
- public boolean doUpdate(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- String sql = "update SUBSCRIPTIONS set DELIVERY_URL = ?, DELIVERY_USER = ?, DELIVERY_PASSWORD = ?, DELIVERY_USE100 = ?, METADATA_ONLY = ?, SUSPENDED = ?, GROUPID = ? where SUBID = ?";
- ps = c.prepareStatement(sql);
- ps.setString(1, delivery.getUrl());
- ps.setString(2, delivery.getUser());
- ps.setString(3, delivery.getPassword());
- ps.setInt(4, delivery.isUse100()?1:0);
- ps.setInt(5, isMetadataOnly()?1:0);
- ps.setInt(6, suspended ? 1 : 0);
- ps.setInt(7, groupid); //New field is added - Groups feature Rally:US708115 - 1610
- ps.setInt(8, subid);
- ps.executeUpdate();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
-
-
- /**Rally US708115
- * Change Ownership of Subscription - 1610
- * */
- public boolean changeOwnerShip() {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
-
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection c = db.getConnection();
- String sql = "update SUBSCRIPTIONS set SUBSCRIBER = ? where SUBID = ?";
- ps = c.prepareStatement(sql);
- ps.setString(1, this.subscriber);
- ps.setInt(2, subid);
- ps.execute();
- ps.close();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
-
-
-
- @Override
- public boolean doDelete(Connection c) {
- boolean rv = true;
- PreparedStatement ps = null;
- try {
- String sql = "delete from SUBSCRIPTIONS where SUBID = ?";
- ps = c.prepareStatement(sql);
- ps.setInt(1, subid);
- ps.execute();
- } catch (SQLException e) {
- rv = false;
- intlogger.warn("PROV0007 doDelete: "+e.getMessage());
- e.printStackTrace();
- } finally {
- try {
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- }
- }
- return rv;
- }
- @Override
- public String getKey() {
- return ""+getSubid();
- }
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof Subscription))
- return false;
- Subscription os = (Subscription) obj;
- if (subid != os.subid)
- return false;
- if (feedid != os.feedid)
- return false;
- if (groupid != os.groupid) //New field is added - Groups feature Rally:US708115 - 1610
- return false;
- if (!delivery.equals(os.delivery))
- return false;
- if (metadataOnly != os.metadataOnly)
- return false;
- if (!subscriber.equals(os.subscriber))
- return false;
- if (!links.equals(os.links))
- return false;
- if (suspended != os.suspended)
- return false;
- return true;
- }
-
- @Override
- public String toString() {
- return "SUB: subid=" + subid + ", feedid=" + feedid;
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Syncable.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Syncable.java
deleted file mode 100644
index 00163c1c..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Syncable.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.Connection;
-
-import org.json.JSONObject;
-
-/**
- * This abstract class defines the "contract" for beans that can be sync-ed with the database,
- * by means of straight comparison. The getKey method is used to return the primary key
- * used to identify a record.
- *
- * @author Robert Eby
- * @version $Id: Syncable.java,v 1.1 2013/07/05 13:48:05 eby Exp $
- */
-public abstract class Syncable implements Deleteable, Insertable, Updateable, JSONable {
- @Override
- abstract public JSONObject asJSONObject();
-
- @Override
- abstract public boolean doUpdate(Connection c);
-
- @Override
- abstract public boolean doInsert(Connection c);
-
- @Override
- abstract public boolean doDelete(Connection c);
-
- /**
- * Get the "natural key" for this object type, as a String.
- * @return the key
- */
- abstract public String getKey();
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Updateable.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Updateable.java
deleted file mode 100644
index a9b19e7c..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/Updateable.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.provisioning.beans;
-
-import java.sql.Connection;
-
/**
 * An object that can be UPDATE-ed in the database.
 * @author Robert Eby
 * @version $Id: Updateable.java,v 1.2 2013/05/29 14:44:36 eby Exp $
 */
public interface Updateable {
    /**
     * Update this object in the DB.
     * @param c the JDBC Connection to use
     * @return true if the UPDATE succeeded, false otherwise
     */
    boolean doUpdate(Connection c);
}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/package.html b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/package.html
deleted file mode 100644
index 4b28053a..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/beans/package.html
+++ /dev/null
@@ -1,31 +0,0 @@
-#-------------------------------------------------------------------------------
-# ============LICENSE_START==================================================
-# * org.onap.dmaap
-# * ===========================================================================
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# * ===========================================================================
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# * ============LICENSE_END====================================================
-# *
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
-# *
-#-------------------------------------------------------------------------------
-
-
-
-
-This package provides beans to represent the basic provisioning objects of the Data Router application.
-These objects are defined by the document "Data Router Release 1, Provisioning API, Version 1.2".
-
-
-
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/eelf/EelfMsgs.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/eelf/EelfMsgs.java
deleted file mode 100644
index 3a230416..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/eelf/EelfMsgs.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-package com.att.research.datarouter.provisioning.eelf;
-
-import com.att.eelf.i18n.EELFResolvableErrorEnum;
-import com.att.eelf.i18n.EELFResourceManager;
-
/**
 * EELF message identifiers used by the provisioning server.  Each constant names
 * a message template resolved through EELFResourceManager; the templates live in
 * the "EelfMessages" resource bundle loaded by the static initializer below.
 */
public enum EelfMsgs implements EELFResolvableErrorEnum {

    /**
     * Application message prints user (accepts one argument)
     */
    MESSAGE_WITH_BEHALF,

    /**
     * Application message prints user and FeedID (accepts two arguments)
     */

    MESSAGE_WITH_BEHALF_AND_FEEDID,

    /**
     * Application message prints user and SUBID (accepts two arguments)
     */

    MESSAGE_WITH_BEHALF_AND_SUBID;



    /**
     * Static initializer to ensure the resource bundles for this class are loaded...
     * Here this application loads messages from three bundles
     */
    // NOTE(review): only one bundle ("EelfMessages") is loaded here despite the
    // comment's mention of three — confirm whether more bundles were intended.
    static {
        EELFResourceManager.loadMessageBundle("EelfMessages");
    }
}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/eelf/JettyFilter.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/eelf/JettyFilter.java
deleted file mode 100644
index cfef910c..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/eelf/JettyFilter.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-package com.att.research.datarouter.provisioning.eelf;
-
-import ch.qos.logback.classic.spi.ILoggingEvent;
-import ch.qos.logback.core.filter.Filter;
-import ch.qos.logback.core.spi.FilterReply;
-
-public class JettyFilter extends Filter{
- @Override
- public FilterReply decide(ILoggingEvent event) {
- if (event.getLoggerName().contains("org.eclipse.jetty")) {
- return FilterReply.ACCEPT;
- } else {
- return FilterReply.DENY;
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/package.html b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/package.html
deleted file mode 100644
index 7b009312..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/package.html
+++ /dev/null
@@ -1,123 +0,0 @@
-#-------------------------------------------------------------------------------
-# ============LICENSE_START==================================================
-# * org.onap.dmaap
-# * ===========================================================================
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# * ===========================================================================
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# * ============LICENSE_END====================================================
-# *
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
-# *
-#-------------------------------------------------------------------------------
-
-
-
-
-This package provides the servlets used by the provisioning server for the Data Router application.
-The URL paths are taken from the document "URLs for DR Release 1, Version 1.2".
-
-
-
-URL Path Summary
-
- URL Path
- Symbolic Name
- Servlet Name
- Allowed Methods
-
-
- /
- <drFeedsUrl>
- {@link com.att.research.datarouter.provisioning.DRFeedsServlet}
- DELETE
- GET
- POST
- PUT
-
-
- /feed/feedid
- <feedUrl>
- {@link com.att.research.datarouter.provisioning.FeedServlet}
- DELETE
- GET
- POST
- PUT
-
-
- /publish/feedid
- <publishUrl>
- {@link com.att.research.datarouter.provisioning.PublishServlet}
- DELETE
- GET
- POST
- PUT
-
-
- /subscribe/feedid
- <subscribeUrl>
- {@link com.att.research.datarouter.provisioning.SubscribeServlet}
- DELETE
- GET
- POST
- PUT
-
-
- /feedlog/feedid
- <feedLogUrl>
- {@link com.att.research.datarouter.provisioning.FeedLogServlet}
- DELETE
- GET
- POST
- PUT
-
-
- /subs/subid
- <subscriptionUrl>
- {@link com.att.research.datarouter.provisioning.SubscriptionServlet}
- DELETE
- GET
- POST
- PUT
-
-
- /sublog/subid
- <subLogUrl>
- {@link com.att.research.datarouter.provisioning.SubLogServlet}
- DELETE
- GET
- POST
- PUT
-
-
- /internal/*
- <internalUrl>
- {@link com.att.research.datarouter.provisioning.InternalServlet}
- DELETE
- GET
- POST
- PUT
-
-
- /internal/route/*
- <routeUrl>
- {@link com.att.research.datarouter.provisioning.RouteServlet}
- DELETE
- GET
- POST
- PUT
-
-
-
-
-
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/DB.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/DB.java
deleted file mode 100644
index ec4b0e68..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/DB.java
+++ /dev/null
@@ -1,711 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.utils;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.LineNumberReader;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.NoSuchElementException;
-import java.util.Properties;
-import java.util.Queue;
-import java.util.Set;
-
-import org.apache.log4j.Logger;
-
-import com.att.research.datarouter.provisioning.beans.DeliveryRecord;
-import com.att.research.datarouter.provisioning.beans.ExpiryRecord;
-import com.att.research.datarouter.provisioning.beans.Loadable;
-import com.att.research.datarouter.provisioning.beans.PublishRecord;
-
-/**
- * Load the DB JDBC driver, and manage a simple pool of connections to the DB.
- *
- * @author Robert Eby
- * @version $Id$
- */
-public class DB {
- /** The name of the properties file (in CLASSPATH) */
- public static final String CONFIG_FILE = "provserver.properties";
-
- private static String DB_DRIVER = "com.mysql.jdbc.Driver";
- private static String DB_URL = "jdbc:mysql://127.0.0.1:3306/datarouter";
- private static String DB_LOGIN = "datarouter";
- private static String DB_PASSWORD = "datarouter";
- private static Properties props;
- private static Logger intlogger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- private static Queue queue = new LinkedList();
-
- public static String HTTPS_PORT;
- public static String HTTP_PORT;
-
- /**
- * Construct a DB object. If this is the very first creation of this object, it will load a copy
- * of the properties for the server, and attempt to load the JDBC driver for the database. If a fatal
- * error occurs (e.g. either the properties file or the DB driver is missing), the JVM will exit.
- */
- public DB() {
- if (props == null) {
- props = new Properties();
- InputStream inStream = getClass().getClassLoader().getResourceAsStream(CONFIG_FILE);
- try {
- props.load(inStream);
- DB_DRIVER = (String) props.get("com.att.research.datarouter.db.driver");
- DB_URL = (String) props.get("com.att.research.datarouter.db.url");
- DB_LOGIN = (String) props.get("com.att.research.datarouter.db.login");
- DB_PASSWORD = (String) props.get("com.att.research.datarouter.db.password");
- HTTPS_PORT = (String) props.get("com.att.research.datarouter.provserver.https.port");
- HTTP_PORT = (String) props.get("com.att.research.datarouter.provserver.http.port");
- Class.forName(DB_DRIVER);
- } catch (IOException e) {
- intlogger.fatal("PROV9003 Opening properties: "+e.getMessage());
- e.printStackTrace();
- System.exit(1);
- } catch (ClassNotFoundException e) {
- intlogger.fatal("PROV9004 cannot find the DB driver: "+e);
- e.printStackTrace();
- System.exit(1);
- } finally {
- try {
- inStream.close();
- } catch (IOException e) {
- }
- }
- }
- }
- /**
- * Get the provisioning server properties (loaded from provserver.properties).
- * @return the Properties object
- */
- public Properties getProperties() {
- return props;
- }
- /**
- * Get a JDBC connection to the DB from the pool. Creates a new one if none are available.
- * @return the Connection
- * @throws SQLException
- */
- @SuppressWarnings("resource")
- public Connection getConnection() throws SQLException {
- Connection c = null;
- while (c == null) {
- synchronized (queue) {
- try {
- c = queue.remove();
- } catch (NoSuchElementException e) {
- int n = 0;
- do {
- // Try up to 3 times to get a connection
- try {
- c = DriverManager.getConnection(DB_URL, DB_LOGIN, DB_PASSWORD);
- } catch (SQLException e1) {
- if (++n >= 3)
- throw e1;
- }
- } while (c == null);
- }
- }
- if (c != null && !c.isValid(1)) {
- c.close();
- c = null;
- }
- }
- return c;
- }
- /**
- * Returns a JDBC connection to the pool.
- * @param c the Connection to return
- * @throws SQLException
- */
- public void release(Connection c) {
- if (c != null) {
- synchronized (queue) {
- if (!queue.contains(c))
- queue.add(c);
- }
- }
- }
-
- /**
- * Run all necessary retrofits required to bring the database up to the level required for this version
- * of the provisioning server. This should be run before the server itself is started.
- * @return true if all retrofits worked, false otherwise
- */
- public boolean runRetroFits() {
- return retroFit1()
- && retroFit2()
- && retroFit3()
- && retroFit4()
- && retroFit5()
- && retroFit6()
- && retroFit7()
- && retroFit8()
- && retroFit9() //New retroFit call to add CREATED_DATE column Rally:US674199 - 1610
- && retroFit10() //New retroFit call to add BUSINESS_DESCRIPTION column Rally:US708102 - 1610
- && retroFit11() //New retroFit call for groups feature Rally:US708115 - 1610
- ;
- }
- /**
- * Retrofit 1 - Make sure the expected tables are in MySQL and are initialized.
- * Uses mysql_init_0000 and mysql_init_0001 to setup the DB.
- * @return true if the retrofit worked, false otherwise
- */
- private boolean retroFit1() {
- final String[] expected_tables = {
- "FEEDS", "FEED_ENDPOINT_ADDRS", "FEED_ENDPOINT_IDS", "PARAMETERS", "SUBSCRIPTIONS"
- };
- Connection c = null;
- try {
- c = getConnection();
- Set tables = getTableSet(c);
- boolean initialize = false;
- for (String s : expected_tables) {
- initialize |= !tables.contains(s);
- }
- if (initialize) {
- intlogger.info("PROV9001: First time startup; The database is being initialized.");
- runInitScript(c, 0); // script 0 creates the provisioning tables
- runInitScript(c, 1); // script 1 initializes PARAMETERS
- }
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
- /**
- * Retrofit 2 - if the LOG_RECORDS table is missing, add it.
- * Uses mysql_init_0002 to create this table.
- * @return true if the retrofit worked, false otherwise
- */
- private boolean retroFit2() {
- Connection c = null;
- try {
- // If LOG_RECORDS table is missing, add it
- c = getConnection();
- Set tables = getTableSet(c);
- if (!tables.contains("LOG_RECORDS")) {
- intlogger.info("PROV9002: Creating LOG_RECORDS table.");
- runInitScript(c, 2); // script 2 creates the LOG_RECORDS table
- }
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
- /**
- * Retrofit 3 - if the FEEDS_UNIQUEID table (from release 1.0.*) exists, drop it.
- * If SUBSCRIPTIONS.SUBID still has the auto_increment attribute, remove it.
- * @return true if the retrofit worked, false otherwise
- */
- @SuppressWarnings("resource")
- private boolean retroFit3() {
- Connection c = null;
- try {
- // if SUBSCRIPTIONS.SUBID still has auto_increment, remove it
- boolean doremove = false;
- c = getConnection();
- DatabaseMetaData md = c.getMetaData();
- ResultSet rs = md.getColumns("datarouter", "", "SUBSCRIPTIONS", "SUBID");
- if (rs != null) {
- while (rs.next()) {
- doremove = rs.getString("IS_AUTOINCREMENT").equals("YES");
- }
- rs.close();
- rs = null;
- }
- if (doremove) {
- intlogger.info("PROV9002: Modifying SUBSCRIPTIONS SUBID column to remove auto increment.");
- Statement s = c.createStatement();
- s.execute("ALTER TABLE SUBSCRIPTIONS MODIFY COLUMN SUBID INT UNSIGNED NOT NULL");
- s.close();
- }
-
- // Remove the FEEDS_UNIQUEID table, if it exists
- Set tables = getTableSet(c);
- if (tables.contains("FEEDS_UNIQUEID")) {
- intlogger.info("PROV9002: Dropping FEEDS_UNIQUEID table.");
- Statement s = c.createStatement();
- s.execute("DROP TABLE FEEDS_UNIQUEID");
- s.close();
- }
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
- private long nextid = 0; // used for initial creation of LOG_RECORDS table.
- /**
- * Retrofit 4 - if old log tables exist (from release 1.0.*), copy them to LOG_RECORDS, then drop them.
- * @return true if the retrofit worked, false otherwise
- */
- @SuppressWarnings("resource")
- private boolean retroFit4() {
- Connection c = null;
- try {
- c = getConnection();
- Set tables = getTableSet(c);
- if (tables.contains("PUBLISH_RECORDS")) {
- intlogger.info("PROV9002: Copying PUBLISH_RECORDS to LOG_RECORDS table.");
- copyLogTable("PUBLISH_RECORDS", PublishRecord.class);
- intlogger.info("PROV9002: Dropping PUBLISH_RECORDS table.");
- Statement s = c.createStatement();
- s.execute("DROP TABLE PUBLISH_RECORDS");
- s.close();
- }
- if (tables.contains("DELIVERY_RECORDS")) {
- intlogger.info("PROV9002: Copying DELIVERY_RECORDS to LOG_RECORDS table.");
- copyLogTable("DELIVERY_RECORDS", DeliveryRecord.class);
- intlogger.info("PROV9002: Dropping DELIVERY_RECORDS table.");
- Statement s = c.createStatement();
- s.execute("DROP TABLE DELIVERY_RECORDS");
- s.close();
- }
- if (tables.contains("EXPIRY_RECORDS")) {
- intlogger.info("PROV9002: Copying EXPIRY_RECORDS to LOG_RECORDS table.");
- copyLogTable("EXPIRY_RECORDS", ExpiryRecord.class);
- intlogger.info("PROV9002: Dropping EXPIRY_RECORDS table.");
- Statement s = c.createStatement();
- s.execute("DROP TABLE EXPIRY_RECORDS");
- s.close();
- }
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
- /**
- * Retrofit 5 - Create the new routing tables required for Release 2.
- * Adds a new "SUSPENDED" column to FEEDS and SUBSCRIPTIONS.
- * Modifies the LOG_RECORDS table to handle new R2 records.
- * @return true if the retrofit worked, false otherwise
- */
- @SuppressWarnings("resource")
- private boolean retroFit5() {
- final String[] expected_tables = {
- "INGRESS_ROUTES", "EGRESS_ROUTES", "NETWORK_ROUTES", "NODESETS", "NODES"
- };
- Connection c = null;
- try {
- // If expected tables are not present, then add new routing tables
- c = getConnection();
- Set tables = getTableSet(c);
- boolean initialize = false;
- for (String s : expected_tables) {
- initialize |= !tables.contains(s);
- }
- if (initialize) {
- intlogger.info("PROV9002: Adding routing tables for Release 2.0.");
- runInitScript(c, 3); // script 3 creates the routing tables
- }
-
- // Add SUSPENDED column to FEEDS/SUBSCRIPTIONS
- DatabaseMetaData md = c.getMetaData();
- for (String tbl : new String[] {"FEEDS", "SUBSCRIPTIONS" }) {
- boolean add_col = true;
- ResultSet rs = md.getColumns("datarouter", "", tbl, "SUSPENDED");
- if (rs != null) {
- add_col = !rs.next();
- rs.close();
- rs = null;
- }
- if (add_col) {
- intlogger.info("PROV9002: Adding SUSPENDED column to "+tbl+" table.");
- Statement s = c.createStatement();
- s.execute("ALTER TABLE "+tbl+" ADD COLUMN SUSPENDED BOOLEAN DEFAULT FALSE");
- s.close();
- }
- }
-
- // Modify LOG_RECORDS for R2
- intlogger.info("PROV9002: Modifying LOG_RECORDS table.");
- Statement s = c.createStatement();
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN TYPE ENUM('pub', 'del', 'exp', 'pbf', 'dlx') NOT NULL");
- s.close();
- s = c.createStatement();
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN REASON ENUM('notRetryable', 'retriesExhausted', 'diskFull', 'other')");
- s.close();
- boolean add_col = true;
- ResultSet rs = md.getColumns("datarouter", "", "LOG_RECORDS", "CONTENT_LENGTH_2");
- if (rs != null) {
- add_col = !rs.next();
- rs.close();
- rs = null;
- }
- if (add_col) {
- intlogger.info("PROV9002: Fixing two columns in LOG_RECORDS table (this may take some time).");
- s = c.createStatement();
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN CONTENT_LENGTH BIGINT NOT NULL, ADD COLUMN CONTENT_LENGTH_2 BIGINT AFTER RECORD_ID");
- s.close();
- }
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
- /**
- * Retrofit 6 - Adjust LOG_RECORDS.USER to be 50 chars (MR #74).
- * @return true if the retrofit worked, false otherwise
- */
- @SuppressWarnings("resource")
- private boolean retroFit6() {
- Connection c = null;
- try {
- c = getConnection();
- // Modify LOG_RECORDS for R2
- intlogger.info("PROV9002: Modifying LOG_RECORDS.USER length.");
- Statement s = c.createStatement();
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN USER VARCHAR(50)");
- s.close();
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
- /**
- * Retrofit 7 - Adjust LOG_RECORDS.FEED_FILEID and LOG_RECORDS.DELIVERY_FILEID to be 256 chars.
- * @return true if the retrofit worked, false otherwise
- */
- @SuppressWarnings("resource")
- private boolean retroFit7() {
- Connection c = null;
- try {
- c = getConnection();
- // Modify LOG_RECORDS for long (>128) FILEIDs
- intlogger.info("PROV9002: Modifying LOG_RECORDS.USER length.");
- Statement s = c.createStatement();
- s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN FEED_FILEID VARCHAR(256), MODIFY COLUMN DELIVERY_FILEID VARCHAR(256)");
- s.close();
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
- /**
- * Retrofit 8 - Adjust FEEDS.NAME to be 255 chars (MR #74).
- * @return true if the retrofit worked, false otherwise
- */
- @SuppressWarnings("resource")
- private boolean retroFit8() {
- Connection c = null;
- try {
- c = getConnection();
- intlogger.info("PROV9002: Modifying FEEDS.NAME length.");
- Statement s = c.createStatement();
- s.execute("ALTER TABLE FEEDS MODIFY COLUMN NAME VARCHAR(255)");
- s.close();
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
-
- /**
- * Retrofit 9 - Add column FEEDS.CREATED_DATE and SUBSCRIPTIONS.CREATED_DATE, 1610 release user story US674199.
- * @return true if the retrofit worked, false otherwise
- */
-
- @SuppressWarnings("resource")
- private boolean retroFit9() {
- Connection c = null;
- try {
- c = getConnection();
- // Add CREATED_DATE column to FEEDS/SUBSCRIPTIONS tables
- DatabaseMetaData md = c.getMetaData();
- for (String tbl : new String[] {"FEEDS", "SUBSCRIPTIONS" }) {
- boolean add_col = true;
- ResultSet rs = md.getColumns("datarouter", "", tbl, "CREATED_DATE");
- if (rs != null) {
- add_col = !rs.next();
- rs.close();
- rs = null;
- }
- if (add_col) {
- intlogger.info("PROV9002: Adding CREATED_DATE column to "+tbl+" table.");
- Statement s = c.createStatement();
- s.execute("ALTER TABLE "+tbl+" ADD COLUMN CREATED_DATE timestamp DEFAULT CURRENT_TIMESTAMP");
- s.close();
- }
- }
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
-
- /**
- * Retrofit 10 -Adding business BUSINESS_DESCRIPTION to FEEDS table (Rally
- * US708102).
- *
- * @return true if the retrofit worked, false otherwise
- */
-
- @SuppressWarnings("resource")
- private boolean retroFit10() {
- Connection c = null;
- boolean addColumn = true;
-
- try {
-
- c = getConnection();
- // Add BUSINESS_DESCRIPTION column to FEEDS table
- DatabaseMetaData md = c.getMetaData();
- boolean add_col = true;
- ResultSet rs = md.getColumns("datarouter", "", "FEEDS", "BUSINESS_DESCRIPTION");
- if (rs != null) {
- add_col = !rs.next();
- rs.close();
- rs = null;
- }
- if(add_col) {
- intlogger
- .info("PROV9002: Adding BUSINESS_DESCRIPTION column to FEEDS table.");
- Statement s = c.createStatement();
- s.execute("ALTER TABLE FEEDS ADD COLUMN BUSINESS_DESCRIPTION varchar(1000) DEFAULT NULL AFTER DESCRIPTION, MODIFY COLUMN DESCRIPTION VARCHAR(1000)");
- s.close();
- }
- }
- catch (SQLException e) {
- intlogger
- .fatal("PROV9000: The database credentials are not working: "
- + e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
-
-
- /*New retroFit method is added for groups feature Rally:US708115 - 1610
- * @retroFit11()
- * @parmas: none
- * @return - boolean if table and fields are created (Group table, group id in FEEDS, SUBSCRIPTION TABLES)
- */
- @SuppressWarnings("resource")
- private boolean retroFit11() {
- final String[] expected_tables = {
- "GROUPS"
- };
- Connection c = null;
-
- try {
- // If expected tables are not present, then add new routing tables
- c = getConnection();
- Set tables = getTableSet(c);
- boolean initialize = false;
- for (String s : expected_tables) {
- initialize |= !tables.contains(s);
- }
- if (initialize) {
- intlogger.info("PROV9002: Adding GROUPS table for Release 1610.");
- runInitScript(c, 4); // script 4 creates the routing tables
- }
-
- // Add GROUPID column to FEEDS/SUBSCRIPTIONS
- DatabaseMetaData md = c.getMetaData();
- for (String tbl : new String[] {"FEEDS", "SUBSCRIPTIONS" }) {
- boolean add_col = true;
- ResultSet rs = md.getColumns("datarouter", "", tbl, "GROUPID");
- if (rs != null) {
- add_col = !rs.next();
- rs.close();
- rs = null;
- }
- if (add_col) {
- intlogger.info("PROV9002: Adding GROUPID column to "+tbl+" table.");
- Statement s = c.createStatement();
- s.execute("ALTER TABLE "+tbl+" ADD COLUMN GROUPID INT(10) UNSIGNED NOT NULL DEFAULT 0 AFTER FEEDID");
- s.close();
- }
- }
- } catch (SQLException e) {
- intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());
- return false;
- } finally {
- if (c != null)
- release(c);
- }
- return true;
- }
-
-
- /**
- * Copy the log table table_name to LOG_RECORDS;
- * @param table_name the name of the old (1.0.*) table to copy
- * @param table_class the class used to instantiate a record from the table
- * @throws SQLException if there is a problem getting a MySQL connection
- */
- @SuppressWarnings("resource")
- private void copyLogTable(String table_name, Class extends Loadable> table_class) throws SQLException {
- long start = System.currentTimeMillis();
- int n = 0;
- Connection c1 = getConnection();
- Connection c2 = getConnection();
-
- try {
- Constructor extends Loadable> cnst = table_class.getConstructor(ResultSet.class);
- PreparedStatement ps = c2.prepareStatement(LogfileLoader.INSERT_SQL);
- Statement stmt = c1.createStatement();
- ResultSet rs = stmt.executeQuery("select * from "+table_name);
- while (rs.next()) {
- Loadable rec = cnst.newInstance(rs);
- rec.load(ps);
- ps.setLong(18, ++nextid);
- ps.executeUpdate();
- if ((++n % 10000) == 0)
- intlogger.debug(" "+n+" records done.");
- }
- stmt.close();
- ps.close();
- } catch (SQLException e) {
- e.printStackTrace();
- } catch (NoSuchMethodException e) {
- e.printStackTrace();
- } catch (SecurityException e) {
- e.printStackTrace();
- } catch (InstantiationException e) {
- e.printStackTrace();
- } catch (IllegalAccessException e) {
- e.printStackTrace();
- } catch (IllegalArgumentException e) {
- e.printStackTrace();
- } catch (InvocationTargetException e) {
- e.printStackTrace();
- }
-
- release(c1);
- release(c2);
- long x = (System.currentTimeMillis() - start);
- intlogger.debug(" "+n+" records done in "+x+" ms.");
- }
-
- /**
- * Get a set of all table names in the DB.
- * @param c a DB connection
- * @return the set of table names
- */
- private Set getTableSet(Connection c) {
- Set tables = new HashSet();
- try {
- DatabaseMetaData md = c.getMetaData();
- ResultSet rs = md.getTables("datarouter", "", "", null);
- if (rs != null) {
- while (rs.next()) {
- tables.add(rs.getString("TABLE_NAME"));
- }
- rs.close();
- }
- } catch (SQLException e) {
- }
- return tables;
- }
- /**
- * Initialize the tables by running the initialization scripts located in the directory specified
- * by the property com.att.research.datarouter.provserver.dbscripts . Scripts have names of
- * the form mysql_init_NNNN.
- * @param c a DB connection
- * @param n the number of the mysql_init_NNNN script to run
- */
- private void runInitScript(Connection c, int n) {
- String scriptdir = (String) props.get("com.att.research.datarouter.provserver.dbscripts");
- StringBuilder sb = new StringBuilder();
- try {
- String scriptfile = String.format("%s/mysql_init_%04d", scriptdir, n);
- if (!(new File(scriptfile)).exists())
- return;
-
- LineNumberReader in = new LineNumberReader(new FileReader(scriptfile));
- String line;
- while ((line = in.readLine()) != null) {
- if (!line.startsWith("--")) {
- line = line.trim();
- sb.append(line);
- if (line.endsWith(";")) {
- // Execute one DDL statement
- String sql = sb.toString();
- sb.setLength(0);
- Statement s = c.createStatement();
- s.execute(sql);
- s.close();
- }
- }
- }
- in.close();
- sb.setLength(0);
- } catch (Exception e) {
- intlogger.fatal("PROV9002 Error when initializing table: "+e.getMessage());
- System.exit(1);
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/DRRouteCLI.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/DRRouteCLI.java
deleted file mode 100644
index 36d46e3b..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/DRRouteCLI.java
+++ /dev/null
@@ -1,456 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.utils;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.LineNumberReader;
-import java.security.KeyStore;
-import java.util.Arrays;
-import java.util.Properties;
-
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.StatusLine;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.conn.scheme.Scheme;
-import org.apache.http.conn.ssl.SSLSocketFactory;
-import org.apache.http.impl.client.AbstractHttpClient;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.util.EntityUtils;
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-
-/**
- * This class provides a Command Line Interface for the routing tables in the DR Release 2.0 DB.
- * A full description of this command is here .
- *
- * @author Robert Eby
- * @version $Id: DRRouteCLI.java,v 1.2 2013/11/05 15:54:16 eby Exp $
- */
-public class DRRouteCLI {
- /**
- * Invoke the CLI. The CLI can be run with a single command (given as command line arguments),
- * or in an interactive mode where the user types a sequence of commands to the program. The CLI is invoked via:
- *
- * java com.att.research.datarouter.provisioning.utils.DRRouteCLI [ -s server ] [ command ]
- *
- * A full description of the arguments to this command are
- * here .
- *
- * @param args command line arguments
- * @throws Exception for any unrecoverable problem
- */
- public static void main(String[] args) throws Exception {
- String server = System.getenv(ENV_VAR);
- if (args.length >= 2 && args[0].equals("-s")) {
- server = args[1];
- String[] t = new String[args.length-2];
- if (t.length > 0)
- System.arraycopy(args, 2, t, 0, t.length);
- args = t;
- }
- if (server == null || server.equals("")) {
- System.err.println("dr-route: you need to specify a server, either via $PROVSRVR or the '-s' option.");
- System.exit(1);
- }
- DRRouteCLI cli = new DRRouteCLI(server);
- if (args.length > 0) {
- boolean b = cli.runCommand(args);
- System.exit(b ? 0 : 1);
- } else {
- cli.interactive();
- System.exit(0);
- }
- }
-
- public static final String ENV_VAR = "PROVSRVR";
- public static final String PROMPT = "dr-route> ";
- public static final String DEFAULT_TRUSTSTORE_PATH = /* $JAVA_HOME + */ "/jre/lib/security/cacerts";
-
- private final String server;
- private int width = 120; // screen width (for list)
- private AbstractHttpClient httpclient;
-
- /**
- * Create a DRRouteCLI object connecting to the specified server.
- * @param server the server to send command to
- * @throws Exception
- */
- public DRRouteCLI(String server) throws Exception {
- this.server = server;
- this.width = 120;
- this.httpclient = new DefaultHttpClient();
-
- Properties p = (new DB()).getProperties();
- String truststore_file = p.getProperty("com.att.research.datarouter.provserver.truststore.path");
- String truststore_pw = p.getProperty("com.att.research.datarouter.provserver.truststore.password");
-
- KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
- if (truststore_file == null || truststore_file.equals("")) {
- String jhome = System.getenv("JAVA_HOME");
- if (jhome == null || jhome.equals(""))
- jhome = "/opt/java/jdk/jdk180";
- truststore_file = jhome + DEFAULT_TRUSTSTORE_PATH;
- }
- File f = new File(truststore_file);
- if (f.exists()) {
- FileInputStream instream = new FileInputStream(f);
- try {
- trustStore.load(instream, truststore_pw.toCharArray());
- } catch (Exception x) {
- System.err.println("Problem reading truststore: "+x);
- throw x;
- } finally {
- try { instream.close(); } catch (Exception ignore) {}
- }
- }
-
- SSLSocketFactory socketFactory = new SSLSocketFactory(trustStore);
- Scheme sch = new Scheme("https", 443, socketFactory);
- httpclient.getConnectionManager().getSchemeRegistry().register(sch);
- }
-
- private void interactive() throws IOException {
- LineNumberReader in = new LineNumberReader(new InputStreamReader(System.in));
- while (true) {
- System.out.print(PROMPT);
- String line = in.readLine();
- if (line == null)
- return;
- line = line.trim();
- if (line.equalsIgnoreCase("exit")) // "exit" may only be used in interactive mode
- return;
- if (line.equalsIgnoreCase("quit")) // "quit" may only be used in interactive mode
- return;
- String[] args = line.split("[ \t]+");
- if (args.length > 0)
- runCommand(args);
- }
- }
-
- /**
- * Run the command specified by the arguments.
- * @param args The command line arguments.
- * @return true if the command was valid and succeeded
- */
- public boolean runCommand(String[] args) {
- String cmd = args[0].trim().toLowerCase();
- if (cmd.equals("add")) {
- if (args.length > 2) {
- if (args[1].startsWith("in") && args.length >= 6) {
- return addIngress(args);
- }
- if (args[1].startsWith("eg") && args.length == 4) {
- return addEgress(args);
- }
- if (args[1].startsWith("ne") && args.length == 5) {
- return addRoute(args);
- }
- }
- System.err.println("Add command should be one of:");
- System.err.println(" add in[gress] feedid user subnet nodepatt [ seq ]");
- System.err.println(" add eg[ress] subid node");
- System.err.println(" add ne[twork] fromnode tonode vianode");
- } else if (cmd.startsWith("del")) {
- if (args.length > 2) {
- if (args[1].startsWith("in") && args.length == 5) {
- return delIngress(args);
- }
- if (args[1].startsWith("in") && args.length == 3) {
- return delIngress(args);
- }
- if (args[1].startsWith("eg") && args.length == 3) {
- return delEgress(args);
- }
- if (args[1].startsWith("ne") && args.length == 4) {
- return delRoute(args);
- }
- }
- System.err.println("Delete command should be one of:");
- System.err.println(" del in[gress] feedid user subnet");
- System.err.println(" del in[gress] seq");
- System.err.println(" del eg[ress] subid");
- System.err.println(" del ne[twork] fromnode tonode");
- } else if (cmd.startsWith("lis")) {
- return list(args);
- } else if (cmd.startsWith("wid") && args.length > 1) {
- width = Integer.parseInt(args[1]);
- return true;
- } else if (cmd.startsWith("?") || cmd.startsWith("hel") || cmd.startsWith("usa")) {
- usage();
- } else if (cmd.startsWith("#")) {
- // comment -- ignore
- } else {
- System.err.println("Command should be one of add, del, list, exit, quit");
- }
- return false;
- }
-
- private void usage() {
- System.out.println("Enter one of the following commands:");
- System.out.println(" add in[gress] feedid user subnet nodepatt [ seq ]");
- System.out.println(" add eg[ress] subid node");
- System.out.println(" add ne[twork] fromnode tonode vianode");
- System.out.println(" del in[gress] feedid user subnet");
- System.out.println(" del in[gress] seq");
- System.out.println(" del eg[ress] subid");
- System.out.println(" del ne[twork] fromnode tonode");
- System.out.println(" list [ all | ingress | egress | network ]");
- System.out.println(" exit");
- System.out.println(" quit");
- }
-
- private boolean addIngress(String[] args) {
- String url = String.format("https://%s/internal/route/ingress/?feed=%s&user=%s&subnet=%s&nodepatt=%s", server, args[2], args[3], args[4], args[5]);
- if (args.length > 6)
- url += "&seq=" + args[6];
- return doPost(url);
- }
-
- private boolean addEgress(String[] args) {
- String url = String.format("https://%s/internal/route/egress/?sub=%s&node=%s", server, args[2], args[3]);
- return doPost(url);
- }
-
- private boolean addRoute(String[] args) {
- String url = String.format("https://%s/internal/route/network/?from=%s&to=%s&via=%s", server, args[2], args[3], args[4]);
- return doPost(url);
- }
-
- private boolean delIngress(String[] args) {
- String url;
- if (args.length == 5) {
- String subnet = args[4].replaceAll("/", "!"); // replace the / with a !
- url = String.format("https://%s/internal/route/ingress/%s/%s/%s", server, args[2], args[3], subnet);
- } else {
- url = String.format("https://%s/internal/route/ingress/%s", server, args[2]);
- }
- return doDelete(url);
- }
-
- private boolean delEgress(String[] args) {
- String url = String.format("https://%s/internal/route/egress/%s", server, args[2]);
- return doDelete(url);
- }
-
- private boolean delRoute(String[] args) {
- String url = String.format("https://%s/internal/route/network/%s/%s", server, args[2], args[3]);
- return doDelete(url);
- }
-
- private boolean list(String[] args) {
- String tbl = (args.length == 1) ? "all" : args[1].toLowerCase();
- JSONObject jo = doGet("https://"+server+"/internal/route/"); // Returns all 3 tables
- StringBuilder sb = new StringBuilder();
- if (tbl.startsWith("al") || tbl.startsWith("in")) {
- // Display the IRT
- JSONArray irt = jo.optJSONArray("ingress");
- int cw1 = 6, cw2 = 6, cw3 = 6, cw4 = 6; // determine column widths for first 4 cols
- for (int i = 0; irt != null && i < irt.length(); i++) {
- JSONObject e = irt.getJSONObject(i);
- cw1 = Math.max(cw1, (""+ e.getInt("seq")).length());
- cw2 = Math.max(cw2, (""+e.getInt("feedid")).length());
- String t = e.optString("user");
- cw3 = Math.max(cw3, (t == null) ? 1 : t.length());
- t = e.optString("subnet");
- cw4 = Math.max(cw4, (t == null) ? 1 : t.length());
- }
-
- int nblank = cw1 + cw2 + cw3 + cw4 + 8;
- sb.append("Ingress Routing Table\n");
- sb.append(String.format("%s %s %s %s Nodes\n", ext("Seq", cw1), ext("FeedID", cw2), ext("User", cw3), ext("Subnet", cw4)));
- for (int i = 0; irt != null && i < irt.length(); i++) {
- JSONObject e = irt.getJSONObject(i);
- String seq = ""+e.getInt("seq");
- String feedid = ""+e.getInt("feedid");
- String user = e.optString("user");
- String subnet = e.optString("subnet");
- if (user.equals("")) user = "-";
- if (subnet.equals("")) subnet = "-";
- JSONArray nodes = e.getJSONArray("node");
- int sol = sb.length();
- sb.append(String.format("%s %s %s %s ", ext(seq, cw1), ext(feedid, cw2), ext(user, cw3), ext(subnet, cw4)));
- for (int j = 0; j < nodes.length(); j++) {
- String nd = nodes.getString(j);
- int cursor = sb.length() - sol;
- if (j > 0 && (cursor + nd.length() > width)) {
- sb.append("\n");
- sol = sb.length();
- sb.append(ext(" ", nblank));
- }
- sb.append(nd);
- if ((j+1) < nodes.length()) {
- sb.append(", ");
- }
- }
- sb.append("\n");
- }
- }
- if (tbl.startsWith("al") || tbl.startsWith("eg")) {
- // Display the ERT
- JSONObject ert = jo.optJSONObject("egress");
- String[] subs = (ert == null) ? new String[0] : JSONObject.getNames(ert);
- if (subs == null)
- subs = new String[0];
- Arrays.sort(subs);
- int cw1 = 5;
- for (int i = 0; i < subs.length; i++) {
- cw1 = Math.max(cw1, subs[i].length());
- }
-
- if (sb.length() > 0)
- sb.append("\n");
- sb.append("Egress Routing Table\n");
- sb.append(String.format("%s Node\n", ext("SubID", cw1)));
- for (int i = 0; i < subs.length; i++) {
- String node = ert.getString(subs[i]);
- sb.append(String.format("%s %s\n", ext(subs[i], cw1), node));
- }
- }
- if (tbl.startsWith("al") || tbl.startsWith("ne")) {
- // Display the NRT
- JSONArray nrt = jo.optJSONArray("routing");
- int cw1 = 4, cw2 = 4;
- for (int i = 0; nrt != null && i < nrt.length(); i++) {
- JSONObject e = nrt.getJSONObject(i);
- String from = e.getString("from");
- String to = e.getString("to");
- cw1 = Math.max(cw1, from.length());
- cw2 = Math.max(cw2, to.length());
- }
-
- if (sb.length() > 0)
- sb.append("\n");
- sb.append("Network Routing Table\n");
- sb.append(String.format("%s %s Via\n", ext("From", cw1), ext("To", cw2)));
- for (int i = 0; nrt != null && i < nrt.length(); i++) {
- JSONObject e = nrt.getJSONObject(i);
- String from = e.getString("from");
- String to = e.getString("to");
- String via = e.getString("via");
- sb.append(String.format("%s %s %s\n", ext(from, cw1), ext(to, cw2), via));
- }
- }
- System.out.print(sb.toString());
- return true;
- }
- private String ext(String s, int n) {
- if (s == null)
- s = "-";
- while (s.length() < n)
- s += " ";
- return s;
- }
-
- private boolean doDelete(String url) {
- boolean rv = false;
- HttpDelete meth = new HttpDelete(url);
- try {
- HttpResponse response = httpclient.execute(meth);
- HttpEntity entity = response.getEntity();
- StatusLine sl = response.getStatusLine();
- rv = (sl.getStatusCode() == HttpServletResponse.SC_OK);
- if (rv) {
- System.out.println("Routing entry deleted.");
- EntityUtils.consume(entity);
- } else {
- printErrorText(entity);
- }
- } catch (Exception e) {
- } finally {
- meth.releaseConnection();
- }
- return rv;
- }
-
- private JSONObject doGet(String url) {
- JSONObject rv = new JSONObject();
- HttpGet meth = new HttpGet(url);
- try {
- HttpResponse response = httpclient.execute(meth);
- HttpEntity entity = response.getEntity();
- StatusLine sl = response.getStatusLine();
- if (sl.getStatusCode() == HttpServletResponse.SC_OK) {
- rv = new JSONObject(new JSONTokener(entity.getContent()));
- } else {
- printErrorText(entity);
- }
- } catch (Exception e) {
- System.err.println(e);
- } finally {
- meth.releaseConnection();
- }
- return rv;
- }
-
- private boolean doPost(String url) {
- boolean rv = false;
- HttpPost meth = new HttpPost(url);
- try {
- HttpResponse response = httpclient.execute(meth);
- HttpEntity entity = response.getEntity();
- StatusLine sl = response.getStatusLine();
- rv = (sl.getStatusCode() == HttpServletResponse.SC_OK);
- if (rv) {
- System.out.println("Routing entry added.");
- EntityUtils.consume(entity);
- } else {
- printErrorText(entity);
- }
- } catch (Exception e) {
- } finally {
- meth.releaseConnection();
- }
- return rv;
- }
-
- private void printErrorText(HttpEntity entity) throws IllegalStateException, IOException {
- // Look for and print only the part of the output between ...
- InputStream is = entity.getContent();
- StringBuilder sb = new StringBuilder();
- byte[] b = new byte[512];
- int n = 0;
- while ((n = is.read(b)) > 0) {
- sb.append(new String(b, 0, n));
- }
- is.close();
- int ix = sb.indexOf("");
- if (ix > 0)
- sb.delete(0, ix+5);
- ix = sb.indexOf(" ");
- if (ix > 0)
- sb.delete(ix, sb.length());
- System.err.println(sb.toString());
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/JSONUtilities.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/JSONUtilities.java
deleted file mode 100644
index e1676588..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/JSONUtilities.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.utils;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Collection;
-
-/**
- * Some utility functions used when creating/validating JSON.
- *
- * @author Robert Eby
- * @version $Id: JSONUtilities.java,v 1.1 2013/04/26 21:00:26 eby Exp $
- */
-public class JSONUtilities {
- /**
- * Does the String v represent a valid Internet address (with or without a
- * mask length appended).
- * @param v the string to check
- * @return true if valid, false otherwise
- */
- public static boolean validIPAddrOrSubnet(String v) {
- String[] pp = { v, "" };
- if (v.indexOf('/') > 0)
- pp = v.split("/");
- try {
- InetAddress addr = InetAddress.getByName(pp[0]);
- if (pp[1].length() > 0) {
- // check subnet mask
- int mask = Integer.parseInt(pp[1]);
- if (mask > (addr.getAddress().length * 8))
- return false;
- }
- return true;
- } catch (UnknownHostException e) {
- return false;
- }
- }
- /**
- * Build a JSON array from a collection of Strings.
- * @param coll the collection
- * @return a String containing a JSON array
- */
- public static String createJSONArray(Collection coll) {
- StringBuilder sb = new StringBuilder("[");
- String pfx = "\n";
- for (String t : coll) {
- sb.append(pfx).append(" \"").append(t).append("\"");
- pfx = ",\n";
- }
- sb.append("\n]\n");
- return sb.toString();
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/LogfileLoader.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/LogfileLoader.java
deleted file mode 100644
index f9c11f18..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/LogfileLoader.java
+++ /dev/null
@@ -1,549 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.utils;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.LineNumberReader;
-import java.io.Reader;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.TreeSet;
-import java.util.zip.GZIPInputStream;
-
-import org.apache.log4j.Logger;
-
-import com.att.research.datarouter.provisioning.BaseServlet;
-import com.att.research.datarouter.provisioning.beans.DeliveryExtraRecord;
-import com.att.research.datarouter.provisioning.beans.DeliveryRecord;
-import com.att.research.datarouter.provisioning.beans.ExpiryRecord;
-import com.att.research.datarouter.provisioning.beans.Loadable;
-import com.att.research.datarouter.provisioning.beans.LogRecord;
-import com.att.research.datarouter.provisioning.beans.Parameters;
-import com.att.research.datarouter.provisioning.beans.PubFailRecord;
-import com.att.research.datarouter.provisioning.beans.PublishRecord;
-
-/**
- * This class provides methods that run in a separate thread, in order to process logfiles uploaded into the spooldir.
- * These logfiles are loaded into the MySQL LOG_RECORDS table. In a running provisioning server, there should only be
- * two places where records can be loaded into this table; here, and in the method DB.retroFit4() which may be run at
- * startup to load the old (1.0) style log tables into LOG_RECORDS;
- * This method maintains an {@link RLEBitSet} which can be used to easily see what records are presently in the
- * database.
- * This bit set is used to synchronize between provisioning servers.
- *
- * @author Robert Eby
- * @version $Id: LogfileLoader.java,v 1.22 2014/03/12 19:45:41 eby Exp $
- */
-public class LogfileLoader extends Thread {
- /** Default number of log records to keep when pruning. Keep 10M by default. */
- public static final long DEFAULT_LOG_RETENTION = 10000000L;
- /** NOT USED: Percentage of free space required before old records are removed. */
- public static final int REQUIRED_FREE_PCT = 20;
-
- /** This is a singleton -- there is only one LogfileLoader object in the server */
- private static LogfileLoader p;
-
- /**
- * Get the singleton LogfileLoader object, and start it if it is not running.
- * @return the LogfileLoader
- */
- public static synchronized LogfileLoader getLoader() {
- if (p == null)
- p = new LogfileLoader();
- if (!p.isAlive())
- p.start();
- return p;
- }
-
- /** The PreparedStatement which is loaded by a Loadable . */
- public static final String INSERT_SQL = "insert into LOG_RECORDS values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
- /** Each server can assign this many IDs */
- private static final long SET_SIZE = (1L << 56);
-
- private final Logger logger;
- private final DB db;
- private final String spooldir;
- private final long set_start;
- private final long set_end;
- private RLEBitSet seq_set;
- private long nextid;
- private boolean idle;
-
- private LogfileLoader() {
- this.logger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- this.db = new DB();
- this.spooldir = db.getProperties().getProperty("com.att.research.datarouter.provserver.spooldir");
- this.set_start = getIdRange();
- this.set_end = set_start + SET_SIZE - 1;
- this.seq_set = new RLEBitSet();
- this.nextid = 0;
- this.idle = false;
-
- // This is a potentially lengthy operation, so has been moved to run()
- //initializeNextid();
- this.setDaemon(true);
- this.setName("LogfileLoader");
- }
-
- private long getIdRange() {
- long n;
- if (BaseServlet.isInitialActivePOD())
- n = 0;
- else if (BaseServlet.isInitialStandbyPOD())
- n = SET_SIZE;
- else
- n = SET_SIZE * 2;
- String r = String.format("[%X .. %X]", n, n+SET_SIZE-1);
- logger.debug("This server shall assign RECORD_IDs in the range "+r);
- return n;
- }
- /**
- * Return the bit set representing the record ID's that are loaded in this database.
- * @return the bit set
- */
- public RLEBitSet getBitSet() {
- return seq_set;
- }
- /**
- * True if the LogfileLoader is currently waiting for work.
- * @return true if idle
- */
- public boolean isIdle() {
- return idle;
- }
- /**
- * Run continuously to look for new logfiles in the spool directory and import them into the DB.
- * The spool is checked once per second. If free space on the MySQL filesystem falls below
- * REQUIRED_FREE_PCT (normally 20%) then the oldest logfile entries are removed and the LOG_RECORDS
- * table is compacted until free space rises above the threshold.
- */
- @Override
- public void run() {
- initializeNextid(); // moved from the constructor
- while (true) {
- try {
- File dirfile = new File(spooldir);
- while (true) {
- // process IN files
- File[] infiles = dirfile.listFiles(new FilenameFilter() {
- @Override
- public boolean accept(File dir, String name) {
- return name.startsWith("IN.");
- }
- });
-
- if (infiles.length == 0) {
- idle = true;
- try {
- Thread.sleep(1000L);
- } catch (InterruptedException e) {
- }
- idle = false;
- } else {
- // Remove old rows
- if (pruneRecords()) {
- // Removed at least some entries, recompute the bit map
- initializeNextid();
- }
-
- // Process incoming logfiles
- for (File f : infiles) {
- if (logger.isDebugEnabled())
- logger.debug("PROV8001 Starting " + f + " ...");
- long time = System.currentTimeMillis();
- int[] n = process(f);
- time = System.currentTimeMillis() - time;
- logger.info(String
- .format("PROV8000 Processed %s in %d ms; %d of %d records.",
- f.toString(), time, n[0], n[1]));
- f.delete();
- }
- }
- }
- } catch (Exception e) {
- logger.warn("PROV0020: Caught exception in LogfileLoader: " + e);
- e.printStackTrace();
- }
- }
- }
- private boolean pruneRecords() {
- boolean did1 = false;
- long count = countRecords();
- long threshold = DEFAULT_LOG_RETENTION;
- Parameters param = Parameters.getParameter(Parameters.PROV_LOG_RETENTION);
- if (param != null) {
- try {
- long n = Long.parseLong(param.getValue());
- // This check is to prevent inadvertent errors from wiping the table out
- if (n > 1000000L)
- threshold = n;
- } catch (NumberFormatException e) {
- // ignore
- }
- }
- logger.debug("Pruning LOG_RECORD table: records in DB="+count+", threshold="+threshold);
- if (count > threshold) {
- count -= threshold; // we need to remove this many records;
- Map hist = getHistogram(); // histogram of records per day
- // Determine the cutoff point to remove the needed number of records
- long sum = 0;
- long cutoff = 0;
- for (Long day : new TreeSet(hist.keySet())) {
- sum += hist.get(day);
- cutoff = day;
- if (sum >= count)
- break;
- }
- cutoff++;
- cutoff *= 86400000L; // convert day to ms
- logger.debug(" Pruning records older than="+(cutoff/86400000L)+" ("+new Date(cutoff)+")");
-
- Connection conn = null;
- try {
- // Limit to a million at a time to avoid typing up the DB for too long.
- conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement("DELETE from LOG_RECORDS where EVENT_TIME < ? limit 1000000");
- ps.setLong(1, cutoff);
- while (count > 0) {
- if (!ps.execute()) {
- int dcount = ps.getUpdateCount();
- count -= dcount;
- logger.debug(" "+dcount+" rows deleted.");
- did1 |= (dcount!=0);
- if (dcount == 0)
- count = 0; // prevent inf. loops
- } else {
- count = 0; // shouldn't happen!
- }
- }
- ps.close();
- Statement stmt = conn.createStatement();
- stmt.execute("OPTIMIZE TABLE LOG_RECORDS");
- stmt.close();
- } catch (SQLException e) {
- System.err.println(e);
- e.printStackTrace();
- } finally {
- db.release(conn);
- }
- }
- return did1;
- }
- private long countRecords() {
- long count = 0;
- Connection conn = null;
- try {
- conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("SELECT COUNT(*) as COUNT from LOG_RECORDS");
- if (rs.next()) {
- count = rs.getLong("COUNT");
- }
- rs.close();
- stmt.close();
- } catch (SQLException e) {
- System.err.println(e);
- e.printStackTrace();
- } finally {
- db.release(conn);
- }
- return count;
- }
- private Map getHistogram() {
- Map map = new HashMap();
- Connection conn = null;
- try {
- logger.debug(" LOG_RECORD table histogram...");
- conn = db.getConnection();
- Statement stmt = conn.createStatement();
- ResultSet rs = stmt.executeQuery("SELECT FLOOR(EVENT_TIME/86400000) AS DAY, COUNT(*) AS COUNT FROM LOG_RECORDS GROUP BY DAY");
- while (rs.next()) {
- long day = rs.getLong("DAY");
- long cnt = rs.getLong("COUNT");
- map.put(day, cnt);
- logger.debug(" "+day + " "+cnt);
- }
- rs.close();
- stmt.close();
- } catch (SQLException e) {
- System.err.println(e);
- e.printStackTrace();
- } finally {
- db.release(conn);
- }
- return map;
- }
- private void initializeNextid() {
- Connection conn = null;
- try {
- conn = db.getConnection();
- Statement stmt = conn.createStatement();
- // Build a bitset of all records in the LOG_RECORDS table
- // We need to run this SELECT in stages, because otherwise we run out of memory!
- RLEBitSet nbs = new RLEBitSet();
- final long stepsize = 6000000L;
- boolean go_again = true;
- for (long i = 0; go_again; i += stepsize) {
- String sql = String.format("select RECORD_ID from LOG_RECORDS LIMIT %d,%d", i, stepsize);
- ResultSet rs = stmt.executeQuery(sql);
- go_again = false;
- while (rs.next()) {
- long n = rs.getLong("RECORD_ID");
- nbs.set(n);
- go_again = true;
- }
- rs.close();
- }
- stmt.close();
- seq_set = nbs;
-
- // Compare with the range for this server
- // Determine the next ID for this set of record IDs
- RLEBitSet tbs = (RLEBitSet) nbs.clone();
- RLEBitSet idset = new RLEBitSet();
- idset.set(set_start, set_start+SET_SIZE);
- tbs.and(idset);
- long t = tbs.length();
- nextid = (t == 0) ? set_start : (t - 1);
- if (nextid >= set_start+SET_SIZE) {
- // Handle wraparound, when the IDs reach the end of our "range"
- Long[] last = null;
- Iterator li = tbs.getRangeIterator();
- while (li.hasNext()) {
- last = li.next();
- }
- if (last != null) {
- tbs.clear(last[0], last[1]+1);
- t = tbs.length();
- nextid = (t == 0) ? set_start : (t - 1);
- }
- }
- logger.debug(String.format("initializeNextid, next ID is %d (%x)", nextid, nextid));
- } catch (SQLException e) {
- System.err.println(e);
- e.printStackTrace();
- } finally {
- db.release(conn);
- }
- }
-// OLD CODE - commented here for historical purposes
-//
-// private boolean pruneRecordsOldAlgorithm() {
-// // Determine space available -- available space must be at least 20% under /opt/app/mysql
-// int pct = getFreePercentage();
-// boolean did1 = false;
-// while (pct < REQUIRED_FREE_PCT) {
-// logger.info("PROV8008: Free space is " + pct + "% - removing old log entries");
-// boolean didit = removeOldestEntries();
-// pct = didit ? getFreePercentage() : 100; // don't loop endlessly
-// did1 |= didit;
-// }
-// return did1;
-// }
-// private int getFreePercentage() {
-// FileSystem fs = (Paths.get("/opt/app/mysql")).getFileSystem();
-// long total = 0;
-// long avail = 0;
-// try {
-// for (FileStore store : fs.getFileStores()) {
-// total += store.getTotalSpace();
-// avail += store.getUsableSpace();
-// }
-// } catch (IOException e) {
-// }
-// try { fs.close(); } catch (Exception e) { }
-// return (int)((avail * 100) / total);
-// }
-// private boolean removeOldestEntries() {
-// // Remove the last days worth of entries
-// Connection conn = null;
-// try {
-// conn = db.getConnection();
-// Statement stmt = conn.createStatement();
-// ResultSet rs = stmt.executeQuery("select min(event_time) as MIN from LOG_RECORDS");
-// if (rs != null) {
-// if (rs.next()) {
-// // Compute the end of the first day of logs
-// long first = rs.getLong("MIN");
-// Calendar cal = new GregorianCalendar();
-// cal.setTime(new Date(first));
-// cal.add(Calendar.DAY_OF_YEAR, 1);
-// cal.set(Calendar.HOUR_OF_DAY, 0);
-// cal.set(Calendar.MINUTE, 0);
-// cal.set(Calendar.SECOND, 0);
-// cal.set(Calendar.MILLISECOND, 0);
-// if (!stmt.execute("delete from LOG_RECORDS where event_time < " + cal.getTimeInMillis())) {
-// int count = stmt.getUpdateCount();
-// logger.info("PROV0009: Removed "+count+" old log entries.");
-// stmt.execute("OPTIMIZE TABLE LOG_RECORDS");
-// }
-// rs.close();
-// stmt.close();
-// return true;
-// }
-// rs.close();
-// }
-// stmt.close();
-// } catch (SQLException e) {
-// System.err.println(e);
-// e.printStackTrace();
-// } finally {
-// db.release(conn);
-// }
-// return false;
-// }
- @SuppressWarnings("resource")
- private int[] process(File f) {
- int ok = 0, total = 0;
- try {
- Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(INSERT_SQL);
- Reader r = f.getPath().endsWith(".gz")
- ? new InputStreamReader(new GZIPInputStream(new FileInputStream(f)))
- : new FileReader(f);
- LineNumberReader in = new LineNumberReader(r);
- String line;
- while ((line = in.readLine()) != null) {
- try {
- for (Loadable rec : buildRecords(line)) {
- rec.load(ps);
- if (rec instanceof LogRecord) {
- LogRecord lr = ((LogRecord)rec);
- if (!seq_set.get(lr.getRecordId())) {
- ps.executeUpdate();
- seq_set.set(lr.getRecordId());
- } else
- logger.debug("Duplicate record ignored: "+lr.getRecordId());
- } else {
- if (++nextid > set_end)
- nextid = set_start;
- ps.setLong(18, nextid);
- ps.executeUpdate();
- seq_set.set(nextid);
- }
- ps.clearParameters();
- ok++;
- }
- } catch (SQLException e) {
- logger.warn("PROV8003 Invalid value in record: "+line);
- logger.debug(e);
- e.printStackTrace();
- } catch (NumberFormatException e) {
- logger.warn("PROV8004 Invalid number in record: "+line);
- logger.debug(e);
- e.printStackTrace();
- } catch (ParseException e) {
- logger.warn("PROV8005 Invalid date in record: "+line);
- logger.debug(e);
- e.printStackTrace();
- } catch (Exception e) {
- logger.warn("PROV8006 Invalid pattern in record: "+line);
- logger.debug(e);
- e.printStackTrace();
- }
- total++;
- }
- in.close();
- ps.close();
- db.release(conn);
- conn = null;
- } catch (FileNotFoundException e) {
- logger.warn("PROV8007 Exception reading "+f+": "+e);
- } catch (IOException e) {
- logger.warn("PROV8007 Exception reading "+f+": "+e);
- } catch (SQLException e) {
- logger.warn("PROV8007 Exception reading "+f+": "+e);
- }
- return new int[] { ok, total };
- }
- private Loadable[] buildRecords(String line) throws ParseException {
- String[] pp = line.split("\\|");
- if (pp != null && pp.length >= 7) {
- String rtype = pp[1].toUpperCase();
- if (rtype.equals("PUB") && pp.length == 11) {
- // Fields are: date|PUB|pubid|feedid|requrl|method|ctype|clen|srcip|user|status
- return new Loadable[] { new PublishRecord(pp) };
- }
- if (rtype.equals("DEL") && pp.length == 12) {
- // Fields are: date|DEL|pubid|feedid|subid|requrl|method|ctype|clen|user|status|xpubid
- String[] subs = pp[4].split("\\s+");
- if (subs != null) {
- Loadable[] rv = new Loadable[subs.length];
- for (int i = 0; i < subs.length; i++) {
- // create a new record for each individual sub
- pp[4] = subs[i];
- rv[i] = new DeliveryRecord(pp);
- }
- return rv;
- }
- }
- if (rtype.equals("EXP") && pp.length == 11) {
- // Fields are: date|EXP|pubid|feedid|subid|requrl|method|ctype|clen|reason|attempts
- ExpiryRecord e = new ExpiryRecord(pp);
- if (e.getReason().equals("other"))
- logger.info("Invalid reason '"+pp[9]+"' changed to 'other' for record: "+e.getPublishId());
- return new Loadable[] { e };
- }
- if (rtype.equals("PBF") && pp.length == 12) {
- // Fields are: date|PBF|pubid|feedid|requrl|method|ctype|clen-expected|clen-received|srcip|user|error
- return new Loadable[] { new PubFailRecord(pp) };
- }
- if (rtype.equals("DLX") && pp.length == 7) {
- // Fields are: date|DLX|pubid|feedid|subid|clen-tosend|clen-sent
- return new Loadable[] { new DeliveryExtraRecord(pp) };
- }
- if (rtype.equals("LOG") && (pp.length == 19 || pp.length == 20)) {
- // Fields are: date|LOG|pubid|feedid|requrl|method|ctype|clen|type|feedFileid|remoteAddr|user|status|subid|fileid|result|attempts|reason|record_id
- return new Loadable[] { new LogRecord(pp) };
- }
- }
- logger.warn("PROV8002 bad record: "+line);
- return new Loadable[0];
- }
-
- /**
- * The LogfileLoader can be run stand-alone by invoking the main() method of this class.
- * @param a ignored
- * @throws InterruptedException
- */
- public static void main(String[] a) throws InterruptedException {
- LogfileLoader.getLoader();
- Thread.sleep(200000L);
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/PurgeLogDirTask.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/PurgeLogDirTask.java
deleted file mode 100644
index b705e6f8..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/PurgeLogDirTask.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.utils;
-
-import java.io.File;
-import java.util.Properties;
-import java.util.TimerTask;
-
-/**
- * This class provides a {@link TimerTask} that purges old logfiles
- * (older than the number of days specified by the com.att.research.datarouter.provserver.logretention property).
- * @author Robert Eby
- * @version $Id: PurgeLogDirTask.java,v 1.2 2013/07/05 13:48:05 eby Exp $
- */
-public class PurgeLogDirTask extends TimerTask {
- private static final long ONEDAY = 86400000L;
-
- private final String logdir;
- private final long interval;
-
- public PurgeLogDirTask() {
- Properties p = (new DB()).getProperties();
- logdir = p.getProperty("com.att.research.datarouter.provserver.accesslog.dir");
- String s = p.getProperty("com.att.research.datarouter.provserver.logretention", "30");
- long n = 30;
- try {
- n = Long.parseLong(s);
- } catch (NumberFormatException e) {
- // ignore
- }
- interval = n * ONEDAY;
- }
- @Override
- public void run() {
- try {
- File dir = new File(logdir);
- if (dir.exists()) {
- long exptime = System.currentTimeMillis() - interval;
- for (File logfile : dir.listFiles()) {
- if (logfile.lastModified() < exptime)
- logfile.delete();
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/RLEBitSet.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/RLEBitSet.java
deleted file mode 100644
index 58617414..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/RLEBitSet.java
+++ /dev/null
@@ -1,418 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.utils;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-/**
- * This class provides operations similar to the standard Java {@link java.util.BitSet} class.
- * It is designed for bit sets where there are long runs of 1s and 0s; it is not appropriate
- * for sparsely populated bits sets. In addition, this class uses long
s rather
- * than int
s to represent the indices of the bits.
- *
- * @author Robert Eby
- * @version $Id$
- */
-public class RLEBitSet {
- /**
- * Used to represent a continues set of nbits 1 bits starting at start .
- */
- private class RLE implements Comparable {
- private final long start;
- private long nbits;
- public RLE(long from, long nbits) {
- this.start = from;
- this.nbits = (nbits > 0) ? nbits : 0;
- }
- /**
- * Returns the index of the first set bit in this RLE.
- * @return the index
- */
- public long firstBit() {
- return start;
- }
- /**
- * Returns the index of the last set bit in this RLE.
- * @return the index
- */
- public long lastBit() {
- return start+nbits-1;
- }
- public boolean intersects(RLE b2) {
- if (b2.lastBit() < this.firstBit())
- return false;
- if (b2.firstBit() > this.lastBit())
- return false;
- return true;
- }
- public boolean isSubset(RLE b2) {
- if (firstBit() < b2.firstBit())
- return false;
- if (firstBit() > b2.lastBit())
- return false;
- if (lastBit() < b2.firstBit())
- return false;
- if (lastBit() > b2.lastBit())
- return false;
- return true;
- }
- public RLE union(RLE b2) {
- RLE b1 = this;
- if (b1.firstBit() > b2.firstBit()) {
- b1 = b2;
- b2 = this;
- }
- long end = b1.lastBit();
- if (b2.lastBit() > b1.lastBit())
- end = b2.lastBit();
- return new RLE(b1.firstBit(), end-b1.firstBit()+1);
- }
- /**
- * Returns the number of bits set to {@code true} in this {@code RLE}.
- * @return the number of bits set to {@code true} in this {@code RLE}.
- */
- public int cardinality() {
- return (int) nbits;
- }
- @Override
- public int compareTo(RLE o) {
- if (this.equals(o))
- return 0;
- return (start < o.start) ? -1 : 1;
- }
- @Override
- public boolean equals(Object obj) {
- if (obj instanceof RLE) {
- RLE b = (RLE) obj;
- return (start == b.start) && (nbits == b.nbits);
- }
- return false;
- }
- @Override
- public int hashCode() {
- return new Long(start ^ nbits).hashCode();
- }
- @Override
- public String toString() {
- return "["+firstBit()+".."+lastBit()+"]";
- }
- }
- private SortedSet bitsets;
-
- /**
- * Creates a new bit set. All bits are initially false
.
- */
- public RLEBitSet() {
- bitsets = new TreeSet();
- }
- /**
- * Creates a new bit set, with bits set according to the value of s
.
- * @param s the initialization String
- */
- public RLEBitSet(String s) {
- bitsets = new TreeSet();
- set(s);
- }
- /**
- * Returns the "logical size" of this {@code RLEBitSet}: the index of the highest set bit
- * in the {@code RLEBitSet} plus one. Returns zero if the {@code RLEBitSet} contains no set bits.
- * @return the logical size of this {@code RLEBitSet}
- */
- public long length() {
- if (isEmpty())
- return 0;
- return bitsets.last().lastBit()+1;
- }
- /**
- * Returns the value of the bit with the specified index. The value is {@code true} if the bit
- * with the index bit is currently set in this BitSet; otherwise, the result is {@code false}.
- * @param bit the bit index
- * @return the value of the bit with the specified index
- */
- public boolean get(long bit) {
- synchronized (bitsets) {
- for (RLE bs : bitsets) {
- if (bit >= bs.firstBit() && bit <= bs.lastBit())
- return true;
- }
- }
- return false;
- }
- /**
- * Set one or more bits to true, based on the value of s
.
- * @param s the initialization String, which consists of a comma or space separated list of
- * non-negative numbers and ranges. An individual number represents the bit index to set.
- * A range (two numbers separated by a dash) causes all bit indexes between the two numbers
- * (inclusive) to be set.
- * @exception NumberFormatException - if a number is incorrectly formatted
- * @exception IndexOutOfBoundsException - if an index is negative
- */
- public void set(String s) throws NumberFormatException {
- s = s.trim();
- if (!s.isEmpty()) {
- for (String s2 : s.split("[, \n]+")) {
- if (s2.indexOf('-') >= 0) {
- String[] pp = s2.split("-");
- long f = Long.parseLong(pp[0]);
- long t = Long.parseLong(pp[1]);
- set(f, t+1);
- } else
- set(Long.parseLong(s2));
- }
- }
- }
- /**
- * Sets the bit at the specified index to {@code true}.
- * @param bit a bit index
- */
- public void set(long bit) {
- set(bit, bit+1);
- }
- /**
- * Sets the bits from the specified {@code from} (inclusive) to the
- * specified {@code to} (exclusive) to {@code true}.
- * @param from index of the first bit to be set
- * @param to index after the last bit to be set
- * @throws IndexOutOfBoundsException if {@code from} is negative,
- * or {@code to} is negative,
- * or {@code from} is larger than {@code to}
- */
- public void set(long from, long to) {
- checkRange(from, to);
- RLE newbits = new RLE(from, to-from);
- synchronized (bitsets) {
- for (RLE bs : bitsets) {
- if (bs.intersects(newbits)) {
- if (!newbits.isSubset(bs)) {
- bitsets.remove(bs);
- bitsets.add(newbits.union(bs));
- coalesce();
- }
- return;
- }
- }
- bitsets.add(newbits);
- }
- coalesce();
- }
- /**
- * Sets all of the bits in this BitSet to {@code false}.
- */
- public void clear() {
- synchronized (bitsets) {
- bitsets.clear();
- }
- }
- /**
- * Sets the bit specified by the index to {@code false}.
- * @param bit the index of the bit to be cleared
- */
- public void clear(long bit) {
- clear(bit, bit+1);
- }
- /**
- * Sets the bits from the specified {@code from} (inclusive) to the
- * specified {@code to} (exclusive) to {@code false}.
- * @param from index of the first bit to be cleared
- * @param to index after the last bit to be cleared
- * @throws IndexOutOfBoundsException if {@code from} is negative,
- * or {@code to} is negative,
- * or {@code from} is larger than {@code to}
- */
- public void clear(long from, long to) {
- checkRange(from, to);
- RLE newbits = new RLE(from, to-from);
- List newranges = new ArrayList();
- synchronized (bitsets) {
- for (RLE bs : bitsets) {
- if (bs.intersects(newbits)) {
- // preserve the bits that are not being cleared
- long len = newbits.firstBit() - bs.firstBit();
- if (len > 0)
- newranges.add(new RLE(bs.firstBit(), len));
- len = bs.lastBit() - newbits.lastBit();
- if (len > 0)
- newranges.add(new RLE(newbits.lastBit()+1, len));
- bs.nbits = 0;
- }
- }
- if (!newranges.isEmpty()) {
- for (RLE bs : newranges) {
- bitsets.add(bs);
- }
- }
- }
- coalesce();
- }
- /** Combine abutting RLEBitSets, and remove 0 length RLEBitSets. */
- private void coalesce() {
- RLE last = null;
- synchronized (bitsets) {
- Iterator iter = bitsets.iterator();
- while (iter.hasNext()) {
- RLE bs = iter.next();
- if (last != null && (last.lastBit()+1 == bs.firstBit())) {
- last.nbits += bs.nbits;
- iter.remove();
- } else if (bs.nbits == 0) {
- iter.remove();
- } else {
- last = bs;
- }
- }
- }
- }
- /**
- * Checks that fromIndex ... toIndex is a valid range of bit indices.
- */
- private static void checkRange(long from, long to) {
- if (from < 0)
- throw new IndexOutOfBoundsException("fromIndex < 0: " + from);
- if (to < 0)
- throw new IndexOutOfBoundsException("toIndex < 0: " + to);
- if (from > to)
- throw new IndexOutOfBoundsException("fromIndex: " + from + " > toIndex: " + to);
- }
- /**
- * Performs a logical AND of this target bit set with the argument bit set.
- * This bit set is modified so that each bit in it has the value {@code true} if and only if
- * it both initially had the value {@code true} and the corresponding bit in the bit set
- * argument also had the value {@code true}.
- * @param set a {@code RLEBitSet}
- */
- public void and(RLEBitSet set) {
- long last = 0;
- synchronized (set.bitsets) {
- for (RLE bs : set.bitsets) {
- clear(last, bs.start);
- last = bs.start + bs.nbits;
- }
- }
- clear(last, Long.MAX_VALUE);
- }
- /**
- * Clears all of the bits in this {@code RLEBitSet} whose corresponding bit is set in
- * the specified {@code RLEBitSet}.
- * @param set the {@code RLEBitSet} with which to mask this {@code RLEBitSet}
- */
- public void andNot(RLEBitSet set) {
- synchronized (set.bitsets) {
- for (RLE bs : set.bitsets) {
- clear(bs.start, bs.start + bs.nbits);
- }
- }
- }
- /**
- * Returns true if this {@code RLEBitSet} contains no bits that are set
- * to {@code true}.
- *
- * @return boolean indicating whether this {@code BitSet} is empty
- */
- public boolean isEmpty() {
- return bitsets.isEmpty();
- }
- /**
- * Returns the number of bits set to {@code true} in this {@code RLEBitSet}.
- * @return the number of bits set to {@code true} in this {@code RLEBitSet}.
- */
- public int cardinality() {
- int n = 0;
- synchronized (bitsets) {
- for (RLE bs : bitsets) {
- n += bs.cardinality();
- }
- }
- return n;
- }
- /**
- * Cloning this RLEBitSet produces a new RLEBitSet that is equal to it. The clone of the
- * bit set is another bit set that has exactly the same bits set to true as this bit set.
- * @return a clone of this bit set
- */
- public Object clone() {
- RLEBitSet rv = new RLEBitSet();
- synchronized (bitsets) {
- for (RLE bs : bitsets) {
- rv.bitsets.add(new RLE(bs.start, bs.nbits));
- }
- }
- return rv;
- }
- /**
- * Returns a string representation of this bit set, using the same notation as is required for
- * the String constructor. For every index for which this {@code RLEBitSet} contains a bit in
- * the set state, the decimal representation of that index is included in the result. Such
- * indices are listed in order from lowest to highest, separated by ",". Ranges of set bits are
- * indicated by lobit -hibit .
- * @return the String
- */
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
- String prefix = "";
- synchronized (bitsets) {
- for (RLE bs : bitsets) {
- sb.append(prefix);
- prefix = ",";
- long s = bs.firstBit();
- long e = bs.lastBit();
- sb.append(s);
- if (s != e)
- sb.append('-').append(e);
- }
- }
- return sb.toString();
- }
- /**
- * Return an Iterator which provides pairs of {@code Long}s representing the beginning and
- * ending index of a range of set bits in this {@code RLEBitSet}.
- * @return the Iterator
- */
- public Iterator getRangeIterator() {
- return new Iterator() {
- private Iterator i = bitsets.iterator();
-
- @Override
- public boolean hasNext() {
- return i.hasNext();
- }
-
- @Override
- public Long[] next() {
- RLE bs = i.next();
- return new Long[] { bs.firstBit(), bs.lastBit() };
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
- };
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/ThrottleFilter.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/ThrottleFilter.java
deleted file mode 100644
index 6eb866c8..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/ThrottleFilter.java
+++ /dev/null
@@ -1,316 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.utils;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.Vector;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import com.att.research.datarouter.provisioning.beans.Parameters;
-
-import org.apache.log4j.Logger;
-import org.eclipse.jetty.continuation.Continuation;
-import org.eclipse.jetty.continuation.ContinuationSupport;
-import org.eclipse.jetty.server.AbstractHttpConnection;
-import org.eclipse.jetty.server.Request;
-
-/**
- * This filter checks /publish requests to the provisioning server to allow ill-behaved publishers to be throttled.
- * It is configured via the provisioning parameter THROTTLE_FILTER.
- * The THROTTLE_FILTER provisioning parameter can have these values:
- *
- * (no value) filter disabled
- * off filter disabled
- * N[,M[,action]] set N, M, and action (used in the algorithm below).
- * Action is drop or throttle .
- * If M is missing, it defaults to 5 minutes.
- * If the action is missing, it defaults to drop .
- *
- *
- *
- * The action is triggered iff:
- *
- * the filter is enabled, and
- * N /publish requests come to the provisioning server in M minutes
- *
- * from the same IP address
- * for the same feed
- * lacking the Expect: 100-continue header
- *
- *
- *
- * The action that can be performed (if triggered) are:
- *
- * drop - the connection is dropped immediately.
- * throttle - [not supported] the connection is put into a low priority queue with all other throttled connections.
- * These are then processed at a slower rate. Note: this option does not work correctly, and is disabled.
- * The only action that is supported is drop .
- *
- *
- *
- * @author Robert Eby
- * @version $Id: ThrottleFilter.java,v 1.2 2014/03/12 19:45:41 eby Exp $
- */
-public class ThrottleFilter extends TimerTask implements Filter {
- public static final int DEFAULT_N = 10;
- public static final int DEFAULT_M = 5;
- public static final String THROTTLE_MARKER = "com.att.research.datarouter.provisioning.THROTTLE_MARKER";
- private static final String JETTY_REQUEST = "org.eclipse.jetty.server.Request";
- private static final long ONE_MINUTE = 60000L;
- private static final int ACTION_DROP = 0;
- private static final int ACTION_THROTTLE = 1;
-
- // Configuration
- private static boolean enabled = false; // enabled or not
- private static int n_requests = 0; // number of requests in M minutes
- private static int m_minutes = 0; // sampling period
- private static int action = ACTION_DROP; // action to take (throttle or drop)
-
- private static Logger logger = Logger.getLogger("com.att.research.datarouter.provisioning.internal");
- private static Map map = new HashMap();
- private static final Timer rolex = new Timer();
-
- @Override
- public void init(FilterConfig arg0) throws ServletException {
- configure();
- rolex.scheduleAtFixedRate(this, 5*60000L, 5*60000L); // Run once every 5 minutes to clean map
- }
-
- /**
- * Configure the throttle. This should be called from BaseServlet.provisioningParametersChanged(), to make sure it stays up to date.
- */
- public static void configure() {
- Parameters p = Parameters.getParameter(Parameters.THROTTLE_FILTER);
- if (p != null) {
- try {
- Class.forName(JETTY_REQUEST);
- String v = p.getValue();
- if (v != null && !v.equals("off")) {
- String[] pp = v.split(",");
- if (pp != null) {
- n_requests = (pp.length > 0) ? getInt(pp[0], DEFAULT_N) : DEFAULT_N;
- m_minutes = (pp.length > 1) ? getInt(pp[1], DEFAULT_M) : DEFAULT_M;
- action = (pp.length > 2 && pp[2] != null && pp[2].equalsIgnoreCase("throttle")) ? ACTION_THROTTLE : ACTION_DROP;
- enabled = true;
- // ACTION_THROTTLE is not currently working, so is not supported
- if (action == ACTION_THROTTLE) {
- action = ACTION_DROP;
- logger.info("Throttling is not currently supported; action changed to DROP");
- }
- logger.info("ThrottleFilter is ENABLED for /publish requests; N="+n_requests+", M="+m_minutes+", Action="+action);
- return;
- }
- }
- } catch (ClassNotFoundException e) {
- logger.warn("Class "+JETTY_REQUEST+" is not available; this filter requires Jetty.");
- }
- }
- logger.info("ThrottleFilter is DISABLED for /publish requests.");
- enabled = false;
- map.clear();
- }
- private static int getInt(String s, int deflt) {
- try {
- return Integer.parseInt(s);
- } catch (NumberFormatException x) {
- return deflt;
- }
- }
- @Override
- public void destroy() {
- rolex.cancel();
- map.clear();
- }
-
- @Override
- public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
- throws IOException, ServletException
- {
- if (enabled && action == ACTION_THROTTLE) {
- throttleFilter((HttpServletRequest) request, (HttpServletResponse) response, chain);
- } else if (enabled) {
- dropFilter((HttpServletRequest) request, (HttpServletResponse) response, chain);
- } else {
- chain.doFilter(request, response);
- }
- }
- public void dropFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
- throws IOException, ServletException
- {
- int rate = getRequestRate((HttpServletRequest) request);
- if (rate >= n_requests) {
- // drop request - only works under Jetty
- String m = String.format("Dropping connection: %s %d bad connections in %d minutes", getConnectionId((HttpServletRequest) request), rate, m_minutes);
- logger.info(m);
- Request base_request = (request instanceof Request)
- ? (Request) request
- : AbstractHttpConnection.getCurrentConnection().getRequest();
- base_request.getConnection().getEndPoint().close();
- } else {
- chain.doFilter(request, response);
- }
- }
- public void throttleFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain)
- throws IOException, ServletException
- {
- // throttle request
- String id = getConnectionId((HttpServletRequest) request);
- int rate = getRequestRate((HttpServletRequest) request);
- Object results = request.getAttribute(THROTTLE_MARKER);
- if (rate >= n_requests && results == null) {
- String m = String.format("Throttling connection: %s %d bad connections in %d minutes", getConnectionId((HttpServletRequest) request), rate, m_minutes);
- logger.info(m);
- Continuation continuation = ContinuationSupport.getContinuation(request);
- continuation.suspend();
- register(id, continuation);
- continuation.undispatch();
- } else {
- chain.doFilter(request, response);
- @SuppressWarnings("resource")
- InputStream is = request.getInputStream();
- byte[] b = new byte[4096];
- int n = is.read(b);
- while (n > 0) {
- n = is.read(b);
- }
- resume(id);
- }
- }
- private Map> suspended_requests = new HashMap>();
- private void register(String id, Continuation continuation) {
- synchronized (suspended_requests) {
- List list = suspended_requests.get(id);
- if (list == null) {
- list = new ArrayList();
- suspended_requests.put(id, list);
- }
- list.add(continuation);
- }
- }
- private void resume(String id) {
- synchronized (suspended_requests) {
- List list = suspended_requests.get(id);
- if (list != null) {
- // when the waited for event happens
- Continuation continuation = list.remove(0);
- continuation.setAttribute(ThrottleFilter.THROTTLE_MARKER, new Object());
- continuation.resume();
- }
- }
- }
-
- /**
- * Return a count of number of requests in the last M minutes, iff this is a "bad" request.
- * If the request has been resumed (if it contains the THROTTLE_MARKER) it is considered good.
- * @param request the request
- * @return number of requests in the last M minutes, 0 means it is a "good" request
- */
- private int getRequestRate(HttpServletRequest request) {
- String expecthdr = request.getHeader("Expect");
- if (expecthdr != null && expecthdr.equalsIgnoreCase("100-continue"))
- return 0;
-
- String key = getConnectionId(request);
- synchronized (map) {
- Counter cnt = map.get(key);
- if (cnt == null) {
- cnt = new Counter();
- map.put(key, cnt);
- }
- int n = cnt.getRequestRate();
- return n;
- }
- }
-
- public class Counter {
- private List times = new Vector(); // a record of request times
- public int prune() {
- try {
- long n = System.currentTimeMillis() - (m_minutes * ONE_MINUTE);
- long t = times.get(0);
- while (t < n) {
- times.remove(0);
- t = times.get(0);
- }
- } catch (IndexOutOfBoundsException e) {
- // ignore
- }
- return times.size();
- }
- public int getRequestRate() {
- times.add(System.currentTimeMillis());
- return prune();
- }
- }
-
- /**
- * Identify a connection by endpoint IP address, and feed ID.
- */
- private String getConnectionId(HttpServletRequest req) {
- return req.getRemoteAddr() + "/" + getFeedId(req);
- }
- private int getFeedId(HttpServletRequest req) {
- String path = req.getPathInfo();
- if (path == null || path.length() < 2)
- return -1;
- path = path.substring(1);
- int ix = path.indexOf('/');
- if (ix < 0 || ix == path.length()-1)
- return -2;
- try {
- int feedid = Integer.parseInt(path.substring(0, ix));
- return feedid;
- } catch (NumberFormatException e) {
- return -1;
- }
- }
-
- @Override
- public void run() {
- // Once every 5 minutes, go through the map, and remove empty entrys
- for (Object s : map.keySet().toArray()) {
- synchronized (map) {
- Counter c = map.get(s);
- if (c.prune() <= 0)
- map.remove(s);
- }
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/URLUtilities.java b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/URLUtilities.java
deleted file mode 100644
index c1793e52..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/URLUtilities.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.provisioning.utils;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Arrays;
-
-import com.att.research.datarouter.provisioning.BaseServlet;
-
-/**
- * Utility functions used to generate the different URLs used by the Data Router.
- *
- * @author Robert Eby
- * @version $Id: URLUtilities.java,v 1.2 2014/03/12 19:45:41 eby Exp $
- */
-public class URLUtilities {
- /**
- * Generate the URL used to access a feed.
- * @param feedid the feed id
- * @return the URL
- */
- public static String generateFeedURL(int feedid) {
- return "https://" + BaseServlet.prov_name + "/feed/" + feedid;
- }
- /**
- * Generate the URL used to publish to a feed.
- * @param feedid the feed id
- * @return the URL
- */
- public static String generatePublishURL(int feedid) {
- return "https://" + BaseServlet.prov_name + "/publish/" + feedid;
- }
- /**
- * Generate the URL used to subscribe to a feed.
- * @param feedid the feed id
- * @return the URL
- */
- public static String generateSubscribeURL(int feedid) {
- return "https://" + BaseServlet.prov_name + "/subscribe/" + feedid;
- }
- /**
- * Generate the URL used to access a feed's logs.
- * @param feedid the feed id
- * @return the URL
- */
- public static String generateFeedLogURL(int feedid) {
- return "https://" + BaseServlet.prov_name + "/feedlog/" + feedid;
- }
- /**
- * Generate the URL used to access a subscription.
- * @param subid the subscription id
- * @return the URL
- */
- public static String generateSubscriptionURL(int subid) {
- return "https://" + BaseServlet.prov_name + "/subs/" + subid;
- }
- /**
- * Generate the URL used to access a subscription's logs.
- * @param subid the subscription id
- * @return the URL
- */
- public static String generateSubLogURL(int subid) {
- return "https://" + BaseServlet.prov_name + "/sublog/" + subid;
- }
- /**
- * Generate the URL used to access the provisioning data on the peer POD.
- * @return the URL
- */
- public static String generatePeerProvURL() {
- return "https://" + getPeerPodName() + "/internal/prov";
- }
- /**
- * Generate the URL used to access the logfile data on the peer POD.
- * @return the URL
- */
- public static String generatePeerLogsURL() {
- //Fixes for Itrack ticket - DATARTR-4#Fixing if only one Prov is configured, not to give exception to fill logs.
- String peerPodUrl = getPeerPodName();
- if(peerPodUrl.equals("") || peerPodUrl.equals(null)){
- return "";
- }
-
- return "https://" + peerPodUrl + "/internal/drlogs/";
- }
- /**
- * Return the real (non CNAME) version of the peer POD's DNS name.
- * @return the name
- */
- public static String getPeerPodName() {
- if (other_pod == null) {
- String this_pod = "";
- try {
- this_pod = InetAddress.getLocalHost().getHostName();
- System.out.println("this_pod: "+this_pod);
- } catch (UnknownHostException e) {
- this_pod = "";
- }
- System.out.println("ALL PODS: "+Arrays.asList(BaseServlet.getPods()));
- for (String pod : BaseServlet.getPods()) {
- if (!pod.equals(this_pod))
- other_pod = pod;
- }
- }
- return other_pod;
- }
- private static String other_pod;
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/package.html b/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/package.html
deleted file mode 100644
index 7855bb40..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/provisioning/utils/package.html
+++ /dev/null
@@ -1,30 +0,0 @@
-#-------------------------------------------------------------------------------
-# ============LICENSE_START==================================================
-# * org.onap.dmaap
-# * ===========================================================================
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# * ===========================================================================
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# * ============LICENSE_END====================================================
-# *
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
-# *
-#-------------------------------------------------------------------------------
-
-
-
-
-This package provide various helper classes used by the provisioning server.
-
-
-
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/DailyLatencyReport.java b/datarouter-prov/src/main/java/com/att/research/datarouter/reports/DailyLatencyReport.java
deleted file mode 100644
index 63d612f9..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/DailyLatencyReport.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.reports;
-
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeSet;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * Generate a daily per feed latency report. The report is a .csv file containing the following columns:
- *
- * date the date for this record
- * feedid the Feed ID for this record
- * minsize the minimum size of all files published on this feed and date
- * maxsize the maximum size of all files published on this feed and date
- * avgsize the average size of all files published on this feed and date
- * minlat the minimum latency in delivering this feed to all subscribers (in ms)
- * maxlat the maximum latency in delivering this feed to all subscribers (in ms)
- * avglat the average latency in delivering this feed to all subscribers (in ms)
- * fanout the average number of subscribers this feed was delivered to
- *
- *
- * In the context of this report, latency is defined as the value
- * (De - Ps )
- * where:
- *
- * Ps is the time that the publication of the file to the node starts.
- * De is the time that the delivery of the file to the subscriber ends.
- *
- * @author Robert P. Eby
- * @version $Id: DailyLatencyReport.java,v 1.2 2013/11/06 16:23:54 eby Exp $
- */
-public class DailyLatencyReport extends ReportBase {
- private static final String SELECT_SQL =
- "select EVENT_TIME, TYPE, PUBLISH_ID, FEED_FILEID, FEEDID, CONTENT_LENGTH from LOG_RECORDS" +
- " where EVENT_TIME >= ? and EVENT_TIME <= ?";
-
- private class Job {
- public long pubtime = 0;
- public long clen = 0;
- public List deltime = new ArrayList();
- public long minLatency() {
- long n = deltime.isEmpty() ? 0 : Long.MAX_VALUE;
- for (Long l : deltime)
- n = Math.min(n, l-pubtime);
- return n;
- }
- public long maxLatency() {
- long n = 0;
- for (Long l : deltime)
- n = Math.max(n, l-pubtime);
- return n;
- }
- public long totalLatency() {
- long n = 0;
- for (Long l : deltime)
- n += (l-pubtime);
- return n;
- }
- }
- private class Counters {
- public final String date;
- public final int feedid;
- public final Map jobs;
- public Counters(String d, int fid) {
- date = d;
- feedid = fid;
- jobs = new HashMap();
- }
- public void addEvent(long etime, String type, String id, String fid, long clen) {
- Job j = jobs.get(id);
- if (j == null) {
- j = new Job();
- jobs.put(id, j);
- }
- if (type.equals("pub")) {
- j.pubtime = getPstart(id);
- j.clen = clen;
- } else if (type.equals("del")) {
- j.deltime.add(etime);
- }
- }
- @Override
- public String toString() {
- long minsize = Long.MAX_VALUE, maxsize = 0, avgsize = 0;
- long minl = Long.MAX_VALUE, maxl = 0;
- long fanout = 0, totall = 0, totaln = 0;
- for (Job j : jobs.values()) {
- minsize = Math.min(minsize, j.clen);
- maxsize = Math.max(maxsize, j.clen);
- avgsize += j.clen;
- minl = Math.min(minl, j.minLatency());
- maxl = Math.max(maxl, j.maxLatency());
- totall += j.totalLatency();
- totaln += j.deltime.size();
- fanout += j.deltime.size();
- }
- if (jobs.size() > 0) {
- avgsize /= jobs.size();
- fanout /= jobs.size();
- }
- long avgl = (totaln > 0) ? (totall / totaln) : 0;
- return date + "," + feedid + "," + minsize + "," + maxsize + "," + avgsize + "," + minl + "," + maxl + "," + avgl + "," + fanout;
- }
- }
- private long getPstart(String t) {
- if (t.indexOf('.') > 0)
- t = t.substring(0, t.indexOf('.'));
- return Long.parseLong(t);
- }
-
- @Override
- public void run() {
- Map map = new HashMap();
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
- long start = System.currentTimeMillis();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
- ps.setLong(1, from);
- ps.setLong(2, to);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- String id = rs.getString("PUBLISH_ID");
- int feed = rs.getInt("FEEDID");
- long etime = rs.getLong("EVENT_TIME");
- String type = rs.getString("TYPE");
- String fid = rs.getString("FEED_FILEID");
- long clen = rs.getLong("CONTENT_LENGTH");
- String date = sdf.format(new Date(getPstart(id)));
- String key = date + "," + feed;
- Counters c = map.get(key);
- if (c == null) {
- c = new Counters(date, feed);
- map.put(key, c);
- }
- c.addEvent(etime, type, id, fid, clen);
- }
- rs.close();
- ps.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
- try {
- PrintWriter os = new PrintWriter(outfile);
- os.println("date,feedid,minsize,maxsize,avgsize,minlat,maxlat,avglat,fanout");
- for (String key : new TreeSet(map.keySet())) {
- Counters c = map.get(key);
- os.println(c.toString());
- }
- os.close();
- } catch (FileNotFoundException e) {
- System.err.println("File cannot be written: "+outfile);
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/FeedReport.java b/datarouter-prov/src/main/java/com/att/research/datarouter/reports/FeedReport.java
deleted file mode 100644
index 9fe7e27f..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/FeedReport.java
+++ /dev/null
@@ -1,395 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.reports;
-
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.LineNumberReader;
-import java.io.PrintWriter;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * Generate a feeds report. The report is a .CSV file.
- *
- * @author Robert P. Eby
- * @version $Id: FeedReport.java,v 1.2 2013/11/06 16:23:55 eby Exp $
- */
-public class FeedReport extends ReportBase {
- private static final String SELECT_SQL =
- // Note to use the time in the publish_id, use date(from_unixtime(substring(publish_id, 1, 10)))
- // To just use month, substring(from_unixtime(event_time div 1000), 1, 7)
- "select date(from_unixtime(event_time div 1000)) as date, type, feedid, delivery_subid, count(*) as count" +
- " from LOG_RECORDS" +
- " where type = 'pub' or type = 'del'" +
- " group by date, type, feedid, delivery_subid";
- private static final String SELECT_SQL_OLD =
- "select PUBLISH_ID, TYPE, FEEDID, DELIVERY_SUBID from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ?";
-
- @Override
- public void run() {
- boolean alg1 = true;
- JSONObject jo = new JSONObject();
- long start = System.currentTimeMillis();
- StringBuilder sb = new StringBuilder();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
-// ps.setLong(1, from);
-// ps.setLong(2, to);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- if (alg1) {
- String date = rs.getString("date");
- String type = rs.getString("type");
- int feedid = rs.getInt("feedid");
- int subid = type.equals("del") ? rs.getInt("delivery_subid") : 0;
- int count = rs.getInt("count");
- sb.append(date + "," + type + "," + feedid + "," + subid + "," + count + "\n");
- } else {
- String date = rs.getString("date");
- JSONObject datemap = jo.optJSONObject(date);
- if (datemap == null) {
- datemap = new JSONObject();
- jo.put(date, datemap);
- }
- int feed = rs.getInt("FEEDID");
- JSONObject feedmap = datemap.optJSONObject(""+feed);
- if (feedmap == null) {
- feedmap = new JSONObject();
- feedmap.put("pubcount", 0);
- datemap.put(""+feed, feedmap);
- }
- String type = rs.getString("TYPE");
- int count = rs.getInt("count");
- if (type.equals("pub")) {
- feedmap.put("pubcount", count);
- } else if (type.equals("del")) {
- String subid = ""+rs.getInt("DELIVERY_SUBID");
- feedmap.put(subid, count);
- }
- }
- }
- rs.close();
- ps.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
- try {
- PrintWriter os = new PrintWriter(outfile);
- if (alg1) {
- os.print("date,type,feedid,subid,count\n");
- os.print(sb.toString());
- } else {
- os.println(toHTML(jo));
- }
- os.close();
- } catch (FileNotFoundException e) {
- System.err.println("File cannot be written: "+outfile);
- }
- }
-
- public void run2() {
- JSONObject jo = new JSONObject();
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
- long start = System.currentTimeMillis();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL_OLD);
- ps.setLong(1, from);
- ps.setLong(2, to);
- ps.setFetchSize(100000);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- String id = rs.getString("PUBLISH_ID");
- String date = sdf.format(new Date(getPstart(id)));
- JSONObject datemap = jo.optJSONObject(date);
- if (datemap == null) {
- datemap = new JSONObject();
- jo.put(date, datemap);
- }
- int feed = rs.getInt("FEEDID");
- JSONObject feedmap = datemap.optJSONObject(""+feed);
- if (feedmap == null) {
- feedmap = new JSONObject();
- feedmap.put("pubcount", 0);
- datemap.put(""+feed, feedmap);
- }
- String type = rs.getString("TYPE");
- if (type.equals("pub")) {
- try {
- int n = feedmap.getInt("pubcount");
- feedmap.put("pubcount", n+1);
- } catch (JSONException e) {
- feedmap.put("pubcount", 1);
- }
- } else if (type.equals("del")) {
- String subid = ""+rs.getInt("DELIVERY_SUBID");
- try {
- int n = feedmap.getInt(subid);
- feedmap.put(subid, n+1);
- } catch (JSONException e) {
- feedmap.put(subid, 1);
- }
- }
- }
- rs.close();
- ps.close();
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
- try {
- PrintWriter os = new PrintWriter(outfile);
- os.println(toHTML(jo));
- os.close();
- } catch (FileNotFoundException e) {
- System.err.println("File cannot be written: "+outfile);
- }
- }
- private long getPstart(String t) {
- if (t.indexOf('.') > 0)
- t = t.substring(0, t.indexOf('.'));
- return Long.parseLong(t);
- }
- @SuppressWarnings("unused")
- private static String toHTMLNested(JSONObject jo) {
- StringBuilder s = new StringBuilder();
- s.append("\n");
- s.append("Date Feeds \n");
- String[] dates = JSONObject.getNames(jo);
- Arrays.sort(dates);
- for (int i = dates.length-1; i >= 0; i--) {
- String date = dates[i];
- JSONObject j2 = jo.getJSONObject(date);
- String[] feeds = JSONObject.getNames(j2);
- Arrays.sort(feeds);
- s.append(""+date+" ");
- s.append(feeds.length).append(feeds.length > 1 ? " Feeds\n" : " Feed\n");
- s.append("\n");
- s.append("Feed ID Publish Count Subscriptions \n");
- for (String feed : feeds) {
- JSONObject j3 = j2.getJSONObject(feed);
- String[] subs = JSONObject.getNames(j3);
- Arrays.sort(subs);
- s.append(""+feed+" ");
- s.append(""+j3.getInt("pubcount")+" ");
- int scnt = j3.length()-1;
- s.append("").append(scnt).append(" Subcription");
- if (scnt > 1)
- s.append("s");
- s.append("\n");
- s.append("Sub ID Delivery Count \n");
- for (String sub : subs) {
- if (!sub.equals("pubcount")) {
- s.append(""+sub+" ");
- s.append(""+j3.getInt(sub)+" ");
- s.append(" \n");
- }
- }
- s.append("
\n");
-
- s.append(" \n");
- }
- s.append("
\n");
- s.append(" \n");
- }
- s.append("
\n");
- return s.toString();
- }
- private static String toHTML(JSONObject jo) {
- StringBuilder s = new StringBuilder();
- s.append("\n");
- s.append("Date Feeds Feed ID Publish Count Subs Sub ID Delivery Count \n");
- String[] dates = JSONObject.getNames(jo);
- Arrays.sort(dates);
- for (int i = dates.length-1; i >= 0; i--) {
- String date = dates[i];
- JSONObject j2 = jo.getJSONObject(date);
- int rc1 = countrows(j2);
- String[] feeds = JSONObject.getNames(j2);
- Arrays.sort(feeds);
- s.append("")
- .append(date)
- .append(" ");
- s.append("")
- .append(feeds.length)
- .append(" ");
- String px1 = "";
- for (String feed : feeds) {
- JSONObject j3 = j2.getJSONObject(feed);
- int pubcount = j3.getInt("pubcount");
- int subcnt = j3.length()-1;
- int rc2 = (subcnt < 1) ? 1 : subcnt;
- String[] subs = JSONObject.getNames(j3);
- Arrays.sort(subs);
- s.append(px1)
- .append("")
- .append(feed)
- .append(" ");
- s.append("")
- .append(pubcount)
- .append(" ");
- s.append("")
- .append(subcnt)
- .append(" ");
- String px2 = "";
- for (String sub : subs) {
- if (!sub.equals("pubcount")) {
- s.append(px2);
- s.append(""+sub+" ");
- s.append(""+j3.getInt(sub)+" ");
- s.append(" \n");
- px2 = "";
- }
- }
- if (px2.equals(""))
- s.append(" \n");
- px1 = "";
- }
- }
- s.append("
\n");
- return s.toString();
- }
- private static int countrows(JSONObject x) {
- int n = 0;
- for (String feed : JSONObject.getNames(x)) {
- JSONObject j3 = x.getJSONObject(feed);
- int subcnt = j3.length()-1;
- int rc2 = (subcnt < 1) ? 1 : subcnt;
- n += rc2;
- }
- return (n > 0) ? n : 1;
- }
-
- /**
- * Convert a .CSV file (as generated by the normal FeedReport mechanism) to an HTML table.
- * @param args
- */
- public static void main(String[] args) {
- int rtype = 0; // 0 -> day, 1 -> week, 2 -> month, 3 -> year
- String infile = null;
- String outfile = null;
- for (int i = 0; i < args.length; i++) {
- if (args[i].equals("-t")) {
- switch (args[++i].charAt(0)) {
- case 'w': rtype = 1; break;
- case 'm': rtype = 2; break;
- case 'y': rtype = 3; break;
- default: rtype = 0; break;
- }
- } else if (infile == null) {
- infile = args[i];
- } else if (outfile == null) {
- outfile = args[i];
- }
- }
- if (infile == null) {
- System.err.println("usage: FeedReport [ -t ] [ ] [ ]");
- System.exit(1);
- }
- try {
- JSONObject jo = new JSONObject();
- LineNumberReader lr = new LineNumberReader(new FileReader(infile));
- String line = lr.readLine();
- while (line != null) {
- String[] tt = line.split(",");
- if (tt[0].startsWith("2")) {
- String date = tt[0];
- switch (rtype) {
- case 1:
- String[] xx = date.split("-");
- Calendar cal = new GregorianCalendar(new Integer(xx[0]), new Integer(xx[1])-1, new Integer(xx[2]));
- date = xx[0] + "-W" + cal.get(Calendar.WEEK_OF_YEAR);
- break;
- case 2: date = date.substring(0, 7); break;
- case 3: date = date.substring(0, 4); break;
- }
- JSONObject datemap = jo.optJSONObject(date);
- if (datemap == null) {
- datemap = new JSONObject();
- jo.put(date, datemap);
- }
- int feed = Integer.parseInt(tt[2]);
- JSONObject feedmap = datemap.optJSONObject(""+feed);
- if (feedmap == null) {
- feedmap = new JSONObject();
- feedmap.put("pubcount", 0);
- datemap.put(""+feed, feedmap);
- }
- String type = tt[1];
- int count = Integer.parseInt(tt[4]);
- if (type.equals("pub")) {
- try {
- int n = feedmap.getInt("pubcount");
- feedmap.put("pubcount", n+count);
- } catch (JSONException e) {
- feedmap.put("pubcount", count);
- }
- } else if (type.equals("del")) {
- String subid = tt[3];
- try {
- int n = feedmap.getInt(subid);
- feedmap.put(subid, n+count);
- } catch (JSONException e) {
- feedmap.put(subid, count);
- }
- }
- }
- line = lr.readLine();
- }
- lr.close();
- String t = toHTML(jo);
- switch (rtype) {
- case 1: t = t.replaceAll("Date ", "Week "); break;
- case 2: t = t.replaceAll("Date ", "Month "); break;
- case 3: t = t.replaceAll("Date ", "Year "); break;
- }
- System.out.println(t);
- } catch (Exception e) {
- System.err.println(e);
- e.printStackTrace();
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/LatencyReport.java b/datarouter-prov/src/main/java/com/att/research/datarouter/reports/LatencyReport.java
deleted file mode 100644
index 96e096e9..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/LatencyReport.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.reports;
-
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * Generate a per-file latency report. It reports on the details related to one file published
- * on one feed. This report can be further reduced in order to generate more specific reports
- * based on feed ID or node name. The report is a .csv file containing the following columns:
- *
- * recordid the unique record ID assigned to a particular incoming feed
- * feedid the Feed ID for this record
- * uri the URI of the file delivered
- * size the size of the file delivered
- * min the minimum latency in delivering this feed to a subscriber (in ms)
- * max the maximum latency in delivering this feed to a subscriber (in ms)
- * avg the average latency in delivering this feed to all subscribers (in ms)
- * fanout the number of subscribers this feed was delivered to
- *
- *
- * @author Robert P. Eby
- * @version $Id: LatencyReport.java,v 1.1 2013/10/28 18:06:53 eby Exp $
- */
-public class LatencyReport extends ReportBase {
- private static final String SELECT_SQL =
- "select EVENT_TIME, TYPE, PUBLISH_ID, FEED_FILEID, FEEDID, CONTENT_LENGTH from LOG_RECORDS" +
- " where EVENT_TIME >= ? and EVENT_TIME <= ? order by PUBLISH_ID, EVENT_TIME";
-
- private class Event {
- public final String type;
- public final long time;
- public Event(String t, long tm) {
- type = t;
- time = tm;
- }
- }
- private class Counters {
- public final String id;
- public final int feedid;
- public final long clen;
- public final String fileid;
- public final List events;
- public Counters(String i, int fid, long c, String s) {
- id = i;
- feedid = fid;
- clen = c;
- fileid = s;
- events = new ArrayList();
- }
- private long pubtime;
- public void addEvent(String t, long tm) {
- events.add(new Event(t, tm));
- if (t.equals("pub"))
- pubtime = tm;
- }
- public long min() {
- long min = Long.MAX_VALUE;
- for (Event e : events) {
- if (e.type.equals("del")) {
- min = Math.min(min, e.time - pubtime);
- }
- }
- return min;
- }
- public long max() {
- long max = 0;
- for (Event e : events) {
- if (e.type.equals("del")) {
- max = Math.max(max, e.time - pubtime);
- }
- }
- return max;
- }
- public long avg() {
- long total = 0, c = 0;
- for (Event e : events) {
- if (e.type.equals("del")) {
- total += e.time - pubtime;
- c++;
- }
- }
- return (c == 0) ? 0 : total/c;
- }
- public int fanout() {
- int n = 0;
- for (Event e : events) {
- if (e.type.equals("del")) {
- n++;
- }
- }
- return n;
- }
- @Override
- public String toString() {
- return feedid + "," + fileid + "," + clen + "," + min() + "," + max() + "," + avg() + "," + fanout();
- }
- }
-
- @Override
- public void run() {
- long start = System.currentTimeMillis();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
- ps.setLong(1, from);
- ps.setLong(2, to);
- ResultSet rs = ps.executeQuery();
- PrintWriter os = new PrintWriter(outfile);
- os.println("recordid,feedid,uri,size,min,max,avg,fanout");
- Counters c = null;
- while (rs.next()) {
- long etime = rs.getLong("EVENT_TIME");
- String type = rs.getString("TYPE");
- String id = rs.getString("PUBLISH_ID");
- String fid = rs.getString("FEED_FILEID");
- int feed = rs.getInt("FEEDID");
- long clen = rs.getLong("CONTENT_LENGTH");
- if (c != null && !id.equals(c.id)) {
- String line = id + "," + c.toString();
- os.println(line);
- c = null;
- }
- if (c == null) {
- c = new Counters(id, feed, clen, fid);
- }
- if (feed != c.feedid)
- System.err.println("Feed ID mismatch, "+feed+" <=> "+c.feedid);
- if (clen != c.clen)
- System.err.println("Cont Len mismatch, "+clen+" <=> "+c.clen);
-// if (fid != c.fileid)
-// System.err.println("File ID mismatch, "+fid+" <=> "+c.fileid);
- c.addEvent(type, etime);
- }
- rs.close();
- ps.close();
- db.release(conn);
- os.close();
- } catch (FileNotFoundException e) {
- System.err.println("File cannot be written: "+outfile);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/Report.java b/datarouter-prov/src/main/java/com/att/research/datarouter/reports/Report.java
deleted file mode 100644
index bd64e0eb..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/Report.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.reports;
-
-import java.lang.reflect.Constructor;
-import java.util.Calendar;
-import java.util.GregorianCalendar;
-import java.util.TimeZone;
-
-/**
- * This class provides a CLI to generate any of the reports defined in this package.
- *
- * @author Robert P. Eby
- * @version $Id: Report.java,v 1.2 2013/11/06 16:23:55 eby Exp $
- */
-public class Report {
- /**
- * Generate .csv report files from the database. Usage:
- *
- * java com.att.research.datarouter.reports.Report [ -t type ] [ -o outfile ] [ fromdate [ todate ]]
- *
- * type should be volume for a {@link VolumeReport},
- * feed for a {@link FeedReport},
- * latency for a {@link LatencyReport}, or
- * dailyLatency for a {@link DailyLatencyReport}.
- * If outfile is not specified, the report goes into a file /tmp/nnnnnnnnnnnnn.csv ,
- * where nnnnnnnnnnnnn is the current time in milliseconds.
- * If from and to are not specified, then the report is limited to the last weeks worth of data.
- * from can be the keyword ALL to specify all data in the DB, or the keyword yesterday .
- * Otherwise, from and to should match the pattern YYYY-MM-DD.
- * @param args the command line arguments
- */
- public static void main(String[] args) {
- ReportBase report = new VolumeReport();
- String outfile = "/tmp/" + System.currentTimeMillis() + ".csv";
- String from = null, to = null;
-
- for (int i = 0; i < args.length; i++) {
- if (args[i].equals("-?")) {
- System.err.println("usage: java com.att.research.datarouter.reports.Report [ -t type ] [ -o outfile ] [ fromdate [ todate ]]");
- System.exit(0);
- } else if (args[i].equals("-o")) {
- if (++i < args.length) {
- outfile = args[i];
- }
- } else if (args[i].equals("-t")) {
- if (++i < args.length) {
- String base = args[i];
- base = Character.toUpperCase(base.charAt(0)) + base.substring(1);
- base = "com.att.research.datarouter.reports."+base+"Report";
- try {
- @SuppressWarnings("unchecked")
- Class extends ReportBase> cl = (Class extends ReportBase>) Class.forName(base);
- Constructor extends ReportBase> con = cl.getConstructor();
- report = con.newInstance();
- } catch (Exception e) {
- System.err.println("Unknown report type: "+args[i]);
- System.exit(1);
- }
- }
- } else if (from == null) {
- from = args[i];
- } else {
- to = args[i];
- }
- }
- long lfrom = 0, lto = 0;
- if (from == null) {
- // last 7 days
- TimeZone utc = TimeZone.getTimeZone("UTC");
- Calendar cal = new GregorianCalendar(utc);
- cal.set(Calendar.HOUR_OF_DAY, 0);
- cal.set(Calendar.MINUTE, 0);
- cal.set(Calendar.SECOND, 0);
- cal.set(Calendar.MILLISECOND, 0);
- lfrom = cal.getTimeInMillis() - (7 * 24 * 60 * 60 * 1000L); // 1 week
- lto = cal.getTimeInMillis() - 1;
- } else if (to == null) {
- try {
- String[] dates = getDates(from);
- lfrom = Long.parseLong(dates[0]);
- lto = Long.parseLong(dates[1]);
- } catch (Exception e) {
- System.err.println("Invalid date: "+from);
- System.exit(1);
- }
- } else {
- String[] dates;
- try {
- dates = getDates(from);
- lfrom = Long.parseLong(dates[0]);
- } catch (Exception e) {
- System.err.println("Invalid date: "+from);
- System.exit(1);
- }
- try {
- dates = getDates(to);
- lto = Long.parseLong(dates[0]);
- } catch (Exception e) {
- System.err.println("Invalid date: "+to);
- System.exit(1);
- }
- }
-
- report.setFrom(lfrom);
- report.setTo(lto);
- report.setOutputFile(outfile);
- report.run();
- }
-
- private static String[] getDates(String d) throws Exception {
- if (d.equals("ALL"))
- return new String[] { "1", ""+System.currentTimeMillis() };
-
- TimeZone utc = TimeZone.getTimeZone("UTC");
- Calendar cal = new GregorianCalendar(utc);
- if (d.matches("20\\d\\d-\\d\\d-\\d\\d")) {
- cal.set(Calendar.YEAR, Integer.parseInt(d.substring(0, 4)));
- cal.set(Calendar.MONTH, Integer.parseInt(d.substring(5, 7))-1);
- cal.set(Calendar.DAY_OF_MONTH, Integer.parseInt(d.substring(8, 10)));
- } else if (d.equals("yesterday")) {
- cal.add(Calendar.DAY_OF_YEAR, -1);
- } else
- throw new Exception("wa?");
- cal.set(Calendar.HOUR_OF_DAY, 0);
- cal.set(Calendar.MINUTE, 0);
- cal.set(Calendar.SECOND, 0);
- cal.set(Calendar.MILLISECOND, 0);
- long start = cal.getTimeInMillis();
- long end = start + (24 * 60 * 60 * 1000L) - 1;
- return new String[] { ""+start, ""+end };
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/ReportBase.java b/datarouter-prov/src/main/java/com/att/research/datarouter/reports/ReportBase.java
deleted file mode 100644
index 2bdabf19..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/ReportBase.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-package com.att.research.datarouter.reports;
-
-import org.apache.log4j.Logger;
-
-/**
- * Base class for all the report generating classes.
- *
- * @author Robert P. Eby
- * @version $Id: ReportBase.java,v 1.1 2013/10/28 18:06:53 eby Exp $
- */
-abstract public class ReportBase implements Runnable {
- protected long from, to;
- protected String outfile;
- protected Logger logger;
-
- public ReportBase() {
- this.from = 0;
- this.to = System.currentTimeMillis();
- this.logger = Logger.getLogger("com.att.research.datarouter.reports");
- }
-
- public void setFrom(long from) {
- this.from = from;
- }
-
- public void setTo(long to) {
- this.to = to;
- }
-
- public String getOutfile() {
- return outfile;
- }
-
- public void setOutputFile(String s) {
- this.outfile = s;
- }
-
- @Override
- abstract public void run();
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/SubscriberReport.java b/datarouter-prov/src/main/java/com/att/research/datarouter/reports/SubscriberReport.java
deleted file mode 100644
index b003ab12..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/SubscriberReport.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.reports;
-
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.TreeSet;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * Generate a subscribers report. The report is a .CSV file. It contains information per-day and per-subscriber,
- * on the status codes returned from each delivery attempt (1XX, 2XX, etc.) as well as a count of 4XX instead of a 100.
- *
- * @author Robert P. Eby
- * @version $Id: SubscriberReport.java,v 1.2 2013/11/06 16:23:55 eby Exp $
- */
-public class SubscriberReport extends ReportBase {
- private static final String SELECT_SQL =
- "select date(from_unixtime(EVENT_TIME div 1000)) as DATE, DELIVERY_SUBID, RESULT, COUNT(RESULT) as COUNT" +
- " from LOG_RECORDS" +
- " where TYPE = 'del' and EVENT_TIME >= ? and EVENT_TIME <= ?" +
- " group by DATE, DELIVERY_SUBID, RESULT";
- private static final String SELECT_SQL2 =
- "select date(from_unixtime(EVENT_TIME div 1000)) as DATE, DELIVERY_SUBID, COUNT(CONTENT_LENGTH_2) as COUNT" +
- " from LOG_RECORDS" +
- " where TYPE = 'dlx' and CONTENT_LENGTH_2 = -1 and EVENT_TIME >= ? and EVENT_TIME <= ?" +
- " group by DATE, DELIVERY_SUBID";
-
- private class Counters {
- private String date;
- private int sub;
- private int c100, c200, c300, c400, c500, cm1, cdlx;
- public Counters(String date, int sub) {
- this.date = date;
- this.sub = sub;
- c100 = c200 = c300 = c400 = c500 = cm1 = cdlx = 0;
- }
- public void addCounts(int status, int n) {
- if (status < 0) {
- cm1 += n;
- } else if (status >= 100 && status <= 199) {
- c100 += n;
- } else if (status >= 200 && status <= 299) {
- c200 += n;
- } else if (status >= 300 && status <= 399) {
- c300 += n;
- } else if (status >= 400 && status <= 499) {
- c400 += n;
- } else if (status >= 500 && status <= 599) {
- c500 += n;
- }
- }
- public void addDlxCount(int n) {
- cdlx += n;
- }
- @Override
- public String toString() {
- return date + "," + sub + "," +
- c100 + "," + c200 + "," + c300 + "," + c400 + "," + c500 + "," +
- cm1 + "," + cdlx;
- }
- }
-
- @Override
- public void run() {
- Map map = new HashMap();
- long start = System.currentTimeMillis();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
- ps.setLong(1, from);
- ps.setLong(2, to);
- ResultSet rs = ps.executeQuery();
- while (rs.next()) {
- String date = rs.getString("DATE");
- int sub = rs.getInt("DELIVERY_SUBID");
- int res = rs.getInt("RESULT");
- int count = rs.getInt("COUNT");
- String key = date + "," + sub;
- Counters c = map.get(key);
- if (c == null) {
- c = new Counters(date, sub);
- map.put(key, c);
- }
- c.addCounts(res, count);
- }
- rs.close();
- ps.close();
-
- ps = conn.prepareStatement(SELECT_SQL2);
- ps.setLong(1, from);
- ps.setLong(2, to);
- rs = ps.executeQuery();
- while (rs.next()) {
- String date = rs.getString("DATE");
- int sub = rs.getInt("DELIVERY_SUBID");
- int count = rs.getInt("COUNT");
- String key = date + "," + sub;
- Counters c = map.get(key);
- if (c == null) {
- c = new Counters(date, sub);
- map.put(key, c);
- }
- c.addDlxCount(count);
- }
- rs.close();
- ps.close();
-
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
- try {
- PrintWriter os = new PrintWriter(outfile);
- os.println("date,subid,count100,count200,count300,count400,count500,countminus1,countdlx");
- for (String key : new TreeSet(map.keySet())) {
- Counters c = map.get(key);
- os.println(c.toString());
- }
- os.close();
- } catch (FileNotFoundException e) {
- System.err.println("File cannot be written: "+outfile);
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/VolumeReport.java b/datarouter-prov/src/main/java/com/att/research/datarouter/reports/VolumeReport.java
deleted file mode 100644
index 92a85e2e..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/VolumeReport.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START==================================================
- * * org.onap.dmaap
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-
-
-package com.att.research.datarouter.reports;
-
-import java.io.FileNotFoundException;
-import java.io.PrintWriter;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.TreeSet;
-
-import com.att.research.datarouter.provisioning.utils.DB;
-
-/**
- * Generate a traffic volume report. The report is a .csv file containing the following columns:
- *
- * date the date for this record
- * feedid the Feed ID for this record
- * filespublished the number of files published on this feed and date
- * bytespublished the number of bytes published on this feed and date
- * filesdelivered the number of files delivered on this feed and date
- * bytesdelivered the number of bytes delivered on this feed and date
- * filesexpired the number of files expired on this feed and date
- * bytesexpired the number of bytes expired on this feed and date
- *
- *
- * @author Robert P. Eby
- * @version $Id: VolumeReport.java,v 1.3 2014/02/28 15:11:13 eby Exp $
- */
-public class VolumeReport extends ReportBase {
- private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" +
- " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";
-
- private class Counters {
- public int filespublished, filesdelivered, filesexpired;
- public long bytespublished, bytesdelivered, bytesexpired;
- @Override
- public String toString() {
- return String.format("%d,%d,%d,%d,%d,%d",
- filespublished, bytespublished, filesdelivered,
- bytesdelivered, filesexpired, bytesexpired);
- }
- }
-
- @Override
- public void run() {
- Map map = new HashMap();
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
- long start = System.currentTimeMillis();
- try {
- DB db = new DB();
- @SuppressWarnings("resource")
- Connection conn = db.getConnection();
- // We need to run this SELECT in stages, because otherwise we run out of memory!
- final long stepsize = 6000000L;
- boolean go_again = true;
- for (long i = 0; go_again; i += stepsize) {
- PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
- ps.setLong(1, from);
- ps.setLong(2, to);
- ps.setLong(3, i);
- ps.setLong(4, stepsize);
- ResultSet rs = ps.executeQuery();
- go_again = false;
- while (rs.next()) {
- go_again = true;
- long etime = rs.getLong("EVENT_TIME");
- String type = rs.getString("TYPE");
- int feed = rs.getInt("FEEDID");
- long clen = rs.getLong("CONTENT_LENGTH");
- String key = sdf.format(new Date(etime)) + ":" + feed;
- Counters c = map.get(key);
- if (c == null) {
- c = new Counters();
- map.put(key, c);
- }
- if (type.equalsIgnoreCase("pub")) {
- c.filespublished++;
- c.bytespublished += clen;
- } else if (type.equalsIgnoreCase("del")) {
- // Only count successful deliveries
- int statusCode = rs.getInt("RESULT");
- if (statusCode >= 200 && statusCode < 300) {
- c.filesdelivered++;
- c.bytesdelivered += clen;
- }
- } else if (type.equalsIgnoreCase("exp")) {
- c.filesexpired++;
- c.bytesexpired += clen;
- }
- }
- rs.close();
- ps.close();
- }
- db.release(conn);
- } catch (SQLException e) {
- e.printStackTrace();
- }
- logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
- try {
- PrintWriter os = new PrintWriter(outfile);
- os.println("date,feedid,filespublished,bytespublished,filesdelivered,bytesdelivered,filesexpired,bytesexpired");
- for (String key : new TreeSet(map.keySet())) {
- Counters c = map.get(key);
- String[] p = key.split(":");
- os.println(String.format("%s,%s,%s", p[0], p[1], c.toString()));
- }
- os.close();
- } catch (FileNotFoundException e) {
- System.err.println("File cannot be written: "+outfile);
- }
- }
-}
diff --git a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/package.html b/datarouter-prov/src/main/java/com/att/research/datarouter/reports/package.html
deleted file mode 100644
index 2c2d26b3..00000000
--- a/datarouter-prov/src/main/java/com/att/research/datarouter/reports/package.html
+++ /dev/null
@@ -1,43 +0,0 @@
-#-------------------------------------------------------------------------------
-# ============LICENSE_START==================================================
-# * org.onap.dmaap
-# * ===========================================================================
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# * ===========================================================================
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# * ============LICENSE_END====================================================
-# *
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
-# *
-#-------------------------------------------------------------------------------
-
-
-
-
-This package provides various classes which are used to generate .CSV files from the logs
-in the database.
-The .CSV files can then be used to generate reports on another web server external from the DR network.
-
-
-The classes in this package, and the reports they generate are:
-
-
-Class Report
-{@link com.att.research.datarouter.reports.DailyLatencyReport} dailylatency.csv
-{@link com.att.research.datarouter.reports.FeedReport} NOT CURRENTLY USED
-{@link com.att.research.datarouter.reports.LatencyReport}
-{@link com.att.research.datarouter.reports.SubscriberReport} subscriber.csv
-{@link com.att.research.datarouter.reports.VolumeReport} volumes.csv
-
-
-
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/AuthorizationResponse.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/AuthorizationResponse.java
new file mode 100644
index 00000000..e078429c
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/AuthorizationResponse.java
@@ -0,0 +1,58 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+package org.onap.dmaap.datarouter.authz;
+
+import java.util.List;
+
+/**
+ * The AuthorizationResponse
interface gives the caller access to information about an authorization
+ * decision. This information includes the permit/deny decision itself, along with supplementary information in the form of
+ * advice and obligations. (The advice and obligations will not be used in Data Router R1.)
+ *
+ * @author J. F. Lucas
+ *
+ */
+public interface AuthorizationResponse {
+ /**
+ * Indicates whether the request is authorized or not.
+ *
+ * @return a boolean flag that is true
if the request is permitted, and false
otherwise.
+ */
+ public boolean isAuthorized();
+
+ /**
+ * Returns any advice elements that were included in the authorization response.
+ *
+ * @return A list of objects implementing the AuthorizationResponseSupplement
interface, with each object representing an
+ * advice element from the authorization response.
+ */
+ public List getAdvice();
+
+ /**
+ * Returns any obligation elements that were included in the authorization response.
+ *
+ * @return A list of objects implementing the AuthorizationResponseSupplement
interface, with each object representing an
+ * obligation element from the authorization response.
+ */
+ public List getObligations();
+}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/AuthorizationResponseSupplement.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/AuthorizationResponseSupplement.java
new file mode 100644
index 00000000..adb44ba3
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/AuthorizationResponseSupplement.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.authz;
+
+import java.util.Map;
+
+/** An object that meets the AuthorizationResponseSupplement
interface carries supplementary
+ * information for an authorization response. In a XACML-based system, a response to an authorization request
+ * carries not just the permit/deny decision but, optionally, supplemental information in the form of advice and
+ * obligation elements. The structure of a XACML advice element and a XACML obligation element are similar: each has an identifier and
+ * a set of attributes (name-value) pairs. (The difference between a XACML advice element and a XACML obligation element is in
+ * how the recipient of the response--the Policy Enforcement Point, in XACML terminology--handles the element.)
+ *
+ * @author J. F. Lucas
+ *
+ */
+public interface AuthorizationResponseSupplement {
+ /** Return the identifier for the supplementary information element.
+ *
+ * @return a String
containing the identifier.
+ */
+ public String getId();
+
+ /** Return the attributes for the supplementary information element, as a Map
in which
+ * keys represent attribute identifiers and values represent attribute values.
+ *
+ * @return attributes for the supplementary information element.
+ */
+ public Map getAttributes();
+}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/Authorizer.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/Authorizer.java
new file mode 100644
index 00000000..44719e66
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/Authorizer.java
@@ -0,0 +1,62 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.authz;
+
+import java.util.Map;
+import javax.servlet.http.HttpServletRequest;
+
+/**
+ * A Data Router API that requires authorization of incoming requests creates an instance of a class that implements
+ * the Authorizer
interface. The class implements all of the logic necessary to determine if an API
+ * request is permitted. In Data Router R1, the classes that implement the Authorizer
interface will have
+ * local logic that makes the authorization decision. After R1, these classes will instead have logic that creates XACML
+ * authorization requests, sends these requests to a Policy Decision Point (PDP), and parses the XACML responses.
+ *
+ * @author J. F. Lucas
+ *
+ */
+public interface Authorizer {
+ /**
+ * Determine if the API request carried in the request
parameter is permitted.
+ *
+ * @param request the HTTP request for which an authorization decision is needed
+ * @return an object implementing the AuthorizationResponse
interface. This object includes the
+ * permit/deny decision for the request and (after R1) supplemental information related to the response in the form
+ * of advice and obligations.
+ */
+ public AuthorizationResponse decide(HttpServletRequest request);
+
+ /**
+ * Determine if the API request carried in the request
parameter, with additional attributes provided in
+ * the additionalAttrs
parameter, is permitted.
+ *
+ * @param request the HTTP request for which an authorization decision is needed
+ * @param additionalAttrs additional attributes that the Authorizer
can in making an authorization decision
+ * @return an object implementing the AuthorizationResponse
interface. This object includes the
+ * permit/deny decision for the request and (after R1) supplemental information related to the response in the form
+ * of advice and obligations.
+ */
+ public AuthorizationResponse decide(HttpServletRequest request, Map additionalAttrs);
+}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespImpl.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespImpl.java
new file mode 100644
index 00000000..cb608c3b
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespImpl.java
@@ -0,0 +1,97 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.authz.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.onap.dmaap.datarouter.authz.AuthorizationResponse;
+import org.onap.dmaap.datarouter.authz.AuthorizationResponseSupplement;
+
+
+/** A representation of an authorization response returned by a XACML Policy Decision Point.
+ * In Data Router R1, advice and obligations are not used.
+ * @author J. F. Lucas
+ *
+ */
+public class AuthRespImpl implements AuthorizationResponse {
+ private boolean authorized;
+ private List advice;
+ private List obligations;
+
+ /** Constructor. This version will not be used in Data Router R1 since we will not have advice and obligations.
+ *
+ * @param authorized flag indicating whether the response carried a permit response (true
)
+ * or something else (false
).
+ * @param advice list of advice elements returned in the response.
+ * @param obligations list of obligation elements returned in the response.
+ */
+ public AuthRespImpl(boolean authorized, List advice, List obligations) {
+ this.authorized = authorized;
+ this.advice = (advice == null ? null : new ArrayList (advice));
+ this.obligations = (obligations == null ? null : new ArrayList (obligations));
+ }
+
+ /** Constructor. Simple version for authorization responses that have no advice and no obligations.
+ *
+ * @param authorized flag indicating whether the response carried a permit (true
) or something else (false
).
+ */
+ public AuthRespImpl(boolean authorized) {
+ this(authorized, null, null);
+ }
+
+ /**
+ * Indicates whether the request is authorized or not.
+ *
+ * @return a boolean flag that is true
if the request is permitted, and false
otherwise.
+ */
+ @Override
+ public boolean isAuthorized() {
+ return authorized;
+ }
+
+ /**
+ * Returns any advice elements that were included in the authorization response.
+ *
+ * @return A list of objects implementing the AuthorizationResponseSupplement
interface, with each object representing an
+ * advice element from the authorization response.
+ */
+ @Override
+ public List getAdvice() {
+ return advice;
+ }
+
+ /**
+ * Returns any obligation elements that were included in the authorization response.
+ *
+ * @return A list of objects implementing the AuthorizationResponseSupplement
interface, with each object representing an
+ * obligation element from the authorization response.
+ */
+ @Override
+ public List getObligations() {
+ return obligations;
+ }
+
+}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespSupplementImpl.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespSupplementImpl.java
new file mode 100644
index 00000000..1f62c274
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthRespSupplementImpl.java
@@ -0,0 +1,71 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.authz.impl;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.onap.dmaap.datarouter.authz.AuthorizationResponseSupplement;
+
+/** Carries supplementary information--an advice or an obligation--from the authorization response returned
+ * by a XACML Policy Decision Point. Not used in Data Router R1.
+ * @author J. F. Lucas
+ *
+ */
+public class AuthRespSupplementImpl implements AuthorizationResponseSupplement {
+
+ private String id = null;
+ private Map attributes = null;
+
+ /** Constructor, available within the package.
+ *
+ * @param id The identifier for the advice or obligation element
+ * @param attributes The attributes (name-value pairs) for the advice or obligation element.
+ */
+ AuthRespSupplementImpl (String id, Map attributes) {
+ this.id = id;
+ this.attributes = new HashMap(attributes);
+ }
+
+ /** Return the identifier for the supplementary information element.
+ *
+ * @return a String
containing the identifier.
+ */
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ /** Return the attributes for the supplementary information element, as a Map
in which
+ * keys represent attribute identifiers and values represent attribute values.
+ *
+ * @return attributes for the supplementary information element.
+ */
+ @Override
+ public Map getAttributes() {
+ return attributes;
+ }
+
+}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthzResource.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthzResource.java
new file mode 100644
index 00000000..2e957939
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/AuthzResource.java
@@ -0,0 +1,100 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.authz.impl;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
/** Internal representation of an authorization resource (the entity to which access is being requested).
 * Consists of a type and an identifier.  The constructor takes the request URI from an HTTP request and
 * checks it against patterns for the different resource types.  In DR R1, there are four resource types:
 * <ul>
 * <li>the feeds collection resource, the target of POST requests to create a new feed and GET requests
 * to list the existing feeds.  This is the root resource for the DR provisioning system, and it has no
 * explicit id.</li>
 * <li>a feed resource, the target of GET, PUT, and DELETE requests used to manage an existing feed.
 * Each feed has a unique feed ID.</li>
 * <li>a subscription collection resource, the target of POST requests to create a new subscription and
 * GET requests to list the subscriptions for a feed.  Each feed has a subscription collection, and the
 * ID associated with a subscription collection is the ID of the feed.</li>
 * <li>a subscription resource, the target of GET, PUT, and DELETE requests used to manage an existing
 * subscription.  Each subscription has a unique subscription ID.</li>
 * </ul>
 *
 * @author J. F. Lucas
 *
 */
public class AuthzResource {
    private ResourceType type = null;
    private String id = "";

    /** Construct an AuthzResource by matching a request URI against the patterns for each resource
     * type.  If no pattern matches, the type remains null and the id remains the empty string.
     *
     * @param rURI the request URI to classify (may be null)
     */
    public AuthzResource(String rURI) {
        if (rURI != null) {
            for (ResourceType t : ResourceType.values()) {
                Matcher m = t.getPattern().matcher(rURI);
                if (m.find(0)) {
                    this.type = t;
                    // the "id" named capture group carries the resource identifier, when present
                    if (m.group("id") != null) {
                        this.id = m.group("id");
                    }
                    break;
                }
            }
        }
    }

    /** @return the resource type, or null if the URI matched no known resource pattern. */
    public ResourceType getType() {
        return this.type;
    }

    /** @return the resource identifier, or the empty string for resources with no id. */
    public String getId() {
        return this.id;
    }

    /* Enumeration that helps turn a request URI into something more useful for
     * authorization purposes by giving a type name and a pattern for determining if the URI
     * represents that resource type.
     * Highly dependent on the URL scheme, could be parameterized.
     *
     * The named group "(?<id>...)" is required by the constructor's m.group("id") lookup;
     * the "<id>" text had been stripped from the patterns, leaving invalid regexes.
     */
    public enum ResourceType {
        FEEDS_COLLECTION("((://[^/]+/)|(^/))(?<id>)$"),
        SUBS_COLLECTION("((://[^/]+/)|(^/{0,1}))subscribe/(?<id>[^/]+)$"),
        FEED("((://[^/]+/)|(^/{0,1}))feed/(?<id>[^/]+)$"),
        SUB("((://[^/]+/)|(^/{0,1}))subs/(?<id>[^/]+)$");

        private final Pattern uriPattern;

        private ResourceType(String patternString) {
            this.uriPattern = Pattern.compile(patternString);
        }

        Pattern getPattern() {
            return this.uriPattern;
        }
    }
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthorizer.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthorizer.java
new file mode 100644
index 00000000..6ab9e2ab
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvAuthorizer.java
@@ -0,0 +1,178 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+package org.onap.dmaap.datarouter.authz.impl;
+
+import java.util.Map;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.log4j.Logger;
+import org.onap.dmaap.datarouter.authz.AuthorizationResponse;
+import org.onap.dmaap.datarouter.authz.Authorizer;
+import org.onap.dmaap.datarouter.authz.impl.AuthzResource.ResourceType;
+
+/** Authorizer for the provisioning API for Data Router R1
+ *
+ * @author J. F. Lucas
+ *
+ */
+public class ProvAuthorizer implements Authorizer {
+
+ private Logger log;
+ private ProvDataProvider provData;
+
+ private static final String SUBJECT_HEADER = "X-ATT-DR-ON-BEHALF-OF"; // HTTP header carrying requester identity
+ private static final String SUBJECT_HEADER_GROUP = "X-ATT-DR-ON-BEHALF-OF-GROUP"; // HTTP header carrying requester identity by group Rally : US708115
+ /** Constructor. For the moment, do nothing special. Make it a singleton?
+ *
+ */
+ public ProvAuthorizer(ProvDataProvider provData) {
+ this.provData = provData;
+ this.log = Logger.getLogger(this.getClass());
+ }
+
+ /**
+ * Determine if the API request carried in the request
parameter is permitted.
+ *
+ * @param request the HTTP request for which an authorization decision is needed
+ * @return an object implementing the AuthorizationResponse
interface. This object includes the
+ * permit/deny decision for the request and (after R1) supplemental information related to the response in the form
+ * of advice and obligations.
+ */
+ @Override
+ public AuthorizationResponse decide(HttpServletRequest request) {
+ return this.decide(request, null);
+ }
+
+ /**
+ * Determine if the API request carried in the request
parameter, with additional attributes provided in
+ * the additionalAttrs
parameter, is permitted. additionalAttrs
isn't used in R1.
+ *
+ * @param request the HTTP request for which an authorization decision is needed
+ * @param additionalAttrs additional attributes that the Authorizer
can in making an authorization decision
+ * @return an object implementing the AuthorizationResponse
interface. This object includes the
+ * permit/deny decision for the request and (after R1) supplemental information related to the response in the form
+ * of advice and obligations.
+ */
+ @Override
+ public AuthorizationResponse decide(HttpServletRequest request,
+ Map additionalAttrs) {
+ log.trace ("Entering decide()");
+
+ boolean decision = false;
+
+ // Extract interesting parts of the HTTP request
+ String method = request.getMethod();
+ AuthzResource resource = new AuthzResource(request.getRequestURI());
+ String subject = (request.getHeader(SUBJECT_HEADER)); // identity of the requester
+ String subjectgroup = (request.getHeader(SUBJECT_HEADER_GROUP)); // identity of the requester by group Rally : US708115
+
+ log.trace("Method: " + method + " -- Type: " + resource.getType() + " -- Id: " + resource.getId() +
+ " -- Subject: " + subject);
+
+ // Choose authorization method based on the resource type
+ ResourceType resourceType = resource.getType();
+ if (resourceType != null) {
+
+ switch (resourceType) {
+
+ case FEEDS_COLLECTION:
+ decision = allowFeedsCollectionAccess(resource, method, subject, subjectgroup);
+ break;
+
+ case SUBS_COLLECTION:
+ decision = allowSubsCollectionAccess(resource, method, subject, subjectgroup);
+ break;
+
+ case FEED:
+ decision = allowFeedAccess(resource, method, subject, subjectgroup);
+ break;
+
+ case SUB:
+ decision = allowSubAccess(resource, method, subject, subjectgroup);
+ break;
+
+ default:
+ decision = false;
+ break;
+ }
+ }
+ log.debug("Exit decide(): " + method + "|" + resourceType + "|" + resource.getId() + "|" + subject + " ==> " + decision);
+
+ return new AuthRespImpl(decision);
+ }
+
+ private boolean allowFeedsCollectionAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
+
+ // Allow GET or POST unconditionally
+ return method != null && (method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("POST"));
+ }
+
+ private boolean allowSubsCollectionAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
+
+ // Allow GET or POST unconditionally
+ return method != null && (method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("POST"));
+ }
+
+ private boolean allowFeedAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
+ boolean decision = false;
+
+ // Allow GET, PUT, or DELETE if requester (subject) is the owner (publisher) of the feed
+ if ( method != null && (method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("PUT") ||
+ method.equalsIgnoreCase("DELETE"))) {
+
+ String owner = provData.getFeedOwner(resource.getId());
+ decision = (owner != null) && owner.equals(subject);
+
+ //Verifying by group Rally : US708115
+ if(subjectgroup != null) {
+ String feedowner = provData.getGroupByFeedGroupId(subject, resource.getId());
+ decision = (feedowner != null) && feedowner.equals(subjectgroup);
+ }
+ }
+
+ return decision;
+ }
+
+ private boolean allowSubAccess(AuthzResource resource, String method, String subject, String subjectgroup) {
+ boolean decision = false;
+
+ // Allow GET, PUT, or DELETE if requester (subject) is the owner of the subscription (subscriber)
+ if (method != null && (method.equalsIgnoreCase("GET") || method.equalsIgnoreCase("PUT") ||
+ method.equalsIgnoreCase("DELETE") || method.equalsIgnoreCase("POST"))) {
+
+ String owner = provData.getSubscriptionOwner(resource.getId());
+ decision = (owner != null) && owner.equals(subject);
+
+ //Verifying by group Rally : US708115
+ if(subjectgroup != null) {
+ String feedowner = provData.getGroupBySubGroupId(subject, resource.getId());
+ decision = (feedowner != null) && feedowner.equals(subjectgroup);
+ }
+ }
+
+ return decision;
+ }
+
+}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvDataProvider.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvDataProvider.java
new file mode 100644
index 00000000..580fe99d
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvDataProvider.java
@@ -0,0 +1,66 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+package org.onap.dmaap.datarouter.authz.impl;
+
/** Interface to access data about subscriptions and feeds.  A software component that
 * uses the ProvAuthorizer needs to supply an implementation of this interface.
 * @author J. F. Lucas
 *
 */
public interface ProvDataProvider {

    /** Get the identity of the owner of a feed.
     *
     * @param feedId the feed ID of the feed whose owner is being looked up.
     * @return the feed owner's identity
     */
    public String getFeedOwner(String feedId);

    /** Get the security classification of a feed.
     *
     * @param feedId the ID of the feed whose classification is being looked up.
     * @return the classification of the feed.
     */
    public String getFeedClassification(String feedId);

    /** Get the identity of the owner of a subscription.
     *
     * @param subId the ID of the subscription whose owner is being looked up.
     * @return the subscription owner's identity.
     */
    public String getSubscriptionOwner(String subId);

    /** Get the group identity associated with a feed, by group id - Rally : US708115
     *
     * @param owner the identity of the requester (from the on-behalf-of header).
     * @param feedId the ID of the feed whose owning group is being looked up.
     * @return the feed's group identity (may be null).
     */
    public String getGroupByFeedGroupId(String owner, String feedId);

    /** Get the group identity associated with a subscription, by group id - Rally : US708115
     *
     * @param owner the identity of the requester (from the on-behalf-of header).
     * @param subId the ID of the subscription whose owning group is being looked up.
     * @return the subscription's group identity (may be null).
     */
    public String getGroupBySubGroupId(String owner, String subId);
}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/package.html b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/package.html
new file mode 100644
index 00000000..fae27ee0
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/package.html
@@ -0,0 +1,68 @@
+#-------------------------------------------------------------------------------
+# ============LICENSE_START==================================================
+# * org.onap.dmaap
+# * ===========================================================================
+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# * ===========================================================================
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# * ============LICENSE_END====================================================
+# *
+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# *
+#-------------------------------------------------------------------------------
+
+
+
+
+
+
+This package provides an implementation of the authorization-related interfaces
+defined by the <code>org.onap.dmaap.datarouter.authz</code> package, intended for
+use with the provisioning server for Data Router Release 1. In DR R1, we do not
+have an external policy engine, so this implementation performs the authorization
+locally.
+
+
+In order to perform the authorization, this package needs access to provisioning data
+about feeds and subscriptions. This package defines an interface
+(<code>org.onap.dmaap.datarouter.authz.impl.ProvDataProvider</code>) through which it
+expects to get this data. The provisioning server code must provide an implementation
+of this interface.
+
+
+A software component that wishes to use this implementation must:
+
+Provide an implementation of the
+<code>org.onap.dmaap.datarouter.authz.impl.ProvDataProvider</code>
+interface.
+
+
+Create an instance of the <code>ProvDataProvider</code> implementation.
+
+Create an instance of the
+<code>org.onap.dmaap.datarouter.authz.impl.ProvAuthorizer</code>
+class defined in this package, passing it an instance of the <code>ProvDataProvider</code>
+implementation.
+
+
+
+
+Example:
+
+
+ProvDataProvider dataProv = new MyDataProvider();
+Authorizer authz = new ProvAuthorizer(dataProv);
+
+
+
+
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/package.html b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/package.html
new file mode 100644
index 00000000..7628ae82
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/package.html
@@ -0,0 +1,38 @@
+#-------------------------------------------------------------------------------
+# ============LICENSE_START==================================================
+# * org.onap.dmaap
+# * ===========================================================================
+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# * ===========================================================================
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# * ============LICENSE_END====================================================
+# *
+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# *
+#-------------------------------------------------------------------------------
+
+
+
+
+
+
+This package defines an interface that can be used by servlet-based HTTP APIs to
+make authorization requests and receive authorization responses from an external
+authorization entity such as a XACML Policy Decision Point (PDP).
+
+
+In Data Router Release 1, there is no external authorization system. The provisioning server
+will use an implementation of this interface for local authorization of provisioning requests.
+
+
+
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java
new file mode 100644
index 00000000..45d97485
--- /dev/null
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java
@@ -0,0 +1,868 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.provisioning;
+
+import static com.att.eelf.configuration.Configuration.MDC_SERVER_FQDN;
+
+import static com.att.eelf.configuration.Configuration.MDC_SERVER_IP_ADDRESS;
+import static com.att.eelf.configuration.Configuration.MDC_SERVICE_NAME;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.security.cert.X509Certificate;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.List;
+import java.util.ArrayList;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.log4j.Logger;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.onap.dmaap.datarouter.authz.Authorizer;
+import org.onap.dmaap.datarouter.authz.impl.ProvAuthorizer;
+import org.onap.dmaap.datarouter.authz.impl.ProvDataProvider;
+import org.onap.dmaap.datarouter.provisioning.beans.Deleteable;
+import org.onap.dmaap.datarouter.provisioning.beans.Feed;
+import org.onap.dmaap.datarouter.provisioning.beans.Group;
+import org.onap.dmaap.datarouter.provisioning.beans.Insertable;
+import org.onap.dmaap.datarouter.provisioning.beans.NodeClass;
+import org.onap.dmaap.datarouter.provisioning.beans.Parameters;
+import org.onap.dmaap.datarouter.provisioning.beans.Subscription;
+import org.onap.dmaap.datarouter.provisioning.beans.Updateable;
+import org.onap.dmaap.datarouter.provisioning.utils.DB;
+import org.onap.dmaap.datarouter.provisioning.utils.ThrottleFilter;
+import org.json.JSONException;
+import org.slf4j.MDC;
+
+import java.util.Properties;
+import java.util.regex.Pattern;
+import javax.mail.Message;
+import javax.mail.MessagingException;
+import javax.mail.Multipart;
+import javax.mail.Session;
+import javax.mail.Transport;
+import javax.mail.internet.AddressException;
+import javax.mail.internet.InternetAddress;
+import javax.mail.internet.MimeBodyPart;
+import javax.mail.internet.MimeMessage;
+import javax.mail.internet.MimeMultipart;
+/**
+ * This is the base class for all Servlets in the provisioning code.
+ * It provides standard constants and some common methods.
+ *
+ * @author Robert Eby
+ * @version $Id: BaseServlet.java,v 1.16 2014/03/12 19:45:40 eby Exp $
+ */
+@SuppressWarnings("serial")
+public class BaseServlet extends HttpServlet implements ProvDataProvider {
+ public static final String BEHALF_HEADER = "X-ATT-DR-ON-BEHALF-OF";
+ public static final String FEED_BASECONTENT_TYPE = "application/vnd.att-dr.feed";
+ public static final String FEED_CONTENT_TYPE = "application/vnd.att-dr.feed; version=2.0";
+ public static final String FEEDFULL_CONTENT_TYPE = "application/vnd.att-dr.feed-full; version=2.0";
+ public static final String FEEDLIST_CONTENT_TYPE = "application/vnd.att-dr.feed-list; version=1.0";
+ public static final String SUB_BASECONTENT_TYPE = "application/vnd.att-dr.subscription";
+ public static final String SUB_CONTENT_TYPE = "application/vnd.att-dr.subscription; version=2.0";
+ public static final String SUBFULL_CONTENT_TYPE = "application/vnd.att-dr.subscription-full; version=2.0";
+ public static final String SUBLIST_CONTENT_TYPE = "application/vnd.att-dr.subscription-list; version=1.0";
+
+
+ //Adding groups functionality, ...1610
+ public static final String GROUP_BASECONTENT_TYPE = "application/vnd.att-dr.group";
+ public static final String GROUP_CONTENT_TYPE = "application/vnd.att-dr.group; version=2.0";
+ public static final String GROUPFULL_CONTENT_TYPE = "application/vnd.att-dr.group-full; version=2.0";
+ public static final String GROUPLIST_CONTENT_TYPE = "application/vnd.att-dr.fegrouped-list; version=1.0";
+
+
+ public static final String LOGLIST_CONTENT_TYPE = "application/vnd.att-dr.log-list; version=1.0";
+ public static final String PROVFULL_CONTENT_TYPE1 = "application/vnd.att-dr.provfeed-full; version=1.0";
+ public static final String PROVFULL_CONTENT_TYPE2 = "application/vnd.att-dr.provfeed-full; version=2.0";
+ public static final String CERT_ATTRIBUTE = "javax.servlet.request.X509Certificate";
+
+ public static final String DB_PROBLEM_MSG = "There has been a problem with the DB. It is suggested you try the operation again.";
+
+ public static final int DEFAULT_MAX_FEEDS = 10000;
+ public static final int DEFAULT_MAX_SUBS = 100000;
+ public static final int DEFAULT_POKETIMER1 = 5;
+ public static final int DEFAULT_POKETIMER2 = 30;
+ public static final String DEFAULT_DOMAIN = "web.att.com";
+ public static final String DEFAULT_PROVSRVR_NAME = "feeds-drtr.web.att.com";
+ public static final String RESEARCH_SUBNET = "135.207.136.128/25";
+ public static final String STATIC_ROUTING_NODES = ""; //Adding new param for static Routing - Rally:US664862-1610
+
+ /** A boolean to trigger one time "provisioning changed" event on startup */
+ private static boolean startmsg_flag = true;
+ /** This POD should require SSL connections from clients; pulled from the DB (PROV_REQUIRE_SECURE) */
+ private static boolean require_secure = true;
+ /** This POD should require signed, recognized certificates from clients; pulled from the DB (PROV_REQUIRE_CERT) */
+ private static boolean require_cert = true;
+ /** The set of authorized addresses and networks; pulled from the DB (PROV_AUTH_ADDRESSES) */
+ private static Set authorizedAddressesAndNetworks = new HashSet();
+ /** The set of authorized names; pulled from the DB (PROV_AUTH_SUBJECTS) */
+ private static Set authorizedNames = new HashSet();
+ /** The FQDN of the initially "active" provisioning server in this Data Router ecosystem */
+ private static String initial_active_pod;
+ /** The FQDN of the initially "standby" provisioning server in this Data Router ecosystem */
+ private static String initial_standby_pod;
+ /** The FQDN of this provisioning server in this Data Router ecosystem */
+ private static String this_pod;
+ /** "Timer 1" - used to determine when to notify nodes of provisioning changes */
+ private static long poke_timer1;
+ /** "Timer 2" - used to determine when to notify nodes of provisioning changes */
+ private static long poke_timer2;
+ /** Array of nodes names and/or FQDNs */
+ private static String[] nodes = new String[0];
+ /** Array of node IP addresses */
+ private static InetAddress[] nodeAddresses = new InetAddress[0];
+ /** Array of POD IP addresses */
+ private static InetAddress[] podAddresses = new InetAddress[0];
+ /** The maximum number of feeds allowed; pulled from the DB (PROV_MAXFEED_COUNT) */
+ protected static int max_feeds = 0;
+ /** The maximum number of subscriptions allowed; pulled from the DB (PROV_MAXSUB_COUNT) */
+ protected static int max_subs = 0;
+ /** The current number of feeds in the system */
+ protected static int active_feeds = 0;
+ /** The current number of subscriptions in the system */
+ protected static int active_subs = 0;
+ /** The domain used to generate a FQDN from the "bare" node names */
+ public static String prov_domain = "web.att.com";
+ /** The standard FQDN of the provisioning server in this Data Router ecosystem */
+ public static String prov_name = "feeds-drtr.web.att.com";
+ /** The standard FQDN of the ACTIVE provisioning server in this Data Router ecosystem */
+ public static String active_prov_name = "feeds-drtr.web.att.com";
+ /** Special subnet that is allowed access to /internal */
+ protected static String special_subnet = RESEARCH_SUBNET;
+
+ /** Special subnet that is allowed access to /internal to Lab Machine */
+ protected static String special_subnet_secondary = RESEARCH_SUBNET;
+ protected static String static_routing_nodes = STATIC_ROUTING_NODES; //Adding new param for static Routing - Rally:US664862-1610
+
+ /** This logger is used to log provisioning events */
+ protected static Logger eventlogger;
+ /** This logger is used to log internal events (errors, etc.) */
+ protected static Logger intlogger;
+ /** Authorizer - interface to the Policy Engine */
+ protected static Authorizer authz;
+ /** The Synchronizer used to sync active DB to standby one */
+ protected static SynchronizerTask synctask = null;
+
+ //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
+ private InetAddress thishost;
+ private InetAddress loopback;
+ private static Boolean mailSendFlag = false;
+
+ public static final String MAILCONFIG_FILE = "mail.properties";
+ private static Properties mailprops;
    /**
     * Initialize data common to all the provisioning server servlets.
     * The loggers, the authorizer, and the synchronizer are shared, lazily created statics.
     * NOTE(review): this lazy initialization is not synchronized — it assumes servlet
     * construction happens single-threaded during container startup; confirm.
     */
    protected BaseServlet() {
        if (eventlogger == null)
            eventlogger = Logger.getLogger("org.onap.dmaap.datarouter.provisioning.events");
        if (intlogger == null)
            intlogger = Logger.getLogger("org.onap.dmaap.datarouter.provisioning.internal");
        if (authz == null)
            authz = new ProvAuthorizer(this);
        if (startmsg_flag) {
            // Fire a one-time "provisioning changed" event when the first servlet is created
            startmsg_flag = false;
            provisioningParametersChanged();
        }
        if (synctask == null) {
            synctask = SynchronizerTask.getSynchronizer();
        }
        String name = this.getClass().getName();
        intlogger.info("PROV0002 Servlet "+name+" started.");
    }
    /**
     * Standard servlet initialization: record this host's address and the loopback address
     * (both consulted by isAuthorizedForInternal) and apply the subscriber HTTPS relaxation
     * setting (USERSTORYID:US674047).
     * A failure to resolve the local host is deliberately ignored; thishost then remains
     * unset and the address-based checks simply skip it.
     */
    @Override
    public void init(ServletConfig config) throws ServletException {
        super.init(config);
        try {
            thishost = InetAddress.getLocalHost();
            loopback = InetAddress.getLoopbackAddress();
            checkHttpsRelaxation(); //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
        } catch (UnknownHostException e) {
            // ignore — thishost/loopback stay null and are skipped by the auth checks
        }
    }
+ protected int getIdFromPath(HttpServletRequest req) {
+ String path = req.getPathInfo();
+ if (path == null || path.length() < 2)
+ return -1;
+ try {
+ return Integer.parseInt(path.substring(1));
+ } catch (NumberFormatException e) {
+ return -1;
+ }
+ }
+ /**
+ * Read the request's input stream and return a JSONObject from it
+ * @param req the HTTP request
+ * @return the JSONObject, or null if the stream cannot be parsed
+ */
+ protected JSONObject getJSONfromInput(HttpServletRequest req) {
+ JSONObject jo = null;
+ try {
+ jo = new JSONObject(new JSONTokener(req.getInputStream()));
+ if (intlogger.isDebugEnabled())
+ intlogger.debug("JSON: "+jo.toString());
+ } catch (Exception e) {
+ intlogger.info("Error reading JSON: "+e);
+ }
+ return jo;
+ }
+ /**
+ * Check if the remote host is authorized to perform provisioning.
+ * Is the request secure?
+ * Is it coming from an authorized IP address or network (configured via PROV_AUTH_ADDRESSES)?
+ * Does it have a valid client certificate (configured via PROV_AUTH_SUBJECTS)?
+ * @param request the request
+ * @return an error string, or null if all is OK
+ */
+ protected String isAuthorizedForProvisioning(HttpServletRequest request) {
+ // Is the request https?
+ if (require_secure && !request.isSecure()) {
+ return "Request must be made over an HTTPS connection.";
+ }
+
+ // Is remote IP authorized?
+ String remote = request.getRemoteAddr();
+ try {
+ boolean found = false;
+ InetAddress ip = InetAddress.getByName(remote);
+ for (String addrnet : authorizedAddressesAndNetworks) {
+ found |= addressMatchesNetwork(ip, addrnet);
+ }
+ if (!found) {
+ return "Unauthorized address: "+remote;
+ }
+ } catch (UnknownHostException e) {
+ return "Unauthorized address: "+remote;
+ }
+
+ // Does remote have a valid certificate?
+ if (require_cert) {
+ X509Certificate certs[] = (X509Certificate[]) request.getAttribute(CERT_ATTRIBUTE);
+ if (certs == null || certs.length == 0) {
+ return "Client certificate is missing.";
+ }
+ // cert[0] is the client cert
+ // see http://www.proto.research.att.com/java/java7/api/javax/net/ssl/SSLSession.html#getPeerCertificates()
+ String name = certs[0].getSubjectX500Principal().getName();
+ if (!authorizedNames.contains(name)) {
+ return "No authorized certificate found.";
+ }
+ }
+
+ // No problems!
+ return null;
+ }
+ /**
+ * Check if the remote IP address is authorized to see the /internal URL tree.
+ * @param request the HTTP request
+ * @return true iff authorized
+ */
+ protected boolean isAuthorizedForInternal(HttpServletRequest request) {
+ try {
+ InetAddress ip = InetAddress.getByName(request.getRemoteAddr());
+ for (InetAddress node : getNodeAddresses()) {
+ if (node != null && ip.equals(node))
+ return true;
+ }
+ for (InetAddress pod : getPodAddresses()) {
+ if (pod != null && ip.equals(pod))
+ return true;
+ }
+ if (thishost != null && ip.equals(thishost))
+ return true;
+ if (loopback != null && ip.equals(loopback))
+ return true;
+ // Also allow the "special subnet" access
+ if (addressMatchesNetwork(ip, special_subnet_secondary))
+ return true;
+ if (addressMatchesNetwork(ip, special_subnet))
+ return true;
+ } catch (UnknownHostException e) {
+ // ignore
+ }
+ return false;
+ }
+ /**
+ * Check if an IP address matches a network address.
+ * @param ip the IP address
+ * @param s the network address; a bare IP address may be matched also
+ * @return true if they intersect
+ */
+ protected static boolean addressMatchesNetwork(InetAddress ip, String s) {
+ int mlen = -1;
+ int n = s.indexOf("/");
+ if (n >= 0) {
+ mlen = Integer.parseInt(s.substring(n+1));
+ s = s.substring(0, n);
+ }
+ try {
+ InetAddress i2 = InetAddress.getByName(s);
+ byte[] b1 = ip.getAddress();
+ byte[] b2 = i2.getAddress();
+ if (b1.length != b2.length)
+ return false;
+ if (mlen > 0) {
+ byte[] masks = {
+ (byte)0x00, (byte)0x80, (byte)0xC0, (byte)0xE0,
+ (byte)0xF0, (byte)0xF8, (byte)0xFC, (byte)0xFE
+ };
+ byte mask = masks[mlen%8];
+ for (n = mlen/8; n < b1.length; n++) {
+ b1[n] &= mask;
+ b2[n] &= mask;
+ mask = 0;
+ }
+ }
+ for (n = 0; n < b1.length; n++)
+ if (b1[n] != b2[n])
+ return false;
+ } catch (UnknownHostException e) {
+ return false;
+ }
+ return true;
+ }
+ /**
+ * Something has changed in the provisioning data.
+ * Start the timers that will cause the pre-packaged JSON string to be regenerated,
+ * and cause nodes and the other provisioning server to be notified.
+ */
+ public static void provisioningDataChanged() {
+ long now = System.currentTimeMillis();
+ Poker p = Poker.getPoker();
+ p.setTimers(now + (poke_timer1 * 1000L), now + (poke_timer2 * 1000L));
+ }
+ /**
+ * Something in the parameters has changed, reload all parameters from the DB.
+ */
+ public static void provisioningParametersChanged() {
+ Map map = Parameters.getParameters();
+ require_secure = getBoolean(map, Parameters.PROV_REQUIRE_SECURE);
+ require_cert = getBoolean(map, Parameters.PROV_REQUIRE_CERT);
+ authorizedAddressesAndNetworks = getSet(map, Parameters.PROV_AUTH_ADDRESSES);
+ authorizedNames = getSet (map, Parameters.PROV_AUTH_SUBJECTS);
+ nodes = getSet (map, Parameters.NODES).toArray(new String[0]);
+ max_feeds = getInt (map, Parameters.PROV_MAXFEED_COUNT, DEFAULT_MAX_FEEDS);
+ max_subs = getInt (map, Parameters.PROV_MAXSUB_COUNT, DEFAULT_MAX_SUBS);
+ poke_timer1 = getInt (map, Parameters.PROV_POKETIMER1, DEFAULT_POKETIMER1);
+ poke_timer2 = getInt (map, Parameters.PROV_POKETIMER2, DEFAULT_POKETIMER2);
+ prov_domain = getString (map, Parameters.PROV_DOMAIN, DEFAULT_DOMAIN);
+ prov_name = getString (map, Parameters.PROV_NAME, DEFAULT_PROVSRVR_NAME);
+ active_prov_name = getString (map, Parameters.PROV_ACTIVE_NAME, prov_name);
+ special_subnet = getString (map, Parameters.PROV_SPECIAL_SUBNET, RESEARCH_SUBNET);
+ static_routing_nodes = getString (map, Parameters.STATIC_ROUTING_NODES, ""); //Adding new param for static Routing - Rally:US664862-1610
+ initial_active_pod = getString (map, Parameters.ACTIVE_POD, "");
+ initial_standby_pod = getString (map, Parameters.STANDBY_POD, "");
+ static_routing_nodes = getString (map, Parameters.STATIC_ROUTING_NODES, ""); //Adding new param for static Routing - Rally:US664862-1610
+ active_feeds = Feed.countActiveFeeds();
+ active_subs = Subscription.countActiveSubscriptions();
+ try {
+ this_pod = InetAddress.getLocalHost().getHostName();
+ } catch (UnknownHostException e) {
+ this_pod = "";
+ intlogger.warn("PROV0014 Cannot determine the name of this provisioning server.");
+ }
+
+ // Normalize the nodes, and fill in nodeAddresses
+ InetAddress[] na = new InetAddress[nodes.length];
+ for (int i = 0; i < nodes.length; i++) {
+ if (nodes[i].indexOf('.') < 0)
+ nodes[i] += "." + prov_domain;
+ try {
+ na[i] = InetAddress.getByName(nodes[i]);
+ intlogger.debug("PROV0003 DNS lookup: "+nodes[i]+" => "+na[i].toString());
+ } catch (UnknownHostException e) {
+ na[i] = null;
+ intlogger.warn("PROV0004 Cannot lookup "+nodes[i]+": "+e);
+ }
+ }
+
+ //Reset Nodes arr after - removing static routing Nodes, Rally Userstory - US664862 .
+ List filterNodes = new ArrayList<>();
+ for (int i = 0; i < nodes.length; i++) {
+ if(!static_routing_nodes.contains(nodes[i])){
+ filterNodes.add(nodes[i]);
+ }
+ }
+ String [] filteredNodes = filterNodes.toArray(new String[filterNodes.size()]);
+ nodes = filteredNodes;
+
+ nodeAddresses = na;
+ NodeClass.setNodes(nodes); // update NODES table
+
+ // Normalize the PODs, and fill in podAddresses
+ String[] pods = getPods();
+ na = new InetAddress[pods.length];
+ for (int i = 0; i < pods.length; i++) {
+ if (pods[i].indexOf('.') < 0)
+ pods[i] += "." + prov_domain;
+ try {
+ na[i] = InetAddress.getByName(pods[i]);
+ intlogger.debug("PROV0003 DNS lookup: "+pods[i]+" => "+na[i].toString());
+ } catch (UnknownHostException e) {
+ na[i] = null;
+ intlogger.warn("PROV0004 Cannot lookup "+pods[i]+": "+e);
+ }
+ }
+ podAddresses = na;
+
+ // Update ThrottleFilter
+ ThrottleFilter.configure();
+
+ // Check if we are active or standby POD
+ if (!isInitialActivePOD() && !isInitialStandbyPOD())
+ intlogger.warn("PROV0015 This machine is neither the active nor the standby POD.");
+ }
+
+
+ /**Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
+ * Load mail properties.
+ * @author vs215k
+ *
+ **/
+ private void loadMailProperties() {
+ if (mailprops == null) {
+ mailprops = new Properties();
+ InputStream inStream = getClass().getClassLoader().getResourceAsStream(MAILCONFIG_FILE);
+ try {
+ mailprops.load(inStream);
+ } catch (IOException e) {
+ intlogger.fatal("PROV9003 Opening properties: "+e.getMessage());
+ e.printStackTrace();
+ System.exit(1);
+ }
+ finally {
+ try {
+ inStream.close();
+ }
+ catch (IOException e) {
+ }
+ }
+ }
+ }
+
+ /**Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
+ * Check if HTTPS Relexaction is enabled
+ * @author vs215k
+ *
+ **/
+ private void checkHttpsRelaxation() {
+ if(mailSendFlag == false) {
+ Properties p = (new DB()).getProperties();
+ intlogger.info("HTTPS relaxatio: "+p.get("com.att.research.datarouter.provserver.https.relaxation"));
+
+ if(p.get("com.att.research.datarouter.provserver.https.relaxation").equals("true")) {
+ try {
+ notifyPSTeam(p.get("com.att.research.datarouter.provserver.https.relax.notify").toString());
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ mailSendFlag = true;
+ }
+ }
+
+ /**Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
+ * @author vs215k
+ * @param email - list of email ids to notify if HTTP relexcation is enabled.
+ **/
+ private void notifyPSTeam(String email) throws Exception {
+ loadMailProperties(); //Load HTTPS Relex mail properties.
+ String[] emails = email.split(Pattern.quote("|"));
+
+ Properties mailproperties = new Properties();
+ mailproperties.put("mail.smtp.host", mailprops.get("com.att.dmaap.datarouter.mail.server"));
+ mailproperties.put("mail.transport.protocol", mailprops.get("com.att.dmaap.datarouter.mail.protocol"));
+
+ Session session = Session.getDefaultInstance(mailproperties, null);
+ Multipart mp = new MimeMultipart();
+ MimeBodyPart htmlPart = new MimeBodyPart();
+
+ try {
+
+ Message msg = new MimeMessage(session);
+ msg.setFrom(new InternetAddress(mailprops.get("com.att.dmaap.datarouter.mail.from").toString()));
+
+ InternetAddress[] addressTo = new InternetAddress[emails.length];
+ for ( int x =0 ; x < emails.length; x++) {
+ addressTo[x] = new InternetAddress(emails[x]);
+ }
+
+ msg.addRecipients(Message.RecipientType.TO, addressTo);
+ msg.setSubject(mailprops.get("com.att.dmaap.datarouter.mail.subject").toString());
+ htmlPart.setContent(mailprops.get("com.att.dmaap.datarouter.mail.body").toString().replace("[SERVER]", InetAddress.getLocalHost().getHostName()), "text/html");
+ mp.addBodyPart(htmlPart);
+ msg.setContent(mp);
+
+ System.out.println(mailprops.get("com.att.dmaap.datarouter.mail.body").toString().replace("[SERVER]", InetAddress.getLocalHost().getHostName()));
+
+ Transport.send(msg);
+ intlogger.info("HTTPS relaxation mail is sent to - : "+email);
+
+ } catch (AddressException e) {
+ intlogger.error("Invalid email address, unable to send https relaxation mail to - : "+email);
+ } catch (MessagingException e) {
+ intlogger.error("Invalid email address, unable to send https relaxation mail to - : "+email);
+ }
+ }
+
+
    /**
     * Get an array of all node names in the DR network.
     * Note: this returns the live internal array, not a copy; callers must not modify it.
     * @return an array of Strings
     */
    public static String[] getNodes() {
        return nodes;
    }
    /**
     * Get an array of all node InetAddresses in the DR network.
     * Entries may be null for nodes whose DNS lookup failed; this returns the
     * live internal array, not a copy.
     * @return an array of InetAddresses
     */
    public static InetAddress[] getNodeAddresses() {
        return nodeAddresses;
    }
    /**
     * Get an array of all POD names in the DR network: the initially ACTIVE POD
     * followed by the initially STANDBY POD. A fresh array is built on each call.
     * @return an array of Strings
     */
    public static String[] getPods() {
        return new String[] { initial_active_pod, initial_standby_pod };
    }
    /**
     * Get an array of all POD InetAddresses in the DR network.
     * Entries may be null for PODs whose DNS lookup failed; this returns the
     * live internal array, not a copy.
     * @return an array of InetAddresses
     */
    public static InetAddress[] getPodAddresses() {
        return podAddresses;
    }
    /**
     * Tell whether this host is the initially ACTIVE provisioning server (POD).
     * Note: this used to be called isActivePOD(), however, that is a misnomer, as the active status
     * could shift to the standby POD without these parameters changing. Hence, the function names
     * have been changed to more accurately reflect their purpose.
     * @return true iff this host's name equals the initially-active POD's FQDN
     */
    public static boolean isInitialActivePOD() {
        return this_pod.equals(initial_active_pod);
    }
    /**
     * Tell whether this host is the initially STANDBY provisioning server (POD).
     * Note: this used to be called isStandbyPOD(), however, that is a misnomer, as the standby status
     * could shift to the active POD without these parameters changing. Hence, the function names
     * have been changed to more accurately reflect their purpose.
     * @return true iff this host's name equals the initially-standby POD's FQDN
     */
    public static boolean isInitialStandbyPOD() {
        return this_pod.equals(initial_standby_pod);
    }
+ /**
+ * INSERT an {@link Insertable} bean into the database.
+ * @param bean the bean representing a row to insert
+ * @return true if the INSERT was successful
+ */
+ protected boolean doInsert(Insertable bean) {
+ boolean rv = false;
+ DB db = new DB();
+ Connection conn = null;
+ try {
+ conn = db.getConnection();
+ rv = bean.doInsert(conn);
+ } catch (SQLException e) {
+ rv = false;
+ intlogger.warn("PROV0005 doInsert: "+e.getMessage());
+ e.printStackTrace();
+ } finally {
+ if (conn != null)
+ db.release(conn);
+ }
+ return rv;
+ }
+ /**
+ * UPDATE an {@link Updateable} bean in the database.
+ * @param bean the bean representing a row to update
+ * @return true if the UPDATE was successful
+ */
+ protected boolean doUpdate(Updateable bean) {
+ boolean rv = false;
+ DB db = new DB();
+ Connection conn = null;
+ try {
+ conn = db.getConnection();
+ rv = bean.doUpdate(conn);
+ } catch (SQLException e) {
+ rv = false;
+ intlogger.warn("PROV0006 doUpdate: "+e.getMessage());
+ e.printStackTrace();
+ } finally {
+ if (conn != null)
+ db.release(conn);
+ }
+ return rv;
+ }
+ /**
+ * DELETE an {@link Deleteable} bean from the database.
+ * @param bean the bean representing a row to delete
+ * @return true if the DELETE was successful
+ */
+ protected boolean doDelete(Deleteable bean) {
+ boolean rv = false;
+ DB db = new DB();
+ Connection conn = null;
+ try {
+ conn = db.getConnection();
+ rv = bean.doDelete(conn);
+ } catch (SQLException e) {
+ rv = false;
+ intlogger.warn("PROV0007 doDelete: "+e.getMessage());
+ e.printStackTrace();
+ } finally {
+ if (conn != null)
+ db.release(conn);
+ }
+ return rv;
+ }
+ private static boolean getBoolean(Map map, String name) {
+ String s = map.get(name);
+ return (s != null) && s.equalsIgnoreCase("true");
+ }
+ private static String getString(Map map, String name, String dflt) {
+ String s = map.get(name);
+ return (s != null) ? s : dflt;
+ }
+ private static int getInt(Map map, String name, int dflt) {
+ try {
+ String s = map.get(name);
+ return Integer.parseInt(s);
+ } catch (NumberFormatException e) {
+ return dflt;
+ }
+ }
+ private static Set getSet(Map map, String name) {
+ Set set = new HashSet