21 files changed, 2752 insertions, 2675 deletions
@@ -1,4 +1,4 @@
 [gerrit]
-host=gerrit.openecomp.org
+host=gerrit.onap.org
 port=29418
 project=dcae/collectors/ves.git
@@ -8,7 +8,7 @@ This is the repository for VES Collector for Open DCAE.
 This project is organized as a mvn project for a jar package.
 ```
-git clone ssh://git@<repo-address>:dcae-collectors/OpenVESCollector.git
+git clone ssh://git@<repo-address>:dcae/collectors/ves/VESCollector.git
 mvn clean install
 ```
@@ -19,7 +19,7 @@ The jar file is bundled with DCM controller jar into a docker image; This will b
 #### Set up the packaging environment
 1. Extract the VESCollector code and do mvn build
 ```
-$ git clone ssh://git@<repo-address>:dcae-collectors/OpenVESCollector.git
+$ git clone ssh://git@<repo-address>:dcae/collectors/ves/VESCollector.git
 ```
 2. Once the collecter build is successful build dcae-controller
@@ -79,10 +79,10 @@ EOF
 ```
 4. Extract VES collector jar and copy required directory into image build directory
 ```
-AR=${WORKSPACE}/target/OpenVESCollector-0.0.1-SNAPSHOT-bundle.tar.gz
+AR=${WORKSPACE}/target/VESCollector-0.0.1-SNAPSHOT-bundle.tar.gz
 STAGE=${WORKSPACE}/target/stage
 APP_DIR=${STAGE}/opt/app/SEC
-[ -d ${STAGE}/opt/app/OpenVESCollector-0.0.1-SNAPSHOT ] && rm -rf ${STAGE}/opt/app/OpenVESCollector-0.0.1-SNAPSHOT
+[ -d ${STAGE}/opt/app/VESCollector-0.0.1-SNAPSHOT ] && rm -rf ${STAGE}/opt/app/VESCollector-0.0.1-SNAPSHOT
 [ ! -f $APP_DIR ] && mkdir -p ${APP_DIR}
 gunzip -c ${AR} | tar xvf - -C ${APP_DIR} --strip-components=1
 # lji: removal of ^M in the VES startup script
diff --git a/docker-build.sh b/docker-build.sh
index 41331f7..e306427 100755
--- a/docker-build.sh
+++ b/docker-build.sh
@@ -13,19 +13,19 @@ EXT=$(echo "$VERSION" | rev | cut -s -f1 -d'-' | rev)
 if [ -z "$EXT" ]; then
     EXT="STAGING"
 fi
-case $phase in
+case $phase in
     verify|merge)
         if [ "$EXT" != 'SNAPSHOT' ]; then
             echo "$phase job only takes SNAPSHOT version, got \"$EXT\" instead"
             exit 1
-        fi
+        fi
         ;;
     release)
         if [ ! -z "$EXT" ] && [ "$EXT" != 'STAGING' ]; then
             echo "$phase job only takes STAGING or pure numerical version, got \"$EXT\" instead"
             exit 1
         fi
-        ;;
+        ;;
     *)
         echo "Unknown phase \"$phase\""
         exit 1
@@ -49,10 +49,10 @@ DCM_DIR="${STAGE}/opt/app/manager"
 unzip -qo -d "${DCM_DIR}" "${DCM_AR}"
 # unarchive the collector
-AR=${WORKSPACE}/target/OpenVESCollector-${VERSION}-bundle.tar.gz
-APP_DIR=${STAGE}/opt/app/SEC
+AR=${WORKSPACE}/target/VESCollector-${VERSION}-bundle.tar.gz
+APP_DIR=${STAGE}/opt/app/VESCollector
-[ -d "${STAGE}/opt/app/OpenVESCollector-${VERSION}" ] && rm -rf "${STAGE}/opt/app/OpenVESCollector-${VERSION}"
+[ -d "${STAGE}/opt/app/VESCollector-${VERSION}" ] && rm -rf "${STAGE}/opt/app/VESCollector-${VERSION}"
 [ ! -f "${APP_DIR}" ] && mkdir -p "${APP_DIR}"
@@ -80,8 +80,8 @@ cd \$WORKDIR
 echo 10.0.4.102 \$(hostname).dcae.simpledemo.openecomp.org >> /etc/hosts
 if [ ! -e config ]; then
-    echo no configuration directory setup: \$WORKDIR/config
-    exit 1
+    echo no configuration directory setup: \$WORKDIR/config
+    exit 1
 fi
 exec java -cp ./config:./lib:./lib/*:./bin \$MAIN \$ACTION > logs/manager.out 2>logs/manager.err
@@ -101,7 +101,7 @@ MAINTAINER dcae@lists.openecomp.org
 WORKDIR /opt/app/manager
-ENV HOME /opt/app/SEC
+ENV HOME /opt/app/VESCollector
 ENV JAVA_HOME /usr
 RUN apt-get update && apt-get install -y \
@@ -133,8 +133,8 @@ BUILD_PATH="${WORKSPACE}/target/stage"
 echo docker build --rm -t "${LFQI}" "${BUILD_PATH}"
 docker build --rm -t "${LFQI}" "${BUILD_PATH}"
-case $phase in
-    verify)
+case $phase in
+    verify)
         exit 0
         ;;
 esac
@@ -172,9 +172,9 @@ OLDTAG="${LFQI}"
 PUSHTAGS="${REPO}/${IMAGE}:${VERSION}${EXT}${TIMESTAMP} ${REPO}/${IMAGE}:latest ${REPO}/${IMAGE}:${VERSION2}${EXT}-latest"
 for NEWTAG in ${PUSHTAGS}
 do
-    echo "tagging ${OLDTAG} to ${NEWTAG}"
+    echo "tagging ${OLDTAG} to ${NEWTAG}"
     docker tag "${OLDTAG}" "${NEWTAG}"
-    echo "pushing ${NEWTAG}"
+    echo "pushing ${NEWTAG}"
     docker push "${NEWTAG}"
     OLDTAG="${NEWTAG}"
 done
diff --git a/etc/CommonEventFormat_Vendors_v26.0.json b/etc/CommonEventFormat_27.2.json
index 88d52d6..14a0c8d 100644
--- a/etc/CommonEventFormat_Vendors_v26.0.json
+++ b/etc/CommonEventFormat_27.2.json
@@ -1,1383 +1,1165 @@
[1,383 removed lines of the old CommonEventFormat_Vendors_v26.0.json schema omitted; the replacement 27.2 schema follows as the added lines below]
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "definitions": {
+ "attCopyrightNotice": {
+ "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved",
+ "type": "object",
+ "properties": {
+ "useAndRedistribution": {
+ "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:",
+ "type": "string"
+ },
+ "condition1": {
+ "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.",
+ "type": "string"
+ },
+ "condition2": {
+ "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.",
+ "type": "string"
+ },
+ "condition3": {
+ "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.",
+ "type": "string"
+ },
+ "condition4": {
+ "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.",
+ "type": "string"
+ },
+ "disclaimerLine1": {
+ "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS",
+ "type": "string"
+ },
+ "disclaimerLine2": {
+ "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES",
+ "type": "string"
+ },
+ "disclaimerLine3": {
+ "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,",
+ "type": "string"
+ },
+ "disclaimerLine4": {
+ "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.",
+ "type": "string"
+ }
+ }
+ },
+ "codecsInUse": {
+ "description": "number of times an identified codec was used over the measurementInterval",
+ "type": "object",
+ "properties": {
+ "codecIdentifier": { "type": "string" },
+ "numberInUse": { "type": "number" }
+ },
+ "required": [ "codecIdentifier", "numberInUse" ]
+ },
+ "command": {
+ "description": "command from an event collector toward an event source",
+ "type": "object",
+ "properties": {
+ "commandType": {
+ "type": "string",
+ "enum": [
+ "heartbeatIntervalChange",
+ "measurementIntervalChange",
+ "provideThrottlingState",
+ "throttlingSpecification"
+ ]
+ },
+ "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" },
+ "measurementInterval": { "type": "number" }
+ },
+ "required": [ "commandType" ]
+ },
+ "commandList": {
+ "description": "array of commands from an event collector toward an event source",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/commandListEntry"
+ },
+ "minItems": 0
+ },
+ "commandListEntry": {
+ "description": "reference to a command object",
+ "type": "object",
+ "properties": {
+ "command": {"$ref": "#/definitions/command"}
+ },
+ "required": [ "command" ]
+ },
+ "commonEventHeader": {
+ "description": "fields common to all events",
+ "type": "object",
+ "properties": {
+ "domain": {
+ "description": "the eventing domain associated with the event",
+ "type": "string",
+ "enum": [
+ "fault",
+ "heartbeat",
+ "measurementsForVfScaling",
+ "mobileFlow",
+ "other",
+ "stateChange",
+ "syslog",
+ "thresholdCrossingAlert"
+ ]
+ },
+ "eventId": {
+ "description": "event key that is unique to the event source",
+ "type": "string"
+ },
+ "eventType": {
+ "description": "unique event topic name",
+ "type": "string"
+ },
+ "functionalRole": {
+ "description": "function of the event source e.g., eNodeB, MME, PCRF",
+ "type": "string"
+ },
+ "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" },
+ "lastEpochMicrosec": {
+ "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
+ "type": "number"
+ },
+ "priority": {
+ "description": "processing priority",
+ "type": "string",
+ "enum": [
+ "High",
+ "Medium",
+ "Normal",
+ "Low"
+ ]
+ },
+ "reportingEntityId": {
+ "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process",
+ "type": "string"
+ },
+ "reportingEntityName": {
+ "description": "name of the entity reporting the event, for example, an OAM VM",
+ "type": "string"
+ },
+ "sequence": {
+ "description": "ordering of events communicated by an event source instance or 0 if not needed",
+ "type": "integer"
+ },
+ "sourceId": {
+ "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process",
+ "type": "string"
+ },
+ "sourceName": {
+ "description": "name of the entity experiencing the event issue",
+ "type": "string"
+ },
+ "startEpochMicrosec": {
+ "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
+ "type": "number"
+ },
+ "version": {
+ "description": "version of the event header",
+ "type": "number"
+ }
+ },
+ "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec",
+ "priority", "reportingEntityName", "sequence",
+ "sourceName", "startEpochMicrosec" ]
+ },
+ "counter": {
+ "description": "performance counter",
+ "type": "object",
+ "properties": {
+ "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] },
+ "name": { "type": "string" },
+ "thresholdCrossed": { "type": "string" },
+ "value": { "type": "string"}
+ },
+ "required": [ "criticality", "name", "thresholdCrossed", "value" ]
+ },
+ "cpuUsage": {
+ "description": "percent usage of an identified CPU",
+ "type": "object",
+ "properties": {
+ "cpuIdentifier": { "type": "string" },
+ "percentUsage": { "type": "number" }
+ },
+ "required": [ "cpuIdentifier", "percentUsage" ]
+ },
+ "errors": {
+ "description": "receive and transmit errors for the measurements domain",
+ "type": "object",
+ "properties": {
+ "receiveDiscards": { "type": "number" },
+ "receiveErrors": { "type": "number" },
+ "transmitDiscards": { "type": "number" },
+ "transmitErrors": { "type": "number" }
+ },
+ "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ]
+ },
+ "event": {
+ "description": "the root level of the common event format",
+ "type": "object",
+ "properties": {
+ "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" },
+ "faultFields": { "$ref": "#/definitions/faultFields" },
+ "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" },
+ "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" },
+ "otherFields": { "$ref": "#/definitions/otherFields" },
+ "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" },
+ "syslogFields": { "$ref": "#/definitions/syslogFields" },
+ "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" }
+ },
+ "required": [ "commonEventHeader" ]
+ },
+ "eventDomainThrottleSpecification": {
+ "description": "specification of what information to suppress within an event domain",
+ "type": "object",
+ "properties": {
+ "eventDomain": {
+ "description": "Event domain enum from the commonEventHeader domain field",
+ "type": "string"
+ },
+ "suppressedFieldNames": {
+ "description": "List of optional field names in the event block that should not be sent to the Event Listener",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "suppressedNvPairsList": {
+ "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/suppressedNvPairs"
+ }
+ }
+ },
+ "required": [ "eventDomain" ]
+ },
+ "eventDomainThrottleSpecificationList": {
+ "description": "array of eventDomainThrottleSpecifications",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/eventDomainThrottleSpecification"
+ },
+ "minItems": 0
+ },
+ "eventList": {
+ "description": "array of events",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/event"
+ }
+ },
+ "eventThrottlingState": {
+ "description": "reports the throttling in force at the event source",
+ "type": "object",
+ "properties": {
+ "eventThrottlingMode": {
+ "description": "Mode the event manager is in",
+ "type": "string",
+ "enum": [
+ "normal",
+ "throttled"
+ ]
+ },
+ "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" }
+ },
+ "required": [ "eventThrottlingMode" ]
+ },
+ "faultFields": {
+ "description": "fields specific to fault events",
+ "type": "object",
+ "properties": {
+ "alarmAdditionalInformation": {
+ "description": "additional alarm information",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "alarmCondition": {
+ "description": "alarm condition reported by the device",
+ "type": "string"
+ },
+ "alarmInterfaceA": {
+ "description": "card, port, channel or interface name of the device generating the alarm",
+ "type": "string"
+ },
+ "eventSeverity": {
+ "description": "event severity or priority",
+ "type": "string",
+ "enum": [
+ "CRITICAL",
+ "MAJOR",
+ "MINOR",
+ "WARNING",
+ "NORMAL"
+ ]
+ },
+ "eventSourceType": {
+ "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction",
+ "type": "string"
+ },
+ "faultFieldsVersion": {
+ "description": "version of the faultFields block",
+ "type": "number"
+ },
+ "specificProblem": {
+ "description": "short description of the alarm or problem",
+ "type": "string"
+ },
+ "vfStatus": {
+ "description": "virtual function status enumeration",
+ "type": "string",
+ "enum": [
+ "Active",
+ "Idle",
+ "Preparing to terminate",
+ "Ready to terminate",
+ "Requesting termination"
+ ]
+ }
+ },
+ "required": [ "alarmCondition", "eventSeverity",
+ "eventSourceType", "specificProblem", "vfStatus" ]
+ },
+ "featuresInUse": {
+ "description": "number of times an identified feature was used over the measurementInterval",
+ "type": "object",
+ "properties": {
+ "featureIdentifier": { "type": "string" },
+ "featureUtilization": { "type": "number" }
+ },
+ "required": [ "featureIdentifier", "featureUtilization" ]
+ },
+ "field": {
+ "description": "name value pair",
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "value": { "type": "string" }
+ },
+ "required": [ "name", "value" ]
+ },
+ "filesystemUsage": {
+ "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second",
+ "type": "object",
+ "properties": {
+ "blockConfigured": { "type": "number" },
+ "blockIops": { "type": "number" },
+ "blockUsed": { "type": "number" },
+ "ephemeralConfigured": { "type": "number" },
+ "ephemeralIops": { "type": "number" },
+ "ephemeralUsed": { "type": "number" },
+ "filesystemName": { "type": "string" }
+ },
+ "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured",
+ "ephemeralIops", "ephemeralUsed", "filesystemName" ]
+ },
+ "gtpPerFlowMetrics": {
+ "description": "Mobility GTP Protocol per flow metrics",
+ "type": "object",
+ "properties": {
+ "avgBitErrorRate": {
+ "description": "average bit error rate",
+ "type": "number"
+ },
+ "avgPacketDelayVariation": {
+ "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets",
+ "type": "number"
+ },
+ "avgPacketLatency": {
+ "description": "average delivery latency",
+ "type": "number"
+ },
+ "avgReceiveThroughput": {
+ "description": "average receive throughput",
+ "type": "number"
+ },
+ "avgTransmitThroughput": {
+ "description": "average transmit throughput",
+ "type": "number"
+ },
+ "durConnectionFailedStatus": {
+ "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval",
+ "type": "number"
+ },
+ "durTunnelFailedStatus": {
+ "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval",
+ "type": "number"
+ },
+ "flowActivatedBy": {
+ "description": "Endpoint activating the flow",
+ "type": "string"
+ },
+ "flowActivationEpoch": {
+ "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available",
+ "type": "number"
+ },
+ "flowActivationMicrosec": {
+ "description": "Integer microseconds for the start of the flow connection",
+ "type": "number"
+ },
+ "flowActivationTime": {
+ "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
+ "type": "string"
+ },
+ "flowDeactivatedBy": {
+ "description": "Endpoint deactivating the flow",
+ "type": "string"
+ },
+ "flowDeactivationEpoch": {
+ "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time",
+ "type": "number"
+ },
+ "flowDeactivationMicrosec": {
+ "description": "Integer microseconds for the start of the flow connection",
+ "type": "number"
+ },
+ "flowDeactivationTime": {
+ "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
+ "type": "string"
+ },
+ "flowStatus": {
+ "description": "connection status at reporting time as a working / inactive / failed indicator value",
+ "type": "string"
+ },
+ "gtpConnectionStatus": {
+ "description": "Current connection state at reporting time",
+ "type": "string"
+ },
+ "gtpTunnelStatus": {
+ "description": "Current tunnel state at reporting time",
+ "type": "string"
+ },
+ "ipTosCountList": {
+ "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow",
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": [
+ { "type": "string" },
+ { "type": "number" }
+ ]
+ }
+ },
+ "ipTosList": {
+ "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "largePacketRtt": {
+ "description": "large packet round trip time",
+ "type": "number"
+ },
+ "largePacketThreshold": {
+ "description": "large packet threshold being applied",
+ "type": "number"
+ },
+ "maxPacketDelayVariation": {
+ "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets",
+ "type": "number"
+ },
+ "maxReceiveBitRate": {
+ "description": "maximum receive bit rate",
+ "type": "number"
+ },
+ "maxTransmitBitRate": {
+ "description": "maximum transmit bit rate",
+ "type": "number"
+ },
+ "mobileQciCosCountList": {
+ "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow",
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": [
+ { "type": "string" },
+ { "type": "number" }
+ ]
+ }
+ },
+ "mobileQciCosList": {
+ "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "numActivationFailures": {
+ "description": "Number of failed activation requests, as observed by the reporting node",
+ "type": "number"
+ },
+ "numBitErrors": {
+ "description": "number of errored bits",
+ "type": "number"
+ },
+ "numBytesReceived": {
+ "description": "number of bytes received, including retransmissions",
+ "type": "number"
+ },
+ "numBytesTransmitted": {
+ "description": "number of bytes transmitted, including retransmissions",
+ "type": "number"
+ },
+ "numDroppedPackets": {
+ "description": "number of received packets dropped due to errors per virtual interface",
+ "type": "number"
+ },
+ "numGtpEchoFailures": {
+ "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2",
+ "type": "number"
+ },
+ "numGtpTunnelErrors": {
+ "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1",
+ "type": "number"
+ },
+ "numHttpErrors": {
+ "description": "Http error count",
+ "type": "number"
+ },
+ "numL7BytesReceived": {
+ "description": "number of tunneled layer 7 bytes received, including retransmissions",
+ "type": "number"
+ },
+ "numL7BytesTransmitted": {
+ "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions",
+ "type": "number"
+ },
+ "numLostPackets": {
+ "description": "number of lost packets",
+ "type": "number"
+ },
+ "numOutOfOrderPackets": {
+ "description": "number of out-of-order packets",
+ "type": "number"
+ },
+ "numPacketErrors": {
+ "description": "number of errored packets",
+ "type": "number"
+ },
+ "numPacketsReceivedExclRetrans": {
+ "description": "number of packets received, excluding retransmission",
+ "type": "number"
+ },
+ "numPacketsReceivedInclRetrans": {
+ "description": "number of packets received, including retransmission",
+ "type": "number"
+ },
+ "numPacketsTransmittedInclRetrans": {
+ "description": "number of packets transmitted, including retransmissions",
+ "type": "number"
+ },
+ "numRetries": {
+ "description": "number of packet retries",
+ "type": "number"
+ },
+ "numTimeouts": {
+ "description": "number of packet timeouts",
+ "type": "number"
+ },
+ "numTunneledL7BytesReceived": {
+ "description": "number of tunneled layer 7 bytes received, excluding retransmissions",
+ "type": "number"
+ },
+ "roundTripTime": {
+ "description": "round trip time",
+ "type": "number"
+ },
+ "tcpFlagCountList": {
+ "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow",
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": [
+ { "type": "string" },
+ { "type": "number" }
+ ]
+ }
+ },
+ "tcpFlagList": {
+ "description": "Array of unique TCP Flags observed in the flow",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "timeToFirstByte": {
+ "description": "Time in milliseconds between the connection activation and first byte received",
+ "type": "number"
+ }
+ },
+ "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency",
+ "avgReceiveThroughput", "avgTransmitThroughput",
+ "flowActivationEpoch", "flowActivationMicrosec",
+ "flowDeactivationEpoch", "flowDeactivationMicrosec",
+ "flowDeactivationTime", "flowStatus",
+ "maxPacketDelayVariation", "numActivationFailures",
+ "numBitErrors", "numBytesReceived", "numBytesTransmitted",
+ "numDroppedPackets", "numL7BytesReceived",
+ "numL7BytesTransmitted", "numLostPackets",
+ "numOutOfOrderPackets", "numPacketErrors",
+ "numPacketsReceivedExclRetrans",
+ "numPacketsReceivedInclRetrans",
+ "numPacketsTransmittedInclRetrans",
+ "numRetries", "numTimeouts", "numTunneledL7BytesReceived",
+ "roundTripTime", "timeToFirstByte"
+ ]
+ },
+ "internalHeaderFields": {
+ "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources",
+ "type": "object"
+ },
+ "latencyBucketMeasure": {
+ "description": "number of counts falling within a defined latency bucket",
+ "type": "object",
+ "properties": {
+ "countsInTheBucket": { "type": "number" },
+ "highEndOfLatencyBucket": { "type": "number" },
+ "lowEndOfLatencyBucket": { "type": "number" }
+ },
+ "required": [ "countsInTheBucket" ]
+ },
+ "measurementGroup": {
+ "description": "measurement group",
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "measurements": {
+ "description": "array of name value pair measurements",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ }
+ },
+ "required": [ "name", "measurements" ]
+ },
+ "measurementsForVfScalingFields": {
+ "description": "measurementsForVfScaling fields",
+ "type": "object",
+ "properties": {
+ "additionalMeasurements": {
+ "description": "additional measurement fields",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/measurementGroup"
+ }
+ },
+ "aggregateCpuUsage": {
+ "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running",
+ "type": "number"
+ },
+ "codecUsageArray": {
+ "description": "array of codecs in use",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/codecsInUse"
+ }
+ },
+ "concurrentSessions": {
+ "description": "peak concurrent sessions for the VM or VNF over the measurementInterval",
+ "type": "number"
+ },
+ "configuredEntities": {
+ "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF",
+ "type": "number"
+ },
+ "cpuUsageArray": {
+ "description": "usage of an array of CPUs",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/cpuUsage"
+ }
+ },
+ "errors": { "$ref": "#/definitions/errors" },
+ "featureUsageArray": {
+ "description": "array of features in use",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/featuresInUse"
+ }
+ },
+ "filesystemUsageArray": {
+ "description": "filesystem usage of the VM on which the VNFC reporting the event is running",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/filesystemUsage"
+ }
+ },
+ "latencyDistribution": {
+ "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/latencyBucketMeasure"
+ }
+ },
+ "meanRequestLatency": {
+ "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running",
+ "type": "number"
+ },
+ "measurementInterval": {
+ "description": "interval over which measurements are being reported in seconds",
+ "type": "number"
+ },
+ "measurementsForVfScalingVersion": {
+ "description": "version of the measurementsForVfScaling block",
+ "type": "number"
+ },
+ "memoryConfigured": {
+ "description": "memory in MB configured in the VM on which the VNFC reporting the event is running",
+ "type": "number"
+ },
+ "memoryUsed": {
+ "description": "memory usage in MB of the VM on which the VNFC reporting the event is running",
+ "type": "number"
+ },
+ "numberOfMediaPortsInUse": {
+ "description": "number of media ports in use",
+ "type": "number"
+ },
+ "requestRate": {
+ "description": "peak rate of service requests per second to the VNF over the measurementInterval",
+ "type": "number"
+ },
+ "vnfcScalingMetric": {
+ "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC",
+ "type": "number"
+ },
+ "vNicUsageArray": {
+ "description": "usage of an array of virtual network interface cards",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/vNicUsage"
+ }
+ }
+ },
+ "required": [ "measurementInterval" ]
+ },
+ "mobileFlowFields": {
+ "description": "mobileFlow fields",
+ "type": "object",
+ "properties": {
+ "additionalFields": {
+ "description": "additional mobileFlow fields if needed",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "applicationType": {
+ "description": "Application type inferred",
+ "type": "string"
+ },
+ "appProtocolType": {
+ "description": "application protocol",
+ "type": "string"
+ },
+ "appProtocolVersion": {
+ "description": "application protocol version",
+ "type": "string"
+ },
+ "cid": {
+ "description": "cell id",
+ "type": "string"
+ },
+ "connectionType": {
+ "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc",
+ "type": "string"
+ },
+ "ecgi": {
+ "description": "Evolved Cell Global Id",
+ "type": "string"
+ },
+ "flowDirection": {
+ "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow",
+ "type": "string"
+ },
+ "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" },
+ "gtpProtocolType": {
+ "description": "GTP protocol",
+ "type": "string"
+ },
+ "gtpVersion": {
+ "description": "GTP protocol version",
+ "type": "string"
+ },
+ "httpHeader": {
+ "description": "HTTP request header, if the flow connects to a node referenced by HTTP",
+ "type": "string"
+ },
+ "imei": {
+ "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device",
+ "type": "string"
+ },
+ "imsi": {
+ "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device",
+ "type": "string"
+ },
+ "ipProtocolType": {
+ "description": "IP protocol type e.g., TCP, UDP, RTP...",
+ "type": "string"
+ },
+ "ipVersion": {
+ "description": "IP protocol version e.g., IPv4, IPv6",
+ "type": "string"
+ },
+ "lac": {
+ "description": "location area code",
+ "type": "string"
+ },
+ "mcc": {
+ "description": "mobile country code",
+ "type": "string"
+ },
+ "mnc": {
+ "description": "mobile network code",
+ "type": "string"
+ },
+ "mobileFlowFieldsVersion": {
+ "description": "version of the mobileFlowFields block",
+ "type": "number"
+ },
+ "msisdn": {
+ "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device",
+ "type": "string"
+ },
+ "otherEndpointIpAddress": {
+ "description": "IP address for the other endpoint, as used for the flow being reported on",
+ "type": "string"
+ },
+ "otherEndpointPort": {
+ "description": "IP Port for the reporting entity, as used for the flow being reported on",
+ "type": "number"
+ },
+ "otherFunctionalRole": {
+ "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...",
+ "type": "string"
+ },
+ "rac": {
+ "description": "routing area code",
+ "type": "string"
+ },
+ "radioAccessTechnology": {
+ "description": "Radio Access Technology e.g., 2G, 3G, LTE",
+ "type": "string"
+ },
+ "reportingEndpointIpAddr": {
+ "description": "IP address for the reporting entity, as used for the flow being reported on",
+ "type": "string"
+ },
+ "reportingEndpointPort": {
+ "description": "IP port for the reporting entity, as used for the flow being reported on",
+ "type": "number"
+ },
+ "sac": {
+ "description": "service area code",
+ "type": "string"
+ },
+ "samplingAlgorithm": {
+ "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied",
+ "type": "number"
+ },
+ "tac": {
+ "description": "transport area code",
+ "type": "string"
+ },
+ "tunnelId": {
+ "description": "tunnel identifier",
+ "type": "string"
+ },
+ "vlanId": {
+ "description": "VLAN identifier used by this flow",
+ "type": "string"
+ }
+ },
+ "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType",
+ "ipVersion", "otherEndpointIpAddress", "otherEndpointPort",
+ "reportingEndpointIpAddr", "reportingEndpointPort" ]
+ },
+ "otherFields": {
+ "description": "additional fields not reported elsewhere",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "requestError": {
+ "description": "standard request error data structure",
+ "type": "object",
+ "properties": {
+ "messageId": {
+ "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception",
+ "type": "string"
+ },
+ "text": {
+ "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1",
+ "type": "string"
+ },
+ "url": {
+ "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents",
+ "type": "string"
+ },
+ "variables": {
+ "description": "List of zero or more strings that represent the contents of the variables used by the message text",
+ "type": "string"
+ }
+ },
+ "required": [ "messageId", "text" ]
+ },
+ "stateChangeFields": {
+ "description": "stateChange fields",
+ "type": "object",
+ "properties": {
+ "additionalFields": {
+ "description": "additional stateChange fields if needed",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "newState": {
+ "description": "new state of the entity",
+ "type": "string",
+ "enum": [
+ "inService",
+ "maintenance",
+ "outOfService"
+ ]
+ },
+ "oldState": {
+ "description": "previous state of the entity",
+ "type": "string",
+ "enum": [
+ "inService",
+ "maintenance",
+ "outOfService"
+ ]
+ },
+ "stateChangeFieldsVersion": {
+ "description": "version of the stateChangeFields block",
+ "type": "number"
+ },
+ "stateInterface": {
+ "description": "card or port name of the entity that changed state",
+ "type": "string"
+ }
+ },
+ "required": [ "newState", "oldState", "stateInterface" ]
+ },
+ "suppressedNvPairs": {
+ "description": "List of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling",
+ "type": "object",
+ "properties": {
+ "nvPairFieldName": {
+ "description": "Name of the field within which are the nvpair names to suppress",
+ "type": "string"
+ },
+ "suppressedNvPairNames": {
+ "description": "Array of nvpair names to suppress within the nvpairFieldName",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ "required": [ "nvPairFieldName", "suppressedNvPairNames" ]
+ },
+ "syslogFields": {
+ "description": "sysLog fields",
+ "type": "object",
+ "properties": {
+ "additionalFields": {
+ "description": "additional syslog fields if needed",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "eventSourceHost": {
+ "description": "hostname of the device",
+ "type": "string"
+ },
+ "eventSourceType": {
+ "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction",
+ "type": "string"
+ },
+ "syslogFacility": {
+ "description": "numeric code from 0 to 23 for facility--see table in documentation",
+ "type": "number"
+ },
+ "syslogFieldsVersion": {
+ "description": "version of the syslogFields block",
+ "type": "number"
+ },
+ "syslogMsg": {
+ "description": "syslog message",
+ "type": "string"
+ },
+ "syslogPri": {
+ "description": "0-192 combined severity and facility",
+ "type": "number"
+ },
+ "syslogProc": {
+ "description": "identifies the application that originated the message",
+ "type": "string"
+ },
+ "syslogProcId": {
+ "description": "a change in the value of this field indicates a discontinuity in syslog reporting",
+ "type": "number"
+ },
+ "syslogSData": {
+ "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs",
+ "type": "string"
+ },
+ "syslogSdId": {
+ "description": "0-32 char in format name@number for example ourSDID@32473",
+ "type": "string"
+ },
+ "syslogSev": {
+ "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8",
+ "type": "string"
+ },
+ "syslogTag": {
+ "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided",
+ "type": "string"
+ },
+ "syslogVer": {
+ "description": "IANA assigned version of the syslog protocol specification - typically 1",
+ "type": "number"
+ }
+ },
+ "required": [ "eventSourceType", "syslogMsg", "syslogTag" ]
+ },
+ "thresholdCrossingAlertFields": {
+ "description": "fields specific to threshold crossing alert events",
+ "type": "object",
+ "properties": {
+ "additionalFields": {
+ "description": "additional threshold crossing alert fields if needed",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "additionalParameters": {
+ "description": "performance counters",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/counter"
+ }
+ },
+ "alertAction": {
+ "description": "Event action",
+ "type": "string",
+ "enum": [
+ "CLEAR",
+ "CONT",
+ "SET"
+ ]
+ },
+ "alertDescription": {
+ "description": "Unique short alert description such as IF-SHUB-ERRDROP",
+ "type": "string"
+ },
+ "alertType": {
+ "description": "Event type",
+ "type": "string",
+ "enum": [
+ "CARD-ANOMALY",
+ "ELEMENT-ANOMALY",
+ "INTERFACE-ANOMALY",
+ "SERVICE-ANOMALY"
+ ]
+ },
+ "alertValue": {
+ "description": "Calculated API value (if applicable)",
+ "type": "string"
+ },
+ "associatedAlertIdList": {
+ "description": "List of eventIds associated with the event being reported",
+ "type": "array",
+ "items": { "type": "string" }
+ },
+ "collectionTimestamp": {
+ "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
+ "type": "string"
+ },
+ "dataCollector": {
+ "description": "Specific performance collector instance used",
+ "type": "string"
+ },
+ "elementType": {
+ "description": "type of network element - internal ATT field",
+ "type": "string"
+ },
+ "eventSeverity": {
+ "description": "event severity or priority",
+ "type": "string",
+ "enum": [
+ "CRITICAL",
+ "MAJOR",
+ "MINOR",
+ "WARNING",
+ "NORMAL"
+ ]
+ },
+ "eventStartTimestamp": {
+ "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
+ "type": "string"
+ },
+ "interfaceName": {
+ "description": "Physical or logical port or card (if applicable)",
+ "type": "string"
+ },
+ "networkService": {
+ "description": "network name - internal ATT field",
+ "type": "string"
+ },
+ "possibleRootCause": {
+ "description": "Reserved for future use",
+ "type": "string"
+ },
+ "thresholdCrossingFieldsVersion": {
+ "description": "version of the thresholdCrossingAlertFields block",
+ "type": "number"
+ }
+ },
+ "required": [
+ "additionalParameters",
+ "alertAction",
+ "alertDescription",
+ "alertType",
+ "collectionTimestamp",
+ "eventSeverity",
+ "eventStartTimestamp"
+ ]
+ },
+ "vNicUsage": {
+ "description": "usage of identified virtual network interface card",
+ "type": "object",
+ "properties": {
+ "broadcastPacketsIn": { "type": "number" },
+ "broadcastPacketsOut": { "type": "number" },
+ "bytesIn": { "type": "number" },
+ "bytesOut": { "type": "number" },
+ "multicastPacketsIn": { "type": "number" },
+ "multicastPacketsOut": { "type": "number" },
+ "packetsIn": { "type": "number" },
+ "packetsOut": { "type": "number" },
+ "unicastPacketsIn": { "type": "number" },
+ "unicastPacketsOut": { "type": "number" },
+ "vNicIdentifier": { "type": "string" }
+ },
+ "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"]
+ }
+ },
+ "title": "Event Listener",
+ "type": "object",
+ "properties": {
+ "event": {"$ref": "#/definitions/event"}
+ }
+}
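The definitions above make up the draft-04 schema the collector now validates against when collector.schema.checkflag=1 and collector.schema.file points at ./etc/CommonEventFormat_27.2.json (see the collector.properties change below). The sketch that follows is an illustrative, standalone way to exercise that schema using the same json-schema-validator calls as CommonStartup.schemavalidate later in this change; the SchemaCheck class name and the command-line event file are assumptions made for the example, not files in this repository.

```java
// Illustrative sketch (not part of this change): validate one VES event file
// against the CommonEventFormat_27.2.json schema, mirroring the calls made in
// CommonStartup.schemavalidate. Assumes json-schema-validator and Jackson are
// on the classpath, as they are for the collector itself.
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Iterator;

import com.fasterxml.jackson.databind.JsonNode;
import com.github.fge.jsonschema.main.JsonSchema;
import com.github.fge.jsonschema.main.JsonSchemaFactory;
import com.github.fge.jsonschema.report.ProcessingMessage;
import com.github.fge.jsonschema.report.ProcessingReport;
import com.github.fge.jsonschema.util.JsonLoader;

public class SchemaCheck {
    public static void main(String[] args) throws Exception {
        // args[0]: path to a JSON file holding a single event, e.g. {"event":{...}}
        String schemaText = new String(
                Files.readAllBytes(Paths.get("./etc/CommonEventFormat_27.2.json")),
                StandardCharsets.UTF_8);
        String eventText = new String(
                Files.readAllBytes(Paths.get(args[0])), StandardCharsets.UTF_8);

        JsonNode schemaNode = JsonLoader.fromString(schemaText);
        JsonNode eventNode = JsonLoader.fromString(eventText);

        JsonSchema schema = JsonSchemaFactory.byDefault().getJsonSchema(schemaNode);
        ProcessingReport report = schema.validate(eventNode);

        // Same pass/fail signal that schemavalidate reduces to a "true"/"false" string
        System.out.println("valid=" + report.isSuccess());
        Iterator<ProcessingMessage> it = report.iterator();
        while (it.hasNext()) {
            System.out.println(it.next().getMessage());
        }
    }
}
```

For example, a fault event that satisfies the faultFields block above must carry at least alarmCondition, eventSeverity, eventSourceType, specificProblem and vfStatus, so a rejected fault event will surface those property names in the printed report.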
\ No newline at end of file
diff --git a/etc/HPProcessingConfig.json b/etc/DmaapConfig.json
index 59b6a4d..701bf58 100644
--- a/etc/HPProcessingConfig.json
+++ b/etc/DmaapConfig.json
@@ -11,4 +11,4 @@
 }
 ]
-}
\ No newline at end of file +} diff --git a/etc/collector.properties b/etc/collector.properties index a2f9d60..672a7fe 100644 --- a/etc/collector.properties +++ b/etc/collector.properties @@ -20,8 +20,8 @@ collector.service.port=8080 ## The secure port is required if header.authflag is set to 1 (true) ## Authentication is only supported via secure port -#collector.service.secure.port=8443 - +## When enabled - require valid keystore defined +##collector.service.secure.port=8443 ## The keystore must be setup per installation when secure port is configured collector.keystore.file.location=../etc/keystore @@ -29,7 +29,8 @@ collector.keystore.passwordfile=./etc/passwordfile collector.keystore.alias=tomcat -################################################################################# Processing +############################################################################### +## Processing ## ## If there's a problem that prevents the collector from processing alarms, ## it's normally better to apply back pressure to the caller than to try to @@ -42,34 +43,23 @@ collector.keystore.alias=tomcat ## default no validation checkflag (-1) ## If enabled (1) - schemafile location must be specified collector.schema.checkflag=1 -collector.schema.file=./etc/CommonEventFormat_Vendors_v26.0.json - +collector.schema.file=./etc/CommonEventFormat_27.2.json -## To be used when multiple streamid to be supported for later release -collector.dmaap.streamid=sec_measurement - -## Highland Park processor config is specified as a comma-delimited list of -## files to load. Note that the "phase" is "collector". Also note that the -## collector creates an input channel for the events passed via API. -collector.hpprocessing=./etc/HPProcessingConfig.json -#collector.hpprocessing=./etc/DmaapConfig.json +## List all streamid per domain to be supported. 
The streamid should match to channel name on dmaapfile +collector.dmaap.streamid=fault=sec_fault,roadm-sec-to-hp|syslog=sec_syslog|heartbeat=sec_heartbeat|measurementsForVfScaling=sec_measurement|mobileFlow=sec_mobileflow|other=sec_other|stateChange=sec_statechange|thresholdCrossingAlert=sec_thresholdCrossingAlert +collector.dmaapfile=./etc/DmaapConfig.json ## Custom ExceptionConfiguration exceptionConfig=./etc/ExceptionConfig.json -## authflag enables/disables basic authentication by the collector -## If enabled (1) - then authid/pwd has to be defined -## Authid and pwd for validating incoming event header request via basic auth -## For initial deploy - below static setting will be used -## And can be updated via controller +## authflag control authentication by the collector +## If enabled (1) - then authlist has to be defined +## When authflag is enabled, only secure port will be supported +## To disable enter 0/-1 header.authflag=0 - -## Combintion of userid,base64 encoded pwd list to be supported +## Combination of userid,base64 encoded pwd list to be supported ## userid and pwd comma separated; pipe delimitation between each pair -header.authlist=secureid,IWRjYWVSb2FkbTEyMyEt|sample1,c2FtcGxlMQ== - -## To be used when multiple accounts to be supported for later release -header.authstore=./etc/userstore +header.authlist=secureid,IWRjYWVSb2FkbTEyMyEt|sample1,c2FtcGxlMQ==|vdnsagg,dmRuc2FnZw== ############################################################################### ## diff --git a/etc/log4j.xml b/etc/log4j.xml index 86d2e5a..19023b6 100644 --- a/etc/log4j.xml +++ b/etc/log4j.xml @@ -82,7 +82,7 @@ </appender> <appender name="ECOMP_ERROR" class="org.apache.log4j.RollingFileAppender"> - <param name="threshold" value="INFO" /> <!-- only WARN and ERROR are allowed in this log --> + <param name="threshold" value="WARN" /> <!-- only WARN and ERROR are allowed in this log --> <param name="File" value="./logs/ecomp/error.log" /> <param name="MaxFileSize" value="128MB"/> <param name="MaxBackupIndex" value="10"/> @@ -90,7 +90,7 @@ </appender> <appender name="ECOMP_DEBUG" class="org.apache.log4j.RollingFileAppender"> - <param name="threshold" value="INFO" /> + <param name="threshold" value="DEBUG" /> <param name="File" value="./logs/ecomp/debug.log" /> <param name="MaxFileSize" value="128MB"/> <param name="MaxBackupIndex" value="20"/> @@ -103,6 +103,8 @@ <appender-ref ref="FILE" /> <appender-ref ref="CONSOLE" /> <appender-ref ref="ECOMP_AUDIT" /> + <appender-ref ref="ECOMP_DEBUG" /> + <appender-ref ref="ECOMP_ERROR" /> </root> <logger name="org.openecomp.dcae.commonFunction.input" additivity="false"> @@ -149,19 +151,21 @@ <appender-ref ref="ECOMP_DEBUG" /> </logger> + <logger name="org.openecomp.dcae.commonFunction.EventPublisher" additivity="false"> + <level value="debug"/> + <appender-ref ref="CONSOLE" /> + <appender-ref ref="FILE" /> + </logger> + <logger name="com.att.nsa.apiClient.http.HttpClient" additivity="false"> <level value="info"/> - <appender-ref ref="ECOMP_ERROR" /> - <appender-ref ref="EFILE"/> <appender-ref ref="FILE" /> <appender-ref ref="CONSOLE" /> </logger> <logger name="com.att.nsa.cambria.client.impl.CambriaSimplerBatchPublisher" additivity="false"> <level value="info"/> - <appender-ref ref="ECOMP_ERROR" /> - <appender-ref ref="EFILE"/> <appender-ref ref="FILE" /> <appender-ref ref="CONSOLE" /> </logger> diff --git a/etc/logback.xml b/etc/logback.xml deleted file mode 100644 index a3c0052..0000000 --- a/etc/logback.xml +++ /dev/null @@ -1,127 +0,0 @@ -<?xml version="1.0" 
encoding="UTF-8"?> -<configuration scan="true"> - - <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender"> - <encoder> - <pattern>%d [%thread] %-5level %logger{36} - %msg%n</pattern> - </encoder> - </appender> - - <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>./logs/collector.log</file> - <encoder> - <pattern>%d{yyyy-MMM-dd HH:mm:ss,SSS,GMT+0} [%thread] %-5level %logger{36} - %msg%n</pattern> - </encoder> - <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> - <fileNamePattern>./logs/collector.%i.log</fileNamePattern> - <minIndex>1</minIndex> - <maxIndex>10</maxIndex> - </rollingPolicy> - <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> - <maxFileSize>128MB</maxFileSize> - </triggeringPolicy> - <!-- - <layout class="org.apache.log4j.PatternLayout"> - <param name="ConversionPattern" value="[%d{yyyy-MMM-dd HH:mm:ss,SSS}][%-5p][%-10t][%-5c][%4L]%m%n" /> - </layout> - --> - </appender> - - <appender name="IFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>./logs/input.log</file> - <encoder> - <pattern>%d{yyyy-MMM-dd HH:mm:ss,SSS,GMT+0} [%thread] %-5level %logger{36} - %msg%n</pattern> - </encoder> - <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> - <fileNamePattern>./logs/input.%i.log</fileNamePattern> - <minIndex>1</minIndex> - <maxIndex>10</maxIndex> - </rollingPolicy> - <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> - <maxFileSize>128MB</maxFileSize> - </triggeringPolicy> - <!-- - <layout class="org.apache.log4j.PatternLayout"> - <param name="ConversionPattern" value="[%d{yyyy-MMM-dd HH:mm:ss,SSS}][%-5p][%-10t][%-5c][%4L]%m%n" /> - </layout> - --> - </appender> - - <appender name="OFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>./logs/output.log</file> - <encoder> - <pattern>%d{yyyy-MMM-dd HH:mm:ss,SSS,GMT+0} [%thread] %-5level %logger{36} - %msg%n</pattern> - </encoder> - <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> - <fileNamePattern>./logs/output.%i.log</fileNamePattern> - <minIndex>1</minIndex> - <maxIndex>10</maxIndex> - </rollingPolicy> - <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> - <maxFileSize>128MB</maxFileSize> - </triggeringPolicy> - <!-- - <layout class="org.apache.log4j.PatternLayout"> - <param name="ConversionPattern" value="[%d{yyyy-MMM-dd HH:mm:ss,SSS}][%-5p][%-10t][%-5c][%4L]%m%n" /> - </layout> - --> - </appender> - - <appender name="EFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>./logs/error.log</file> - <encoder> - <pattern>%d{yyyy-MMM-dd HH:mm:ss,SSS,GMT+0} [%thread] %-5level %logger{36} - %msg%n</pattern> - </encoder> - <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> - <fileNamePattern>./logs/error.%i.log</fileNamePattern> - <minIndex>1</minIndex> - <maxIndex>10</maxIndex> - </rollingPolicy> - <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> - <maxFileSize>128MB</maxFileSize> - </triggeringPolicy> - <!-- - <layout class="org.apache.log4j.PatternLayout"> - <param name="ConversionPattern" value="[%d{yyyy-MMM-dd HH:mm:ss,SSS}][%-5p][%-10t][%-5c][%4L]%m%n" /> - </layout> - --> - </appender> - <appender name="ECOMP_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender"> - <file>./logs/ecomperror.log</file> - <encoder> - 
<pattern>%d{yyyy-MM-dd'T'HH:mm:ss,GMT+0}+00:00|%X{requestId}||%X{serviceInstanceId}|%-10thread|%X{serverName}|%X{serviceName}|%X{instanceUuid}|%level|%X{severity}|%X{serverIpAddress}|%X{server}|%X{ipAddress}|%X{className}|%X{timer}|%msg%n</pattern> - </encoder> - <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy"> - <fileNamePattern>./logs/ecomperror.%i.log</fileNamePattern> - <minIndex>1</minIndex> - <maxIndex>10</maxIndex> - </rollingPolicy> - <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy"> - <maxFileSize>128MB</maxFileSize> - </triggeringPolicy> - </appender> - <!-- the other 3 ECOMP logs are omitted for this release --> - - <root level="info"> - <appender-ref ref="FILE" /> - <appender-ref ref="CONSOLE" /> - - <!-- if/when the ECOMP team runs this server... - <appender-ref ref="ECOMP_ERROR" /> - --> - </root> - <logger name="org.openecomp.dcae.commonFunction.input" additivity="false"> - <level value="INFO"/> - <appender-ref ref="IFILE"/> - </logger> - - <logger name="org.openecomp.dcae.commonFunction.output" additivity="false"> - <level value="INFO"/> - <appender-ref ref="OFILE"/> - </logger> - - <logger name="org.openecomp.dcae.commonFunction.error" additivity="false"> - <level value="INFO"/> - <appender-ref ref="EFILE"/> - </logger> -</configuration> @@ -1,18 +1,15 @@ -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.openecomp.dcae.collectors.ves</groupId> - <artifactId>OpenVESCollector</artifactId> + <artifactId>VESCollector</artifactId> <version>1.1.0-SNAPSHOT</version> - <name>OpenVESCollector</name> - <description>OpenVESCollector</description> - + <name>VESCollector</name> + <description>VESCollector</description> <properties> - <compiler.source.version>1.7</compiler.source.version> - <compiler.target.version>1.7</compiler.target.version> + <compiler.target.version>1.7</compiler.target.version> <main.basedir>${project.basedir}</main.basedir> <surefire.plugin.version>2.19.1</surefire.plugin.version> <surefire.report.plugin.version>2.19.1</surefire.report.plugin.version> @@ -34,7 +31,7 @@ <!--PLUGIN SETTINGS --> - <pmd.violation.buildfail>true</pmd.violation.buildfail> + <pmd.violation.buildfail>false</pmd.violation.buildfail> <findbugs.failOnError>true</findbugs.failOnError> <checkstyle.failOnViolation>true</checkstyle.failOnViolation> <!-- <checkstyle.file.name>checkstyle.xml</checkstyle.file.name> --> @@ -47,6 +44,10 @@ <!-- <sonar.host.url>http://localhost:9000</sonar.host.url> --> <!-- <maven.test.skip>true</maven.test.skip> --> <nexusproxy>https://nexus.onap.org</nexusproxy> + <snapshots.path>content/repositories/snapshots/</snapshots.path> + <releases.path>content/repositories/releases/</releases.path> + <site.path>content/sites/site/org/onap/dcae/collectors/ves/${project.artifactId}/${project.version}</site.path> + </properties> <pluginRepositories> @@ -90,19 +91,9 @@ <groupId>com.att.nsa</groupId> <artifactId>nsaServerLibrary</artifactId> <version>1.0.10</version> - <exclusions> - <exclusion> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-log4j12</artifactId> - </exclusion> - <exclusion> - 
<groupId>log4j</groupId> - <artifactId>log4j</artifactId> - </exclusion> - </exclusions> </dependency> - <!-- <dependency> + <!--<dependency> <groupId>com.att.nsa</groupId> <artifactId>saToolkit</artifactId> <version>1.1.3</version> @@ -111,7 +102,7 @@ <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> - <version>1.7.19</version> + <version>1.7.21</version> </dependency> <dependency> <groupId>log4j</groupId> @@ -119,22 +110,6 @@ <version>1.2.17</version> </dependency> - <!-- embedded tomcat --> - <dependency> - <groupId>org.apache.tomcat</groupId> - <artifactId>tomcat-catalina</artifactId> - <version>7.0.54</version> - </dependency> - <dependency> - <groupId>org.apache.tomcat</groupId> - <artifactId>tomcat-util</artifactId> - <version>7.0.54</version> - </dependency> - <dependency> - <groupId>org.apache.tomcat.embed</groupId> - <artifactId>tomcat-embed-core</artifactId> - <version>7.0.54</version> - </dependency> <!-- https://mvnrepository.com/artifact/org.json/json --> <dependency> @@ -144,14 +119,14 @@ </dependency> </dependencies> -<!-- - <repositories> + + <!-- <repositories> <repository> <id>external-repository</id> <url>https://oss.sonatype.org/content/repositories</url> </repository> - </repositories> ---> + </repositories> --> + <build> <pluginManagement> @@ -452,7 +427,7 @@ <execution> <id>cpd-check</id> <goals> - <goal>cpd-check</goal> + <!-- <goal>cpd-check</goal> --> </goals> <configuration> <printFailingErrors>true</printFailingErrors> @@ -525,7 +500,7 @@ </plugin> <!-- blackduck maven plugin --> - <!--plugin> + <!-- <plugin> <groupId>com.blackducksoftware.integration</groupId> <artifactId>hub-maven-plugin</artifactId> <version>1.4.0</version> @@ -543,7 +518,7 @@ </goals> </execution> </executions> - </plugin--> + </plugin> --> </plugins> </pluginManagement> <plugins> @@ -629,7 +604,7 @@ <artifactId>maven-dependency-plugin</artifactId> <version>3.0.0</version> <executions> - <execution> + <execution> <id>copy</id> <phase>compile</phase> <goals> @@ -640,7 +615,7 @@ <artifactItem> <groupId>org.openecomp.dcae.controller</groupId> <artifactId>dcae-controller-service-standardeventcollector-manager</artifactId> - <!--version>0.1.0-SNAPSHOT</version--> + <version>${project.version}</version> <type>zip</type> <classifier>runtime</classifier> @@ -649,15 +624,15 @@ <destFileName>manager.zip</destFileName> </artifactItem> </artifactItems> - <!-- other configurations here --> + </configuration> </execution> </executions> </plugin> - </plugins> + </plugins> - </build> +</build> <reporting> <plugins> @@ -697,19 +672,19 @@ <distributionManagement> <site> - <id>dcae-javadoc</id> + <id>ecomp-site</id> <!-- <url>file:LOCALDIR/${project.artifactId}/</url> --> - <url>dav:https://ecomp-nexus:8443/repository/dcae-javadoc/${project.artifactId}/${project.version}</url> + <url>dav:${nexusproxy}/${site.path}/</url> </site> <repository> <id>ecomp-releases</id> <name>Open eCOMP Release Repository</name> - <url>${nexusproxy}/content/repositories/releases/</url> + <url>${nexusproxy}/${releases.path}</url> </repository> <snapshotRepository> <id>ecomp-snapshots</id> <name>Open eCOMP Snapshot Repository</name> - <url>${nexusproxy}/content/repositories/snapshots/</url> + <url>${nexusproxy}/${snapshots.path}</url> </snapshotRepository> </distributionManagement> diff --git a/src/main/java/org/openecomp/dcae/commonFunction/CommonStartup.java b/src/main/java/org/openecomp/dcae/commonFunction/CommonStartup.java index 7c1ff22..869a5c7 100644 --- 
a/src/main/java/org/openecomp/dcae/commonFunction/CommonStartup.java
+++ b/src/main/java/org/openecomp/dcae/commonFunction/CommonStartup.java
@@ -1,324 +1,341 @@
-/*-
+/*-
 * ============LICENSE_START=======================================================
 * PROJECT
 * ================================================================================
 * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
 * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and * limitations under the License. - * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.commonFunction;
-
-
-
-import java.io.IOException;
-
-import java.net.URL;
-import java.nio.charset.Charset;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import java.util.Queue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import javax.servlet.ServletException;
-
-import org.apache.catalina.LifecycleException;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.openecomp.dcae.restapi.RestfulCollectorServlet;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-import com.att.nsa.apiServer.ApiServer;
-import com.att.nsa.apiServer.ApiServerConnector;
-import com.att.nsa.apiServer.endpoints.NsaBaseEndpoint;
-import com.att.nsa.cmdLine.NsaCommandLineUtil;
-import com.att.nsa.drumlin.service.framework.DrumlinServlet;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile;
-import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
-import com.att.nsa.drumlin.till.nv.impl.nvReadableTable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.github.fge.jsonschema.exceptions.ProcessingException;
-import com.github.fge.jsonschema.main.JsonSchema;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
-import com.github.fge.jsonschema.report.ProcessingMessage;
-import com.github.fge.jsonschema.report.ProcessingReport;
-import com.github.fge.jsonschema.util.JsonLoader;
-
-
-public class CommonStartup extends NsaBaseEndpoint implements Runnable
-{
- public static final String kConfig = "c";
-
- public static final String kSetting_Port = "collector.service.port";
- public static final int kDefault_Port = 8080;
-
- public static final String kSetting_SecurePort = "collector.service.secure.port";
- public static final int kDefault_SecurePort = -1;
-
- public static final String kSetting_KeystorePassfile = "collector.keystore.passwordfile";
- public static final String kDefault_KeystorePassfile = "../etc/passwordfile";
- public static final String kSetting_KeystoreFile = "collector.keystore.file.location";
- public static final String kDefault_KeystoreFile = "../etc/keystore";
- public static final String kSetting_KeyAlias = "collector.keystore.alias";
- public static final String kDefault_KeyAlias = "tomcat";
-
- public static final String kSetting_ProcessingConfigs = "collector.hpprocessing";
- protected static final String[] kDefault_ProcessingConfigs = new String[] { "etc/HPProcessingConfig.json" };
-
- public static final String kSetting_MaxQueuedEvents = "collector.inputQueue.maxPending";
- public static final int kDefault_MaxQueuedEvents = 1024*4;
-
- public static final String kSetting_schemaValidator = "collector.schema.checkflag";
- public static final int kDefault_schemaValidator = -1;
-
- public static final String kSetting_schemaFile = "collector.schema.file";
- public static final String kSetting_ExceptionConfig = "exceptionConfig";
-
- public static final String kSetting_dmaapStreamid = "collector.dmaap.streamid";
-
- public static final String kSetting_authflag = "header.authflag";
- public static final int kDefault_authflag = 0;
-
- public static final String kSetting_authid = "header.authid";
- public static final String kSetting_authpwd = "header.authpwd";
- public static final String kSetting_authstore = "header.authstore";
- public static final String kSetting_authlist = "header.authlist";
-
-
-
- public static final Logger inlog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.input" );
- public static final Logger oplog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.output");
- public static final Logger eplog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.error");
- public static final Logger metriclog = LoggerFactory.getLogger ("com.att.ecomp.metrics" );
-
- public static int schema_Validatorflag = -1;
- public static int authflag = 1;
- public static String schemaFile = null;
- public static String exceptionConfig = null;
- public static String cambriaConfigFile = null;
- private boolean listnerstatus = false;
- static String streamid = null;
-
- private CommonStartup(rrNvReadable settings) throws loadException, missingReqdSetting, IOException, rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, InterruptedException
- {
- final List<ApiServerConnector> connectors = new LinkedList<ApiServerConnector> ();
-
- if (settings.getInt ( kSetting_Port, kDefault_Port ) > 0)
- {
- // http service
- connectors.add (
- new ApiServerConnector.Builder ( settings.getInt ( kSetting_Port, kDefault_Port ) )
- .secure ( false )
- .build ()
- );
- }
-
- // optional https service
- final int securePort = settings.getInt(kSetting_SecurePort, kDefault_SecurePort);
- final String keystoreFile = settings.getString(kSetting_KeystoreFile, kDefault_KeystoreFile);
- final String keystorePasswordFile = settings.getString(kSetting_KeystorePassfile, kDefault_KeystorePassfile);
- final String keyAlias = settings.getString (kSetting_KeyAlias, kDefault_KeyAlias);
-
-
- if (securePort > 0)
- {
- final String kSetting_KeystorePass = readFile(keystorePasswordFile, Charset.defaultCharset());
- connectors.add(new ApiServerConnector.Builder(securePort)
- .secure(true)
- .keystorePassword(kSetting_KeystorePass)
- .keystoreFile(keystoreFile)
- .keyAlias(keyAlias)
- .build());
-
- }
-
- //Reading other config properties
-
- schema_Validatorflag = settings.getInt(kSetting_schemaValidator, kDefault_schemaValidator );
- if (schema_Validatorflag > 0){
- schemaFile = settings.getString(kSetting_schemaFile,null);
- }
- exceptionConfig = settings.getString(kSetting_ExceptionConfig, null);
- authflag = settings.getInt(CommonStartup.kSetting_authflag, CommonStartup.kDefault_authflag );
- String [] currentconffile = settings.getStrings (CommonStartup.kSetting_ProcessingConfigs, CommonStartup.kDefault_ProcessingConfigs ) ;
- cambriaConfigFile= currentconffile[0] ;
- streamid = settings.getString(kSetting_dmaapStreamid,null);
-
- fTomcatServer = new ApiServer.Builder(connectors, new RestfulCollectorServlet(settings))
- .encodeSlashes(true)
- .name("collector")
- .build();
-
-
- //Load override exception map
- CustomExceptionLoader.LoadMap();
- setListnerstatus(true);
- }
-
- public static void main ( String[] args )
- {
- try
- {
- // process command line arguments
- final Map<String, String> argMap = NsaCommandLineUtil.processCmdLine ( args, true );
- final String config = NsaCommandLineUtil.getSetting ( argMap, kConfig, "collector.properties" );
- final URL settingStream = DrumlinServlet.findStream ( config, CommonStartup.class );
-
- final nvReadableStack settings = new nvReadableStack ();
- settings.push ( new nvPropertiesFile ( settingStream ) );
- settings.push ( new nvReadableTable ( argMap ) );
-
- fProcessingInputQueue = new LinkedBlockingQueue<JSONObject> (CommonStartup.kDefault_MaxQueuedEvents);
- CommonStartup cs= new CommonStartup ( settings );
-
- Thread csmain = new Thread(cs);
- csmain.start();
-
- EventProcessor ep = new EventProcessor ();
- Thread epThread=new Thread(ep);
- epThread.start();
-
- //cs.startAndAwait ();
-
- }
- catch ( loadException | missingReqdSetting | IOException | invalidSettingValue | ServletException | InterruptedException e )
- {
- CommonStartup.eplog.error("FATAL_STARTUP_ERROR" + e.getMessage() );
- throw new RuntimeException ( e );
- }
- }
-
- public void run() {
- try {
- fTomcatServer.start ();
- } catch (LifecycleException | IOException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- fTomcatServer.await ();
- }
-
- public boolean isListnerstatus() {
- return listnerstatus;
- }
-
- public void setListnerstatus(boolean listnerstatus) {
- this.listnerstatus = listnerstatus;
- }
- public static Queue<JSONObject> getProcessingInputQueue ()
- {
- return fProcessingInputQueue;
- }
-
- public static class QueueFullException extends Exception
- {
- private static final long serialVersionUID = 1L;
- }
-
-
- public static void handleEvents ( JSONArray a ) throws QueueFullException, JSONException, IOException
- {
- final Queue<JSONObject> queue = getProcessingInputQueue ();
- try
- {
-
- CommonStartup.metriclog.info("EVENT_PUBLISH_START" );
- for (int i = 0; i < a.length(); i++) {
- if ( !queue.offer ( a.getJSONObject(i) ) ) {
- throw new QueueFullException ();
- }
-
- }
- log.debug("CommonStartup.handleEvents:EVENTS has been published successfully!");
- CommonStartup.metriclog.info("EVENT_PUBLISH_END");
- //ecomplogger.debug(secloggerMessageEnum.SEC_COLLECT_AND_PULIBISH_SUCCESS);
-
- }
- catch ( JSONException e ){
- throw e;
-
- }
- }
-
-
- static String readFile(String path, Charset encoding)
- throws IOException
- {
- byte[] encoded = Files.readAllBytes(Paths.get(path));
- String pwd = new String(encoded);
- return pwd.substring(0,pwd.length()-1);
- }
-
-
- public static String schemavalidate( String jsonData, String jsonSchema) {
- ProcessingReport report = null;
- String result = "false";
-
- try {
- //System.out.println("Applying schema: @<@<"+jsonSchema+">@>@ to data: #<#<"+jsonData+">#>#");
- log.trace("Schema validation for event:" + jsonData);
- JsonNode schemaNode = JsonLoader.fromString(jsonSchema);
- JsonNode data = JsonLoader.fromString(jsonData);
- JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
- JsonSchema schema = factory.getJsonSchema(schemaNode);
- report = schema.validate(data);
- } catch (JsonParseException e) {
- log.error("schemavalidate:JsonParseException for event:" + jsonData );
- System.out.println(e.getMessage());
- return e.getMessage().toString();
- } catch (ProcessingException e) {
- log.error("schemavalidate:Processing exception for event:" + jsonData );
- System.out.println(e.getMessage());
- return e.getMessage().toString();
- } catch (IOException e) {
- log.error("schemavalidate:IO exception; something went wrong trying to read json data for event:" + jsonData);
- System.out.println(e.getMessage());
- return e.getMessage().toString();
- }
- if (report != null) {
- Iterator<ProcessingMessage> iter = report.iterator();
- while (iter.hasNext()) {
- ProcessingMessage pm = iter.next();
- log.trace("Processing Message: "+pm.getMessage());
- }
- result = String.valueOf(report.isSuccess());
- }
- try {
- log.trace("Validation Result:" +result + " Validation report:" + report);
- }
- catch (NullPointerException e){
- log.error("schemavalidate:NullpointerException on report");
- }
- return result;
- }
-
-
-
- static LinkedBlockingQueue<JSONObject> fProcessingInputQueue;
- private static ApiServer fTomcatServer = null;
- private static final Logger log = LoggerFactory.getLogger ( CommonStartup.class );
-}
+ * ============LICENSE_END========================================================= + */ + +package org.openecomp.dcae.commonFunction; + + + +import java.io.IOException; + +import java.net.URL; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import java.util.Queue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; + +import javax.servlet.ServletException; + +import org.apache.catalina.LifecycleException; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.openecomp.dcae.restapi.RestfulCollectorServlet; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +import com.att.nsa.apiServer.ApiServer; +import com.att.nsa.apiServer.ApiServerConnector; +import com.att.nsa.apiServer.endpoints.NsaBaseEndpoint; +import com.att.nsa.cmdLine.NsaCommandLineUtil; +import com.att.nsa.drumlin.service.framework.DrumlinServlet; +import com.att.nsa.drumlin.till.nv.rrNvReadable; +import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile; +import com.att.nsa.drumlin.till.nv.impl.nvReadableStack; +import com.att.nsa.drumlin.till.nv.impl.nvReadableTable; +import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue; +import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonNode; +import com.github.fge.jsonschema.exceptions.ProcessingException; +import com.github.fge.jsonschema.main.JsonSchema; +import com.github.fge.jsonschema.main.JsonSchemaFactory; +import com.github.fge.jsonschema.report.ProcessingMessage; +import com.github.fge.jsonschema.report.ProcessingReport; +import com.github.fge.jsonschema.util.JsonLoader; + + +public class CommonStartup extends NsaBaseEndpoint implements Runnable +{ + public static final String kConfig = "c"; + + public static final String kSetting_Port = "collector.service.port"; + public static final int kDefault_Port = 8080; + + public static final String kSetting_SecurePort = "collector.service.secure.port"; + public static final int kDefault_SecurePort = -1; + + public static final String kSetting_KeystorePassfile = "collector.keystore.passwordfile"; + public static final String kDefault_KeystorePassfile = "../etc/passwordfile"; + public static final String kSetting_KeystoreFile = "collector.keystore.file.location"; + public static final String kDefault_KeystoreFile = "../etc/keystore"; + public static final String kSetting_KeyAlias = "collector.keystore.alias"; + public static final String kDefault_KeyAlias = "tomcat"; + + public static final String kSetting_DmaapConfigs = "collector.dmaapfile"; + protected static final String[] kDefault_DmaapConfigs = new String[] { "/etc/DmaapConfig.json" }; + + public static final String kSetting_MaxQueuedEvents = "collector.inputQueue.maxPending"; + public static final int kDefault_MaxQueuedEvents = 1024*4; + + public static final String kSetting_schemaValidator = "collector.schema.checkflag"; + public static final int kDefault_schemaValidator = -1; + + public static final String kSetting_schemaFile = "collector.schema.file"; + public static final String kSetting_ExceptionConfig = "exceptionConfig"; + + public static final String kSetting_dmaapStreamid = 
"collector.dmaap.streamid"; + + public static final String kSetting_authflag = "header.authflag"; + public static final int kDefault_authflag = -1; + + public static final String kSetting_authid = "header.authid"; + public static final String kSetting_authpwd = "header.authpwd"; + public static final String kSetting_authstore = "header.authstore"; + public static final String kSetting_authlist = "header.authlist"; + + + + public static final Logger inlog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.input" ); + public static final Logger oplog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.output"); + public static final Logger eplog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.error"); + public static final Logger metriclog = LoggerFactory.getLogger ("com.att.ecomp.metrics" ); + + public static int schema_Validatorflag = -1; + public static int authflag = 1; + public static String schemaFile = null; + public static String exceptionConfig = null; + public static String cambriaConfigFile = null; + private boolean listnerstatus = false; + static String streamid = null; + + private CommonStartup(rrNvReadable settings) throws loadException, missingReqdSetting, IOException, rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, InterruptedException + { + final List<ApiServerConnector> connectors = new LinkedList<ApiServerConnector> (); + + if (settings.getInt ( kSetting_Port, kDefault_Port ) > 0) + { + // http service + connectors.add ( + new ApiServerConnector.Builder ( settings.getInt ( kSetting_Port, kDefault_Port ) ) + .secure ( false ) + .build () + ); + } + + // optional https service + final int securePort = settings.getInt(kSetting_SecurePort, kDefault_SecurePort); + final String keystoreFile = settings.getString(kSetting_KeystoreFile, kDefault_KeystoreFile); + final String keystorePasswordFile = settings.getString(kSetting_KeystorePassfile, kDefault_KeystorePassfile); + final String keyAlias = settings.getString (kSetting_KeyAlias, kDefault_KeyAlias); + + + if (securePort > 0) + { + final String kSetting_KeystorePass = readFile(keystorePasswordFile, Charset.defaultCharset()); + connectors.add(new ApiServerConnector.Builder(securePort) + .secure(true) + .keystorePassword(kSetting_KeystorePass) + .keystoreFile(keystoreFile) + .keyAlias(keyAlias) + .build()); + + } + + //Reading other config properties + + schema_Validatorflag = settings.getInt(kSetting_schemaValidator, kDefault_schemaValidator ); + if (schema_Validatorflag > 0){ + schemaFile = settings.getString(kSetting_schemaFile,null); + } + exceptionConfig = settings.getString(kSetting_ExceptionConfig, null); + authflag = settings.getInt(CommonStartup.kSetting_authflag, CommonStartup.kDefault_authflag ); + String [] currentconffile = settings.getStrings (CommonStartup.kSetting_DmaapConfigs, CommonStartup.kDefault_DmaapConfigs ) ; + cambriaConfigFile= currentconffile[0] ; + streamid = settings.getString(kSetting_dmaapStreamid,null); + + fTomcatServer = new ApiServer.Builder(connectors, new RestfulCollectorServlet(settings)) + .encodeSlashes(true) + .name("collector") + .build(); + + + //Load override exception map + CustomExceptionLoader.LoadMap(); + setListnerstatus(true); + } + + public static void main ( String[] args ) + { + ExecutorService executor = null; + try + { + // process command line arguments + final Map<String, String> argMap = NsaCommandLineUtil.processCmdLine ( args, true ); + final String config = NsaCommandLineUtil.getSetting ( argMap, 
kConfig, "collector.properties" ); + final URL settingStream = DrumlinServlet.findStream ( config, CommonStartup.class ); + + final nvReadableStack settings = new nvReadableStack (); + settings.push ( new nvPropertiesFile ( settingStream ) ); + settings.push ( new nvReadableTable ( argMap ) ); + + fProcessingInputQueue = new LinkedBlockingQueue<JSONObject> (CommonStartup.kDefault_MaxQueuedEvents); + + VESLogger.setUpEcompLogging(); + + CommonStartup cs= new CommonStartup ( settings ); + + Thread csmain = new Thread(cs); + csmain.start(); + + + EventProcessor ep = new EventProcessor (); + //Thread epThread=new Thread(ep); + //epThread.start(); + executor = Executors.newFixedThreadPool(20); + executor.execute(ep); + + } + catch ( loadException | missingReqdSetting | IOException | invalidSettingValue | ServletException | InterruptedException e ) + { + CommonStartup.eplog.error("FATAL_STARTUP_ERROR" + e.getMessage() ); + throw new RuntimeException ( e ); + } + finally + { + // This will make the executor accept no new threads + // and finish all existing threads in the queue + if (executor != null){ + executor.shutdown(); + } + + } + } + + public void run() { + try { + fTomcatServer.start (); + } catch (LifecycleException | IOException e) { + + e.printStackTrace(); + } + fTomcatServer.await (); + } + + public boolean isListnerstatus() { + return listnerstatus; + } + + public void setListnerstatus(boolean listnerstatus) { + this.listnerstatus = listnerstatus; + } + public static Queue<JSONObject> getProcessingInputQueue () + { + return fProcessingInputQueue; + } + + public static class QueueFullException extends Exception + { + private static final long serialVersionUID = 1L; + } + + + public static void handleEvents ( JSONArray a ) throws QueueFullException, JSONException, IOException + { + final Queue<JSONObject> queue = getProcessingInputQueue (); + try + { + + CommonStartup.metriclog.info("EVENT_PUBLISH_START" ); + for (int i = 0; i < a.length(); i++) { + if ( !queue.offer ( a.getJSONObject(i) ) ) { + throw new QueueFullException (); + } + + } + log.debug("CommonStartup.handleEvents:EVENTS has been published successfully!"); + CommonStartup.metriclog.info("EVENT_PUBLISH_END"); + //ecomplogger.debug(secloggerMessageEnum.SEC_COLLECT_AND_PULIBISH_SUCCESS); + + } + catch ( JSONException e ){ + throw e; + + } + } + + + static String readFile(String path, Charset encoding) + throws IOException + { + byte[] encoded = Files.readAllBytes(Paths.get(path)); + String pwd = new String(encoded); + return pwd.substring(0,pwd.length()-1); + } + + + public static String schemavalidate( String jsonData, String jsonSchema) { + ProcessingReport report = null; + String result = "false"; + + try { + //System.out.println("Applying schema: @<@<"+jsonSchema+">@>@ to data: #<#<"+jsonData+">#>#"); + log.trace("Schema validation for event:" + jsonData); + JsonNode schemaNode = JsonLoader.fromString(jsonSchema); + JsonNode data = JsonLoader.fromString(jsonData); + JsonSchemaFactory factory = JsonSchemaFactory.byDefault(); + JsonSchema schema = factory.getJsonSchema(schemaNode); + report = schema.validate(data); + } catch (JsonParseException e) { + log.error("schemavalidate:JsonParseException for event:" + jsonData ); + System.out.println(e.getMessage()); + return e.getMessage().toString(); + } catch (ProcessingException e) { + log.error("schemavalidate:Processing exception for event:" + jsonData ); + System.out.println(e.getMessage()); + return e.getMessage().toString(); + } catch (IOException e) { + 
log.error("schemavalidate:IO exception; something went wrong trying to read json data for event:" + jsonData); + System.out.println(e.getMessage()); + return e.getMessage().toString(); + } + if (report != null) { + Iterator<ProcessingMessage> iter = report.iterator(); + while (iter.hasNext()) { + ProcessingMessage pm = iter.next(); + log.trace("Processing Message: "+pm.getMessage()); + } + result = String.valueOf(report.isSuccess()); + } + try { + log.debug("Validation Result:" +result + " Validation report:" + report); + } + catch (NullPointerException e){ + log.error("schemavalidate:NullpointerException on report"); + } + return result; + } + + + + static LinkedBlockingQueue<JSONObject> fProcessingInputQueue; + private static ApiServer fTomcatServer = null; + private static final Logger log = LoggerFactory.getLogger ( CommonStartup.class ); +} diff --git a/src/main/java/org/openecomp/dcae/commonFunction/CustomExceptionLoader.java b/src/main/java/org/openecomp/dcae/commonFunction/CustomExceptionLoader.java index 0adf7b4..245f9b4 100644 --- a/src/main/java/org/openecomp/dcae/commonFunction/CustomExceptionLoader.java +++ b/src/main/java/org/openecomp/dcae/commonFunction/CustomExceptionLoader.java @@ -1,121 +1,131 @@ -/*-
+/*- * ============LICENSE_START======================================================= * PROJECT * ================================================================================ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and * limitations under the License. - * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.commonFunction;
-
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.util.HashMap;
-
-import java.util.Map.Entry;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonIOException;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
-import com.google.gson.JsonSyntaxException;
-
-
-public class CustomExceptionLoader {
-
- public static HashMap<String, JsonArray> map = null;
- private static final Logger log = LoggerFactory.getLogger ( CustomExceptionLoader.class );
-
- //For standalone test
- //LoadMap Invoked from servletSetup
- /*
- public static void main(String[] args) {
-
- System.out.println("CustomExceptionLoader.main --> Arguments -- ExceptionConfig file: " + args[0] + "StatusCode:" + args[1]+ " Error Msg:" + args[2]);
- CommonStartup.exceptionConfig = args[0];
-
- //Read the Custom exception JSON file into map
- LoadMap();
- System.out.println("CustomExceptionLoader.main --> Map info post LoadMap:" + map);
-
- String[] str= LookupMap(args[1],args[2]);
- if (! (str==null)) {
- System.out.println("CustomExceptionLoader.main --> Return from lookup function" + str[0] + "value:" + str[1]);
- }
-
- }
- */
-
- public static void LoadMap () {
-
- map = new HashMap<String, JsonArray>();
-
- try {
- JsonElement root = null;
- root = new JsonParser().parse(new FileReader(CommonStartup.exceptionConfig));
- JsonObject jsonObject = root.getAsJsonObject().get("code").getAsJsonObject();
-
- for (Entry<String, JsonElement> entry : jsonObject.entrySet()) {
- map.put(entry.getKey(), (JsonArray) entry.getValue());
- }
-
- log.debug("CustomExceptionLoader.LoadMap --> Map loaded - " + map);
- } catch (JsonIOException e) {
- e.printStackTrace();
- } catch (JsonSyntaxException e) {
- e.printStackTrace();
- } catch (FileNotFoundException e) {
- e.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- public static String[] LookupMap (String error, String errormsg) {
-
- String[] retarray = null;
-
- log.debug("CustomExceptionLoader.LookupMap -->" + " HTTP StatusCode:" + error + " Msg:" + errormsg);
- try{
-
- JsonArray jarray = map.get(error);
- for (int i = 0; i < jarray.size(); i++) {
-
- JsonElement val = jarray.get(i).getAsJsonObject().get("Reason");
- JsonArray ec = (JsonArray) jarray.get(i).getAsJsonObject().get("ErrorCode");
- log.trace("CustomExceptionLoader.LookupMap Parameter -> Error msg : " + errormsg + " Reason text being matched:" + val);
- if (errormsg.contains(val.toString().replace("\"", ""))){
- log.trace("CustomExceptionLoader.LookupMap Successful! Exception matched to error message StatusCode:" + ec.get(0).toString() + "ErrorMessage:" + ec.get(1).toString());
- retarray = new String[2];
- retarray[0]=ec.get(0).toString();
- retarray[1]=ec.get(1).toString();
- return retarray;
- }
- }
-
- }
- catch (Exception e)
- {
- System.out.println(e.getMessage());
- }
-
- return retarray;
- }
-
-}
+ * ============LICENSE_END========================================================= + */ + +package org.openecomp.dcae.commonFunction; + +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.HashMap; + +import java.util.Map.Entry; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonIOException; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.gson.JsonSyntaxException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CustomExceptionLoader { + + public static HashMap<String, JsonArray> map = null; + private static final Logger log = LoggerFactory.getLogger ( CustomExceptionLoader.class ); + //static private final VESLogger log = VESLogger.getLogger(CustomExceptionLoader.class, VESLogger.VES_AGENT); + + //For standalone test + //LoadMap Invoked from servletSetup + /* + public static void main(String[] args) { + + System.out.println("CustomExceptionLoader.main --> Arguments -- ExceptionConfig file: " + args[0] + "StatusCode:" + args[1]+ " Error Msg:" + args[2]); + CommonStartup.exceptionConfig = args[0]; + + //Read the Custom exception JSON file into map + LoadMap(); + System.out.println("CustomExceptionLoader.main --> Map info post LoadMap:" + map); + + String[] str= LookupMap(args[1],args[2]); + if (! (str==null)) { + System.out.println("CustomExceptionLoader.main --> Return from lookup function" + str[0] + "value:" + str[1]); + } + + } + */ + + public static void LoadMap () { + + map = new HashMap<String, JsonArray>(); + FileReader fr = null; + try { + JsonElement root = null; + fr = new FileReader(CommonStartup.exceptionConfig); + root = new JsonParser().parse(fr); + JsonObject jsonObject = root.getAsJsonObject().get("code").getAsJsonObject(); + + for (Entry<String, JsonElement> entry : jsonObject.entrySet()) { + map.put(entry.getKey(), (JsonArray) entry.getValue()); + } + + log.debug("CustomExceptionLoader.LoadMap --> Map loaded - " + map); + } catch (JsonIOException e) { + e.printStackTrace(); + } catch (JsonSyntaxException e) { + e.printStackTrace(); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + finally { + if (fr != null) { + try { + fr.close(); + } catch (IOException e) { + log.error("Error closing file reader stream : " +e.toString()); + } + } + } + } + + public static String[] LookupMap (String error, String errormsg) { + + String[] retarray = null; + + log.debug("CustomExceptionLoader.LookupMap -->" + " HTTP StatusCode:" + error + " Msg:" + errormsg); + try{ + + JsonArray jarray = map.get(error); + for (int i = 0; i < jarray.size(); i++) { + + JsonElement val = jarray.get(i).getAsJsonObject().get("Reason"); + JsonArray ec = (JsonArray) jarray.get(i).getAsJsonObject().get("ErrorCode"); + log.trace("CustomExceptionLoader.LookupMap Parameter -> Error msg : " + errormsg + " Reason text being matched:" + val); + if (errormsg.contains(val.toString().replace("\"", ""))){ + log.trace("CustomExceptionLoader.LookupMap Successful! 
Exception matched to error message StatusCode:" + ec.get(0).toString() + "ErrorMessage:" + ec.get(1).toString()); + retarray = new String[2]; + retarray[0]=ec.get(0).toString(); + retarray[1]=ec.get(1).toString(); + return retarray; + } + } + + } + catch (Exception e) + { + System.out.println(e.getMessage()); + } + + return retarray; + } + +} diff --git a/src/main/java/org/openecomp/dcae/commonFunction/DmaapPropertyReader.java b/src/main/java/org/openecomp/dcae/commonFunction/DmaapPropertyReader.java index 18e6d59..9cf7fc8 100644 --- a/src/main/java/org/openecomp/dcae/commonFunction/DmaapPropertyReader.java +++ b/src/main/java/org/openecomp/dcae/commonFunction/DmaapPropertyReader.java @@ -1,107 +1,118 @@ -/*-
+/*- * ============LICENSE_START======================================================= * PROJECT * ================================================================================ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and * limitations under the License. - * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.commonFunction;
-
-
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-
-import java.util.HashMap;
-
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonIOException;
-
-import com.google.gson.JsonParser;
-import com.google.gson.JsonSyntaxException;
-
-
-
-public class DmaapPropertyReader {
-
- private static DmaapPropertyReader instance = null;
-
-
- private static final Logger log = LoggerFactory.getLogger ( DmaapPropertyReader.class );
- public HashMap<String, String> dmaap_hash = new HashMap<String, String>();
-
- private DmaapPropertyReader(String CambriaConfigFile) {
-
- try {
- JsonElement root = null;
- root = new JsonParser().parse(new FileReader(CambriaConfigFile));
- JsonArray jsonObject = (JsonArray) root.getAsJsonObject().get("channels");
-
- for (int i = 0; i < jsonObject.size(); i++) {
- log.debug("TOPIC:" + jsonObject.get(i).getAsJsonObject().get("cambria.topic") +
- " HOST-URL:" + jsonObject.get(i).getAsJsonObject().get("cambria.url") +
- " HOSTS:" + jsonObject.get(i).getAsJsonObject().get("cambria.hosts") +
- " PWD:" + jsonObject.get(i).getAsJsonObject().get("basicAuthPassword") +
- " USER:" + jsonObject.get(i).getAsJsonObject().get("basicAuthUsername") +
- " NAME:" + jsonObject.get(i).getAsJsonObject().get("name") );
-
- String convertedname = jsonObject.get(i).getAsJsonObject().get("name").toString().replace("\"","");
- dmaap_hash.put(convertedname + ".cambria.topic", jsonObject.get(i).getAsJsonObject().get("cambria.topic").toString().replace("\"","") );
-
- if (jsonObject.get(i).getAsJsonObject().get("cambria.hosts") != null)
- {
- dmaap_hash.put(convertedname + ".cambria.hosts", jsonObject.get(i).getAsJsonObject().get("cambria.hosts").toString().replace("\"","") );
- }
- if (jsonObject.get(i).getAsJsonObject().get("cambria.url") != null)
- {
- dmaap_hash.put(convertedname + ".cambria.url", jsonObject.get(i).getAsJsonObject().get("cambria.url").toString().replace("\"","") );
- }
- if (jsonObject.get(i).getAsJsonObject().get("basicAuthPassword") != null)
- {
- dmaap_hash.put(convertedname + ".basicAuthPassword", jsonObject.get(i).getAsJsonObject().get("basicAuthPassword").toString().replace("\"","") );
- }
- if (jsonObject.get(i).getAsJsonObject().get("basicAuthUsername") != null)
- {
- dmaap_hash.put(convertedname+ ".basicAuthUsername", jsonObject.get(i).getAsJsonObject().get("basicAuthUsername").toString().replace("\"","") );
- }
-
- }
- } catch (JsonIOException | JsonSyntaxException | FileNotFoundException e1) {
- e1.printStackTrace();
- log.error("Problem loading Dmaap Channel configuration file: " +e1.toString());
- }
-
-
- }
-
-
-
- public static synchronized DmaapPropertyReader getInstance(String ChannelConfig){
- if (instance == null) {
- instance = new DmaapPropertyReader(ChannelConfig);
- }
- return instance;
- }
-
-
- public String getKeyValue(String HashKey){
- return this.dmaap_hash.get(HashKey);
- }
-}
+ * ============LICENSE_END========================================================= + */ + +package org.openecomp.dcae.commonFunction; + + +import java.io.FileNotFoundException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.io.FileReader; +import java.io.IOException; +import java.util.HashMap; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonIOException; + +import com.google.gson.JsonParser; +import com.google.gson.JsonSyntaxException; + + + +public class DmaapPropertyReader { + + private static DmaapPropertyReader instance = null; + + + private static final Logger log = LoggerFactory.getLogger ( DmaapPropertyReader.class ); + //static private final VESLogger log = VESLogger.getLogger(DmaapPropertyReader.class, VESLogger.VES_AGENT); + + public HashMap<String, String> dmaap_hash = new HashMap<String, String>(); + + private DmaapPropertyReader(String CambriaConfigFile) { + + FileReader fr = null; + try { + JsonElement root = null; + fr = new FileReader(CambriaConfigFile); + root = new JsonParser().parse(fr); + JsonArray jsonObject = (JsonArray) root.getAsJsonObject().get("channels"); + + for (int i = 0; i < jsonObject.size(); i++) { + log.debug("TOPIC:" + jsonObject.get(i).getAsJsonObject().get("cambria.topic") + + " HOST-URL:" + jsonObject.get(i).getAsJsonObject().get("cambria.url") + + " HOSTS:" + jsonObject.get(i).getAsJsonObject().get("cambria.hosts") + + " PWD:" + jsonObject.get(i).getAsJsonObject().get("basicAuthPassword") + + " USER:" + jsonObject.get(i).getAsJsonObject().get("basicAuthUsername") + + " NAME:" + jsonObject.get(i).getAsJsonObject().get("name") ); + + String convertedname = jsonObject.get(i).getAsJsonObject().get("name").toString().replace("\"",""); + dmaap_hash.put(convertedname + ".cambria.topic", jsonObject.get(i).getAsJsonObject().get("cambria.topic").toString().replace("\"","") ); + + if (jsonObject.get(i).getAsJsonObject().get("cambria.hosts") != null) + { + dmaap_hash.put(convertedname + ".cambria.hosts", jsonObject.get(i).getAsJsonObject().get("cambria.hosts").toString().replace("\"","") ); + } + if (jsonObject.get(i).getAsJsonObject().get("cambria.url") != null) + { + dmaap_hash.put(convertedname + ".cambria.url", jsonObject.get(i).getAsJsonObject().get("cambria.url").toString().replace("\"","") ); + } + if (jsonObject.get(i).getAsJsonObject().get("basicAuthPassword") != null) + { + dmaap_hash.put(convertedname + ".basicAuthPassword", jsonObject.get(i).getAsJsonObject().get("basicAuthPassword").toString().replace("\"","") ); + } + if (jsonObject.get(i).getAsJsonObject().get("basicAuthUsername") != null) + { + dmaap_hash.put(convertedname+ ".basicAuthUsername", jsonObject.get(i).getAsJsonObject().get("basicAuthUsername").toString().replace("\"","") ); + } + + } + } catch (JsonIOException | JsonSyntaxException | FileNotFoundException e1) { + e1.printStackTrace(); + log.error("Problem loading Dmaap Channel configuration file: " +e1.toString()); + } + finally { + if (fr != null) { + try { + fr.close(); + } catch (IOException e) { + log.error("Error closing file reader stream : " +e.toString()); + } + } + } + + + } + + + + public static synchronized DmaapPropertyReader getInstance(String ChannelConfig){ + if (instance == null) { + instance = new DmaapPropertyReader(ChannelConfig); + } + return instance; + } + + + public String getKeyValue(String HashKey){ + return this.dmaap_hash.get(HashKey); + } +} diff --git a/src/main/java/org/openecomp/dcae/commonFunction/EventProcessor.java 
b/src/main/java/org/openecomp/dcae/commonFunction/EventProcessor.java index 0e6f7e7..a5e90b9 100644 --- a/src/main/java/org/openecomp/dcae/commonFunction/EventProcessor.java +++ b/src/main/java/org/openecomp/dcae/commonFunction/EventProcessor.java @@ -20,40 +20,81 @@ package org.openecomp.dcae.commonFunction; -import org.json.JSONObject; +import java.text.SimpleDateFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.att.nsa.clock.SaClock; +import com.att.nsa.logging.LoggingContext; +import com.att.nsa.logging.log4j.EcompFields; + +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.TimeZone; +import java.util.UUID; + +import org.json.JSONArray; +import org.json.JSONObject; + public class EventProcessor implements Runnable { private static final Logger log = LoggerFactory.getLogger(EventProcessor.class); + + private static HashMap<String, String[]> streamid_hash = new HashMap<String, String[]>(); private JSONObject event = null; public EventProcessor() { log.debug("EventProcessor: Default Constructor"); + + String list[] = CommonStartup.streamid.split("\\|"); + for (int i = 0; i < list.length; i++) { + String domain = list[i].split("=")[0]; + //String streamIdList[] = list[i].split("=")[1].split(","); + String streamIdList[] = list[i].substring(list[i].indexOf("=") +1).split(","); + + log.debug("Domain: " + domain + " streamIdList:" + Arrays.toString(streamIdList)); + streamid_hash.put(domain, streamIdList); + } + } @Override public void run() { try { + event = CommonStartup.fProcessingInputQueue.take(); log.info("EventProcessor\tRemoving element: " + event); - + + //EventPublisher Ep=new EventPublisher(); while (event != null) { - // As long as the producer is running, - // we remove elements from the queue. + // As long as the producer is running we remove elements from the queue. 
- // log.info("EventProcessor\tRemoving element: " + - // this.queue.remove()); - - if (CommonStartup.streamid == null) { + //UUID uuid = UUID.fromString(event.get("VESuniqueId").toString()); + String uuid = event.get("VESuniqueId").toString(); + LoggingContext localLC = VESLogger.getLoggingContextForThread(uuid.toString()); + localLC .put ( EcompFields.kBeginTimestampMs, SaClock.now () ); + + log.debug("event.VESuniqueId" + event.get("VESuniqueId") + "event.commonEventHeader.domain:" + event.getJSONObject("event").getJSONObject("commonEventHeader").getString("domain")); + String streamIdList[]=streamid_hash.get(event.getJSONObject("event").getJSONObject("commonEventHeader").getString("domain")); + log.debug("streamIdList:" + streamIdList); + + if (streamIdList.length == 0) { log.error("No StreamID defined for publish - Message dropped" + event.toString()); - } else { - EventPublisher.getInstance(CommonStartup.cambriaConfigFile, CommonStartup.streamid) - .sendEvent(event.toString(), CommonStartup.streamid); + } + + else { + for (int i=0; i < streamIdList.length; i++) + { + log.info("Invoking publisher for streamId:" + streamIdList[i]); + this.overrideEvent(); + EventPublisher.getInstance(streamIdList[i]).sendEvent(event); + + } } log.debug("Message published" + event.toString()); event = CommonStartup.fProcessingInputQueue.take(); + // log.info("EventProcessor\tRemoving element: " + this.queue.remove()); } } catch (InterruptedException e) { log.error("EventProcessor InterruptedException" + e.getMessage()); @@ -61,4 +102,20 @@ public class EventProcessor implements Runnable { } + + public void overrideEvent() + { + //Set collector timestamp in event payload before publish + final Date currentTime = new Date(); + final SimpleDateFormat sdf = new SimpleDateFormat("EEE, MM dd yyyy hh:mm:ss z"); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + + JSONArray additionalParametersarray = new JSONArray().put(new JSONObject().put("collectorTimeStamp", sdf.format(currentTime))); + JSONObject additionalParameter = new JSONObject().put("additionalParameters",additionalParametersarray ); + JSONObject commonEventHeaderkey = event.getJSONObject("event").getJSONObject("commonEventHeader"); + commonEventHeaderkey.put("internalHeaderFields", additionalParameter); + event.getJSONObject("event").put("commonEventHeader",commonEventHeaderkey); + log.debug("Modified event:" + event); + + } } diff --git a/src/main/java/org/openecomp/dcae/commonFunction/EventPublisher.java b/src/main/java/org/openecomp/dcae/commonFunction/EventPublisher.java index 4aa6da4..b40fb24 100644 --- a/src/main/java/org/openecomp/dcae/commonFunction/EventPublisher.java +++ b/src/main/java/org/openecomp/dcae/commonFunction/EventPublisher.java @@ -1,136 +1,181 @@ -/*-
+/*- * ============LICENSE_START======================================================= * PROJECT * ================================================================================ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and * limitations under the License. - * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.commonFunction;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.security.GeneralSecurityException;
-import java.net.MalformedURLException;
-
-import com.att.nsa.cambria.client.CambriaBatchingPublisher;
-import com.att.nsa.cambria.client.CambriaClientBuilders;
-
-
-public class EventPublisher {
-
- private static EventPublisher instance = null;
- private static CambriaBatchingPublisher pub = null;
-
- private String streamid = "";
- private static Logger log = LoggerFactory.getLogger(EventPublisher.class.getName());
-
-
-
- private EventPublisher(String CambriaConfigFile, String newstreamid) {
-
- this.streamid = newstreamid;
- try {
- String basicAuthUsername = DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".basicAuthUsername");
- if (basicAuthUsername != null)
- {
- //log.debug(streamid+".cambria.url" + streamid+".cambria.topic");
- log.debug("URL:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".cambria.url") + "TOPIC:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".cambria.topic") + "AuthUser:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".basicAuthUsername") + "Authpwd:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".basicAuthPassword"));
-
- pub = new CambriaClientBuilders.PublisherBuilder ()
- .usingHosts (DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".cambria.url"))
- .onTopic (DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".cambria.topic"))
- .usingHttps()
- .authenticatedByHttp ( DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".basicAuthUsername"), DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".basicAuthPassword") )
- .build ();
- }
- else
- {
- //log.debug(streamid+".cambria.url" + streamid+".cambria.topic");
- log.debug("URL:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".cambria.url") + "TOPIC:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".cambria.topic"));
-
-
- pub = new CambriaClientBuilders.PublisherBuilder ()
- .usingHosts (DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".cambria.hosts"))
- .onTopic (DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".cambria.topic"))
- .build ();
-
- }
- }
- catch(GeneralSecurityException | MalformedURLException e ) {
- log.error("CambriaClientBuilders connection exception : " + e.getMessage());
- }
- catch(Exception e) {
- log.error("CambriaClientBuilders connection exception : " + e.getMessage());
- }
-
- }
-
- public static synchronized EventPublisher getInstance( String CambriaConfigFile, String streamid){
- if (instance == null) {
- instance = new EventPublisher(CambriaConfigFile, streamid);
- }
- return instance;
-
- }
-
- public synchronized void sendEvent(String event, String newstreamid ) {
-
- //Check if streamid changed
- if(! newstreamid.equals(this.streamid)) {
- closePublisher();
- instance = new EventPublisher (CommonStartup.cambriaConfigFile, newstreamid);
- }
-
-
- try {
- int pendingMsgs = pub.send("MyPartitionKey", event.toString());
-
- if(pendingMsgs > 100) {
- log.info("Pending Message Count="+pendingMsgs);
- }
-
- CommonStartup.oplog.info ("Event Published:" + event);
- } catch(IOException ioe) {
- log.error("Unable to publish event:" + event + " Exception:" + ioe.toString());
- }
-
-
-
-
- }
-
-
- public synchronized void closePublisher() {
-
- try {
- final List<?> stuck = pub.close(20, TimeUnit.SECONDS);
- if ( stuck.size () > 0 ) {
- log.error(stuck.size() + " messages unsent" );
- }
- }
- catch(InterruptedException ie) {
- log.error("Caught an Interrupted Exception on Close event");
- }catch(IOException ioe) {
- log.error("Caught IO Exception: " + ioe.toString());
- }
-
- }
-}
+ * ============LICENSE_END========================================================= + */ + +package org.openecomp.dcae.commonFunction; + +import java.io.IOException; + +import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import java.security.GeneralSecurityException; + +import com.att.nsa.cambria.client.CambriaBatchingPublisher; +import com.att.nsa.cambria.client.CambriaClientBuilders; +import com.att.nsa.clock.SaClock; +import com.att.nsa.logging.LoggingContext; +import com.att.nsa.logging.log4j.EcompFields; + + +public class EventPublisher { + + private static EventPublisher instance = null; + private static CambriaBatchingPublisher pub = null; + + private String streamid = ""; + private String ueburl=""; + private String topic=""; + private String authuser=""; + private String authpwd=""; + + private static Logger log = LoggerFactory.getLogger(EventPublisher.class); + + + private EventPublisher( String newstreamid) { + + this.streamid = newstreamid; + try { + ueburl=DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).dmaap_hash.get(streamid+".cambria.url"); + + if (ueburl==null){ + ueburl= DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).dmaap_hash.get(streamid+".cambria.hosts"); + } + topic= DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).getKeyValue(streamid+".cambria.topic"); + authuser = DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).getKeyValue(streamid+".basicAuthUsername"); + + + if (authuser != null) { + authpwd= DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).dmaap_hash.get(streamid+".basicAuthPassword"); + } + } + catch(Exception e) { + log.error("CambriaClientBuilders connection reader exception : " + e.getMessage()); + + } + + } + + + public static synchronized EventPublisher getInstance( String streamid){ + if (instance == null) { + instance = new EventPublisher(streamid); + } + if (!instance.streamid.equals(streamid)){ + instance.closePublisher(); + instance = new EventPublisher(streamid); + } + return instance; + + } + + + public synchronized void sendEvent(JSONObject event) { + + log.debug("EventPublisher.sendEvent: instance for publish is ready"); + + + if (event.has("VESuniqueId")) + { + String uuid = event.get("VESuniqueId").toString(); + LoggingContext localLC = VESLogger.getLoggingContextForThread(uuid.toString()); + localLC .put ( EcompFields.kBeginTimestampMs, SaClock.now () ); + log.debug("Removing VESuniqueid object from event"); + event.remove("VESuniqueId"); + } + + + + + try { + + if (authuser != null) + { + log.debug("URL:" + ueburl + "TOPIC:" + topic + "AuthUser:" + authuser + "Authpwd:" + authpwd); + pub = new CambriaClientBuilders.PublisherBuilder () + .usingHosts (ueburl) + .onTopic (topic) + .usingHttps() + .authenticatedByHttp (authuser, authpwd ) + .logSendFailuresAfter(5) + // .logTo(log) + // .limitBatch(100, 10) + .build (); + } + else + { + + log.debug("URL:" + ueburl + "TOPIC:" + topic ); + pub = new CambriaClientBuilders.PublisherBuilder () + .usingHosts (ueburl) + .onTopic (topic) + // .logTo(log) + .logSendFailuresAfter(5) + // .limitBatch(100, 10) + .build (); + + } + + int pendingMsgs = pub.send("MyPartitionKey", event.toString()); + //this.wait(2000); + + if(pendingMsgs > 100) { + log.info("Pending Message Count="+pendingMsgs); + } + + //closePublisher(); + log.info("pub.send invoked - no error"); + CommonStartup.oplog.info ("URL:" + ueburl + "TOPIC:" + topic + 
"Event Published:" + event); + + } catch(IOException e) { + log.error("IOException:Unable to publish event:" + event + " streamid:" + this.streamid + " Exception:" + e.toString()); + } catch (GeneralSecurityException e) { + // TODO Auto-generated catch block + log.error("GeneralSecurityException:Unable to publish event:" + event + " streamid:" + this.streamid + " Exception:" + e.toString()); + } + catch (IllegalArgumentException e) + { + log.error("IllegalArgumentException:Unable to publish event:" + event + " streamid:" + this.streamid + " Exception:" + e.toString()); + } + + } + + + public synchronized void closePublisher() { + + try { + if (pub!= null) + { + final List<?> stuck = pub.close(20, TimeUnit.SECONDS); + if ( stuck.size () > 0 ) { + log.error(stuck.size() + " messages unsent" ); + } + } + } + catch(InterruptedException ie) { + log.error("Caught an Interrupted Exception on Close event"); + }catch(IOException ioe) { + log.error("Caught IO Exception: " + ioe.toString()); + } + + } +} diff --git a/src/main/java/org/openecomp/dcae/commonFunction/VESLogger.java b/src/main/java/org/openecomp/dcae/commonFunction/VESLogger.java new file mode 100644 index 0000000..7a70013 --- /dev/null +++ b/src/main/java/org/openecomp/dcae/commonFunction/VESLogger.java @@ -0,0 +1,170 @@ +/*- + * ============LICENSE_START======================================================= + * PROJECT + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights + * reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.openecomp.dcae.commonFunction; + +import java.net.InetAddress; +import java.net.UnknownHostException; + + +import java.util.UUID; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.att.nsa.clock.SaClock; + +import com.att.nsa.logging.LoggingContext; +import com.att.nsa.logging.LoggingContextFactory; +import com.att.nsa.logging.log4j.EcompFields; + +import jline.internal.Log; + + +public class VESLogger { + + public static final String VES_AGENT = "VES_AGENT"; + + public static Logger auditLog; + public static Logger metricsLog; + public static Logger errorLog; + public static Logger debugLog; + + // Common LoggingContext + private static LoggingContext commonLC = null; + // Thread-specific LoggingContext + private static LoggingContext threadLC = null; + public LoggingContext lc ; + + + + /** + * Returns the common LoggingContext instance that is the base context + * for all subsequent instances. 
+ * + * @return the common LoggingContext + */ + public static LoggingContext getCommonLoggingContext() + { + if (commonLC == null) + { + commonLC = new LoggingContextFactory.Builder().build(); + final UUID uuid = java.util.UUID.randomUUID(); + + commonLC.put("requestId", uuid.toString()); + } + return commonLC; + } + + /** + * Get a logging context for the current thread that's based on the common logging context. + * Populate the context with context-specific values. + * + * @return a LoggingContext for the current thread + */ + public static LoggingContext getLoggingContextForThread (UUID aUuid) + { + // note that this operation requires everything from the common context + // to be (re)copied into the target context. That seems slow, but it actually + // helps prevent the thread from overwriting supposedly common data. It also + // should be fairly quick compared with the overhead of handling the actual + // service call. + + threadLC = new LoggingContextFactory.Builder(). + withBaseContext ( getCommonLoggingContext () ). + build(); + // Establish the request-specific UUID, as long as we are here... + threadLC.put("requestId", aUuid.toString()); + threadLC.put ( EcompFields.kEndTimestamp, SaClock.now () ); + + return threadLC; + } + + /** + * Get a logging context for the current thread that's based on the common logging context. + * Populate the context with context-specific values. + * + * @return a LoggingContext for the current thread + */ + public static LoggingContext getLoggingContextForThread (String aUuid) + { + // note that this operation requires everything from the common context + // to be (re)copied into the target context. That seems slow, but it actually + // helps prevent the thread from overwriting supposedly common data. It also + // should be fairly quick compared with the overhead of handling the actual + // service call. + + threadLC = new LoggingContextFactory.Builder(). + withBaseContext ( getCommonLoggingContext () ). + build(); + // Establish the request-specific UUID, as long as we are here... + threadLC.put("requestId", aUuid); + threadLC.put ( "statusCode", "COMPLETED" ); + threadLC.put ( EcompFields.kEndTimestamp, SaClock.now () ); + return threadLC; + } + public static void setUpEcompLogging() + { + + + // Create ECOMP Logger instances + auditLog = LoggerFactory.getLogger("com.att.ecomp.audit"); + metricsLog = LoggerFactory.getLogger("com.att.ecomp.metrics"); + debugLog = LoggerFactory.getLogger("com.att.ecomp.debug"); + errorLog = LoggerFactory.getLogger("com.att.ecomp.error"); + + + final LoggingContext lc = getCommonLoggingContext(); + + String ipAddr = "127.0.0.1"; + String hostname = "localhost"; + try + { + final InetAddress ip = InetAddress.getLocalHost (); + hostname = ip.getCanonicalHostName (); + ipAddr = ip.getHostAddress(); + } + catch ( UnknownHostException x ) + { + Log.debug(x.getMessage()); + } + + lc.put ( "serverName", hostname ); + lc.put ( "serviceName", "VESCollecor" ); + lc.put ( "statusCode", "RUNNING" ); + lc.put ( "targetEntity", "NULL"); + lc.put ( "targetServiceName", "NULL"); + lc.put ( "server", hostname ); + lc.put ( "serverIpAddress", ipAddr.toString () ); + + // instance UUID is meaningless here, so we just create a new one each time the + // server starts. One could argue each new instantiation of the service should + // have a new instance ID. 
+ lc.put ( "instanceUuid", "" ); + lc.put ( "severity", "" ); + lc.put ( EcompFields.kEndTimestamp, SaClock.now () ); + lc.put("EndTimestamp", SaClock.now ()); + lc.put("partnerName", "NA"); + + + } + + +} diff --git a/src/main/java/org/openecomp/dcae/restapi/RestfulCollectorServlet.java b/src/main/java/org/openecomp/dcae/restapi/RestfulCollectorServlet.java index bd9be55..9cee97c 100644 --- a/src/main/java/org/openecomp/dcae/restapi/RestfulCollectorServlet.java +++ b/src/main/java/org/openecomp/dcae/restapi/RestfulCollectorServlet.java @@ -1,146 +1,150 @@ -
-/*
+ +/* * ============LICENSE_START======================================================= * PROJECT * ================================================================================ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and * limitations under the License. - * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.restapi;
-
-import java.io.IOException;
-import java.net.URL;
-
-import javax.servlet.ServletException;
-
-import org.apache.tomcat.util.codec.binary.Base64;
-import org.openecomp.dcae.commonFunction.CommonStartup;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.att.nsa.apiServer.CommonServlet;
-import com.att.nsa.configs.ConfigDbException;
-import com.att.nsa.drumlin.service.framework.DrumlinErrorHandler;
-import com.att.nsa.drumlin.service.framework.context.DrumlinRequestContext;
-import com.att.nsa.drumlin.service.framework.routing.DrumlinRequestRouter;
-import com.att.nsa.drumlin.service.framework.routing.playish.DrumlinPlayishRoutingFileSource;
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.att.nsa.security.NsaAuthenticator;
-
-import com.att.nsa.security.authenticators.SimpleAuthenticator;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-
-public class RestfulCollectorServlet extends CommonServlet
-{
- String authid = null;
- String authpwd = null;
- String authlist = null;
- public RestfulCollectorServlet ( rrNvReadable settings ) throws loadException, missingReqdSetting
- {
- super ( settings, "collector", false );
- authid = settings.getString(CommonStartup.kSetting_authid,null);
- if (authid != null)
- {
- String authpwdtemp = settings.getString(CommonStartup.kSetting_authpwd,null);
- authpwd = new String(Base64.decodeBase64(authpwdtemp));
- }
- authlist = settings.getString(CommonStartup.kSetting_authlist,null);
- }
-
-
- /**
- * This is called once at server start. Use it to init any shared objects and setup the route mapping.
- */
- @Override
- protected void servletSetup () throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException
- {
- super.servletSetup ();
-
- try
- {
- // the base class provides a bunch of things like API authentication and ECOMP compliant
- // logging. The Restful Collector likely doesn't need API authentication, so for now,
- // we init the base class services with an in-memory (and empty!) config DB.
- commonServletSetup ( ConfigDbType.MEMORY );
-
-
-
- // setup the servlet routing and error handling
- final DrumlinRequestRouter drr = getRequestRouter ();
-
- // you can tell the request router what to do when a particular kind of exception is thrown.
- drr.setHandlerForException( IllegalArgumentException.class, new DrumlinErrorHandler()
- {
- @Override
- public void handle ( DrumlinRequestContext ctx, Throwable cause )
- {
- sendJsonReply ( ctx, HttpStatusCodes.k400_badRequest, cause.getMessage() );
- }
- });
-
- // load the routes from the config file
- final URL routes = findStream ( "routes.conf" );
- if ( routes == null ) throw new rrNvReadable.missingReqdSetting ( "No routing configuration." );
- final DrumlinPlayishRoutingFileSource drs = new DrumlinPlayishRoutingFileSource ( routes );
- drr.addRouteSource ( drs );
-
-
- NsaAuthenticator<NsaSimpleApiKey> NsaAuth = new SimpleAuthenticator ();
- if (authlist != null)
- {
- String authpair[] = authlist.split("\\|");
- for (String pair: authpair) {
- String lineid[] = pair.split(",");
- String listauthid = lineid[0];
- String listauthpwd = new String(Base64.decodeBase64(lineid[1]));
- ((SimpleAuthenticator) NsaAuth).add(listauthid,listauthpwd);
- }
-
- }
- else if (authid != null)
- {
- ((SimpleAuthenticator) NsaAuth).add(authid,authpwd);
- }
- else
- {
- //add a default test account
- ((SimpleAuthenticator) NsaAuth).add("admin","collectorpasscode");
- }
- this.getSecurityManager().addAuthenticator(NsaAuth);
- log.info ( "Restful Collector Servlet is up." );
- }
- catch ( SecurityException e )
- {
- throw new ServletException ( e );
- }
- catch ( IOException e )
- {
- throw new ServletException ( e );
- }
- catch ( ConfigDbException e )
- {
- throw new ServletException ( e );
- }
- }
-
-
-
- private static final long serialVersionUID = 1L;
- private static final Logger log = LoggerFactory.getLogger ( RestfulCollectorServlet.class );
-}
+ * ============LICENSE_END========================================================= + */ + +package org.openecomp.dcae.restapi; + +import java.io.IOException; +import java.net.URL; + +import javax.servlet.ServletException; + +import org.apache.tomcat.util.codec.binary.Base64; +import org.openecomp.dcae.commonFunction.CommonStartup; +import org.openecomp.dcae.commonFunction.VESLogger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.att.nsa.apiServer.CommonServlet; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.service.framework.DrumlinErrorHandler; +import com.att.nsa.drumlin.service.framework.context.DrumlinRequestContext; +import com.att.nsa.drumlin.service.framework.routing.DrumlinRequestRouter; +import com.att.nsa.drumlin.service.framework.routing.playish.DrumlinPlayishRoutingFileSource; +import com.att.nsa.drumlin.service.standards.HttpStatusCodes; +import com.att.nsa.drumlin.till.nv.rrNvReadable; +import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.security.NsaAuthenticator; + +import com.att.nsa.security.authenticators.SimpleAuthenticator; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; + +public class RestfulCollectorServlet extends CommonServlet +{ + String authid = null; + String authpwd = null; + String authlist = null; + + public RestfulCollectorServlet ( rrNvReadable settings ) throws loadException, missingReqdSetting + { + super ( settings, "collector", false ); + authid = settings.getString(CommonStartup.kSetting_authid,null); + if (authid != null) + { + String authpwdtemp = settings.getString(CommonStartup.kSetting_authpwd,null); + authpwd = new String(Base64.decodeBase64(authpwdtemp)); + } + authlist = settings.getString(CommonStartup.kSetting_authlist,null); + } + + + /** + * This is called once at server start. Use it to init any shared objects and setup the route mapping. + */ + @Override + protected void servletSetup () throws rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException + { + super.servletSetup (); + + try + { + // the base class provides a bunch of things like API authentication and ECOMP compliant + // logging. The Restful Collector likely doesn't need API authentication, so for now, + // we init the base class services with an in-memory (and empty!) config DB. + commonServletSetup ( ConfigDbType.MEMORY ); + + VESLogger.setUpEcompLogging(); + + // setup the servlet routing and error handling + final DrumlinRequestRouter drr = getRequestRouter (); + + // you can tell the request router what to do when a particular kind of exception is thrown. + drr.setHandlerForException( IllegalArgumentException.class, new DrumlinErrorHandler() + { + @Override + public void handle ( DrumlinRequestContext ctx, Throwable cause ) + { + sendJsonReply ( ctx, HttpStatusCodes.k400_badRequest, cause.getMessage() ); + } + }); + + // load the routes from the config file + final URL routes = findStream ( "routes.conf" ); + if ( routes == null ) throw new rrNvReadable.missingReqdSetting ( "No routing configuration." 
); + final DrumlinPlayishRoutingFileSource drs = new DrumlinPlayishRoutingFileSource ( routes ); + drr.addRouteSource ( drs ); + + if (CommonStartup.authflag > 0) { + NsaAuthenticator<NsaSimpleApiKey> NsaAuth = new SimpleAuthenticator (); + if (authlist != null) + { + String authpair[] = authlist.split("\\|"); + for (String pair: authpair) { + String lineid[] = pair.split(","); + String listauthid = lineid[0]; + String listauthpwd = new String(Base64.decodeBase64(lineid[1])); + ((SimpleAuthenticator) NsaAuth).add(listauthid,listauthpwd); + } + + } + else if (authid != null) + { + ((SimpleAuthenticator) NsaAuth).add(authid,authpwd); + } + else + { + //add a default test account + ((SimpleAuthenticator) NsaAuth).add("admin","collectorpasscode"); + } + this.getSecurityManager().addAuthenticator(NsaAuth); + } + + log.info ( "Restful Collector Servlet is up." ); + } + catch ( SecurityException e ) + { + throw new ServletException ( e ); + } + catch ( IOException e ) + { + throw new ServletException ( e ); + } + catch ( ConfigDbException e ) + { + throw new ServletException ( e ); + } + } + + + + private static final long serialVersionUID = 1L; + private static final Logger log = LoggerFactory.getLogger ( RestfulCollectorServlet.class ); +} diff --git a/src/main/java/org/openecomp/dcae/restapi/endpoints/EventReceipt.java b/src/main/java/org/openecomp/dcae/restapi/endpoints/EventReceipt.java index 54512e7..173b4d0 100644 --- a/src/main/java/org/openecomp/dcae/restapi/endpoints/EventReceipt.java +++ b/src/main/java/org/openecomp/dcae/restapi/endpoints/EventReceipt.java @@ -1,243 +1,338 @@ -/*-
+/*- * ============LICENSE_START======================================================= * PROJECT * ================================================================================ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and * limitations under the License. - * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.restapi.endpoints;
-
-
-import java.io.FileReader;
-import java.io.IOException;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.openecomp.dcae.commonFunction.CommonStartup;
-import org.openecomp.dcae.commonFunction.CustomExceptionLoader;
-import org.openecomp.dcae.commonFunction.CommonStartup.QueueFullException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.att.nsa.apiServer.endpoints.NsaBaseEndpoint;
-import com.att.nsa.drumlin.service.framework.context.DrumlinRequestContext;
-import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
-import com.att.nsa.drumlin.service.standards.MimeTypes;
-import com.att.nsa.security.db.simple.NsaSimpleApiKey;
-
-import com.google.gson.JsonParser;
-
-
-public class EventReceipt extends NsaBaseEndpoint {
- static String valresult = null;
- static JSONObject customerror = null;
-
- public static void receiveSingleEvent(DrumlinRequestContext ctx) throws IOException {
-
- NsaSimpleApiKey retkey = null;
- JSONObject jsonObject = null;
- //String br = new BufferedReader(new InputStreamReader(ctx.request().getBodyStream())).readLine();
-
- try {
-
-
- //JsonElement msg = new JsonParser().parse(new BufferedReader(new InputStreamReader(ctx.request().getBodyStream())).readLine());
- jsonObject = new JSONObject ( new JSONTokener ( ctx.request ().getBodyStream () ) );
-
- CommonStartup.inlog.info("Input Messsage: " + jsonObject);
- log.info("Input Messsage: " + jsonObject);
-
-
- try {
-
- if (CommonStartup.authflag == 1) {
- retkey = NsaBaseEndpoint.getAuthenticatedUser(ctx);
- }
- } catch (NullPointerException x) {
-
- log.info("Invalid user request " + ctx.request().getContentType() + " Message:" + jsonObject.toString());
- CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: Unauthorized user" + x.toString() );
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k401_unauthorized, "Invalid user");
- return;
-
- }
- if (retkey != null || CommonStartup.authflag == 0) {
-
- if (CommonStartup.schema_Validatorflag > 0) {
- String schema = new JsonParser().parse(new FileReader(CommonStartup.schemaFile)).toString();
-
- valresult = CommonStartup.schemavalidate(jsonObject.toString(), schema);
- if (valresult.equals("true")) {
- log.info("Validation successful");
- } else if (valresult.equals("false")) {
- log.info("Validation failed");
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest, "Schema validation failed");
- return;
- } else {
- log.error("Validation errored" + valresult);
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest,"Couldn't parse JSON object");
- return;
- }
- }
-
- // reject anything that's not JSON
- if (!ctx.request().getContentType().equalsIgnoreCase("application/json")) {
- log.info("Rejecting request with content type " + ctx.request().getContentType() + " Message:"
- + jsonObject);
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest,
- "Incorrect message content-type; only accepts application/json messages");
- return;
- }
- final JSONArray jsonArray = new JSONArray().put(jsonObject);
-
- CommonStartup.handleEvents(jsonArray);
- } else {
- log.info("Unauthorized request " + ctx.request().getContentType() + " Message:" + jsonObject.toString());
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k401_unauthorized, "Unauthorized user");
- return;
- }
-
- } catch (JSONException | NullPointerException | IOException x) {
- log.error("Couldn't parse JSON Array - HttpStatusCodes.k400_badRequest" + HttpStatusCodes.k400_badRequest
- + " Message:" + x.getMessage());
- CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: Invalid user request " + x.toString() );
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest, "Couldn't parse JSON object");
- return;
- } catch (QueueFullException e) {
- e.printStackTrace();
- log.error("Collector internal queue full :" + e.getMessage());
- CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: QueueFull" + e.toString() );
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k503_serviceUnavailable, "Queue full");
- return;
- }
- log.info("MessageAccepted and k200_ok to be sent");
- ctx.response().sendErrorAndBody(HttpStatusCodes.k200_ok, "Message Accepted", MimeTypes.kAppJson);
- }
-
- public static void receiveMultipleEvents(DrumlinRequestContext ctx) throws IOException {
- // the request body carries events. assume for now it's an array
- // of json objects that fits in memory. (See cambria's parsing for handling large messages)
-
- NsaSimpleApiKey retkey = null;
-
- JSONArray jsonArray = null;
-
- try {
-
- //String br = new BufferedReader(new InputStreamReader(ctx.request().getBodyStream())).readLine();
- //JsonElement msg = new JsonParser().parse(new BufferedReader(new InputStreamReader(ctx.request().getBodyStream())).readLine());
- jsonArray = new JSONArray ( new JSONTokener ( ctx.request ().getBodyStream () ) );
-
- CommonStartup.inlog.info("Input Messsage: " + jsonArray);
- log.info("Input Messsage: " + jsonArray);
-
- try {
- if (CommonStartup.authflag == 1) {
- retkey = NsaBaseEndpoint.getAuthenticatedUser(ctx);
- }
- } catch (NullPointerException x) {
- log.info("Invalid user request " + ctx.request().getContentType() + " Message:" + jsonArray.toString());
- CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: Unauthorized user" + x.toString() );
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k401_unauthorized, "Invalid user");
- return;
- }
-
- if (retkey != null || CommonStartup.authflag == 0) {
- if (CommonStartup.schema_Validatorflag > 0) {
-
- String schema = new JsonParser().parse(new FileReader(CommonStartup.schemaFile)).toString();
-
- for (int i = 0; i < jsonArray.length(); i++) {
- valresult = CommonStartup.schemavalidate(jsonArray.getJSONObject(i).toString(), schema);
- if (valresult.equals("false")) {
- log.info("Validation failed");
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest,"Standard schema validation failed");
- return;
- } else if (!valresult.equals("true")) {
- log.error("Validation errored" + valresult);
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest,"Couldn't parse JSON object");
- return;
-
- }
- }
- log.info("Validation successful for all events in batch");
-
- }
- // reject anything that's not JSON
- if (!ctx.request().getContentType().equalsIgnoreCase("application/json")) {
- log.info("Rejecting request with content type " + ctx.request().getContentType() + " Message:"
- + jsonArray.toString());
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest,
- "Incorrect message content-type; only accepts application/json messages");
- return;
- }
-
- CommonStartup.handleEvents(jsonArray);
- } else {
- log.info("Unauthorized request " + ctx.request().getContentType() + " Message:" + jsonArray.toString());
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k401_unauthorized, "Unauthorized request");
- return;
- }
- } catch (JSONException | NullPointerException | IOException x) {
- log.error("Couldn't parse JSON Array - HttpStatusCodes.k400_badRequest" + HttpStatusCodes.k400_badRequest
- + " Message:" + x.getMessage());
- CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: Invalid user request " + x.toString() );
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest, "Couldn't parse JSON object");
- return;
- } catch (QueueFullException e) {
- e.printStackTrace();
- log.error("Collector internal HP queue full :" + e.getMessage() );
- CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: QueueFull" + e.toString() );
- respondWithCustomMsginJson(ctx, HttpStatusCodes.k503_serviceUnavailable, "Queue full");
- return;
- }
-
- ctx.response().sendErrorAndBody(HttpStatusCodes.k200_ok, "Message Accepted", MimeTypes.kAppJson);
- }
-
- public static void respondWithCustomMsginJson(DrumlinRequestContext ctx, int sc, String msg) {
- String[] str = null;
- String ExceptionType = "GeneralException";
-
- str = CustomExceptionLoader.LookupMap(String.valueOf(sc), msg);
- System.out.println("Post CustomExceptionLoader.LookupMap" + str);
-
- if (str != null) {
-
- if (str[0].matches("SVC")) {
- ExceptionType = "ServiceException";
- } else if (str[1].matches("POL")) {
- ExceptionType = "PolicyException";
- }
-
- JSONObject jb = new JSONObject().put("requestError",
- new JSONObject().put(ExceptionType, new JSONObject().put("MessagID", str[0]).put("text", str[1])));
-
- log.debug("Constructed json error : " + jb.toString());
- ctx.response().sendErrorAndBody(sc, jb.toString(), MimeTypes.kAppJson);
- } else {
- JSONObject jb = new JSONObject().put("requestError",
- new JSONObject().put(ExceptionType, new JSONObject().put("Status", sc).put("Error", msg)));
- ctx.response().sendErrorAndBody(sc, jb.toString(), MimeTypes.kAppJson);
- }
-
- }
-
- private static final Logger log = LoggerFactory.getLogger(EventReceipt.class);
-
-}
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.dcae.restapi.endpoints;
+
+import java.io.FileReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.UUID;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.openecomp.dcae.commonFunction.CommonStartup;
+import org.openecomp.dcae.commonFunction.CustomExceptionLoader;
+
+import org.openecomp.dcae.commonFunction.VESLogger;
+import org.openecomp.dcae.commonFunction.CommonStartup.QueueFullException;
+
+import com.att.nsa.apiServer.endpoints.NsaBaseEndpoint;
+import com.att.nsa.clock.SaClock;
+import com.att.nsa.drumlin.service.framework.context.DrumlinRequestContext;
+import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
+import com.att.nsa.drumlin.service.standards.MimeTypes;
+import com.att.nsa.logging.LoggingContext;
+
+import com.att.nsa.logging.log4j.EcompFields;
+import com.att.nsa.security.db.simple.NsaSimpleApiKey;
+
+import com.google.gson.JsonParser;
+
+public class EventReceipt extends NsaBaseEndpoint {
+	static String valresult = null;
+	static JSONObject customerror = null;
+
+	private static final Logger log = LoggerFactory.getLogger(EventReceipt.class);
+
+	public static void receiveSingleEvent(DrumlinRequestContext ctx) throws IOException {
+
+		NsaSimpleApiKey retkey = null;
+		JSONObject jsonObject = null;
+		FileReader fr = null;
+		InputStream istr = null;
+		// String br = new BufferedReader(new
+		// InputStreamReader(ctx.request().getBodyStream())).readLine();
+
+		final UUID uuid = java.util.UUID.randomUUID();
+		LoggingContext localLC = VESLogger.getLoggingContextForThread(uuid);
+		localLC.put(EcompFields.kBeginTimestampMs, SaClock.now());
+
+		log.debug("Request recieved :" + ctx.request().getRemoteAddress());
+
+		try {
+
+			// JsonElement msg = new JsonParser().parse(new BufferedReader(new
+			// InputStreamReader(ctx.request().getBodyStream())).readLine());
+
+			istr = ctx.request().getBodyStream();
+			jsonObject = new JSONObject(new JSONTokener(istr));
+
+			CommonStartup.inlog.info(ctx.request().getRemoteAddress() + "Input Messsage: " + jsonObject);
+			log.info(ctx.request().getRemoteAddress() + "Input Messsage: " + jsonObject);
+
+			try {
+
+				if (CommonStartup.authflag == 1) {
+					retkey = NsaBaseEndpoint.getAuthenticatedUser(ctx);
+				}
+			} catch (NullPointerException x) {
+
+				log.info(
+						"Invalid user request " + ctx.request().getContentType() + " Message:" + jsonObject.toString());
+				CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: Unauthorized user" + x.toString());
+				respondWithCustomMsginJson(ctx, HttpStatusCodes.k401_unauthorized, "Invalid user");
+				return;
+
+			}
+			if (retkey != null || CommonStartup.authflag == 0) {
+
+				if (CommonStartup.schema_Validatorflag > 0) {
+
+					fr = new FileReader(CommonStartup.schemaFile);
+					String schema = new JsonParser().parse(fr).toString();
+
+					valresult = CommonStartup.schemavalidate(jsonObject.toString(), schema);
+					if (valresult.equals("true")) {
+						log.info("Validation successful");
+					} else if (valresult.equals("false")) {
+						log.info("Validation failed");
+						respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest, "Schema validation failed");
+						return;
+					} else {
+						log.error("Validation errored" + valresult);
+						respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest, "Couldn't parse JSON object");
+						return;
+					}
+				}
+
+				// reject anything that's not JSON
+				if (!ctx.request().getContentType().equalsIgnoreCase("application/json")) {
+					log.info("Rejecting request with content type " + ctx.request().getContentType() + " Message:"
+							+ jsonObject);
+					respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest,
+							"Incorrect message content-type; only accepts application/json messages");
+					return;
+				}
+				jsonObject.put("VESuniqueId", uuid);
+				final JSONArray jsonArray = new JSONArray().put(jsonObject);
+
+				CommonStartup.handleEvents(jsonArray);
+			} else {
+				log.info(
+						"Unauthorized request " + ctx.request().getContentType() + " Message:" + jsonObject.toString());
+				respondWithCustomMsginJson(ctx, HttpStatusCodes.k401_unauthorized, "Unauthorized user");
+				return;
+			}
+
+		} catch (JSONException | NullPointerException | IOException x) {
+			log.error("Couldn't parse JSON Array - HttpStatusCodes.k400_badRequest" + HttpStatusCodes.k400_badRequest
+					+ " Message:" + x.getMessage());
+			CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: Invalid user request " + x.toString());
+			respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest, "Couldn't parse JSON object");
+			return;
+		} catch (QueueFullException e) {
+			e.printStackTrace();
+			log.error("Collector internal queue full :" + e.getMessage());
+			CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: QueueFull" + e.toString());
+			respondWithCustomMsginJson(ctx, HttpStatusCodes.k503_serviceUnavailable, "Queue full");
+			return;
+		} finally {
+			if (fr != null) {
+				safeClose(fr);
+			}
+			if (istr != null) {
+				safeClose(istr);
+			}
+		}
+		log.info("MessageAccepted and k200_ok to be sent");
+		ctx.response().sendErrorAndBody(HttpStatusCodes.k200_ok, "Message Accepted", MimeTypes.kAppJson);
+	}
+
+	public static void receiveMultipleEvents(DrumlinRequestContext ctx) throws IOException {
+		// the request body carries events. assume for now it's an array
+		// of json objects that fits in memory. (See cambria's parsing for
+		// handling large messages)
+
+		NsaSimpleApiKey retkey = null;
+
+		JSONArray jsonArray = null;
+		JSONArray jsonArrayMod = new JSONArray();
+		JSONObject event = null;
+		JSONObject jsonObject = null;
+		FileReader fr = null;
+		InputStream istr = null;
+
+		try {
+
+			// String br = new BufferedReader(new
+			// InputStreamReader(ctx.request().getBodyStream())).readLine();
+			// JsonElement msg = new JsonParser().parse(new BufferedReader(new
+			// InputStreamReader(ctx.request().getBodyStream())).readLine());
+			// jsonArray = new JSONArray ( new JSONTokener ( ctx.request
+			// ().getBodyStream () ) );
+
+			final UUID uuid = java.util.UUID.randomUUID();
+			LoggingContext localLC = VESLogger.getLoggingContextForThread(uuid);
+			localLC.put(EcompFields.kBeginTimestampMs, SaClock.now());
+
+			log.debug("Request recieved :" + ctx.request().getRemoteAddress());
+
+			istr = ctx.request().getBodyStream();
+			jsonObject = new JSONObject(new JSONTokener(istr));
+			// jsonObject = new JSONObject ( new JSONTokener ( ctx.request
+			// ().getBodyStream () ) );
+
+			CommonStartup.inlog.info(ctx.request().getRemoteAddress() + "Input Messsage: " + jsonObject);
+			log.info("Input Messsage: " + jsonObject);
+
+			try {
+				if (CommonStartup.authflag == 1) {
+					retkey = NsaBaseEndpoint.getAuthenticatedUser(ctx);
+				}
+			} catch (NullPointerException x) {
+				log.info(
+						"Invalid user request " + ctx.request().getContentType() + " Message:" + jsonObject.toString());
+				CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: Unauthorized user" + x.toString());
+				respondWithCustomMsginJson(ctx, HttpStatusCodes.k401_unauthorized, "Invalid user");
+				return;
+			}
+
+			if (retkey != null || CommonStartup.authflag == 0) {
+				if (CommonStartup.schema_Validatorflag > 0) {
+
+					fr = new FileReader(CommonStartup.schemaFile);
+					String schema = new JsonParser().parse(fr).toString();
+
+					valresult = CommonStartup.schemavalidate(jsonObject.toString(), schema);
+					if (valresult.equals("true")) {
+						log.info("Validation successful");
+					} else if (valresult.equals("false")) {
+						log.info("Validation failed");
+						respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest, "Schema validation failed");
+
+						return;
+					} else {
+						log.error("Validation errored" + valresult);
+						respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest, "Couldn't parse JSON object");
+						return;
+
+					}
+					jsonArray = jsonObject.getJSONArray("eventList");
+					log.info("Validation successful for all events in batch");
+					for (int i = 0; i < jsonArray.length(); i++) {
+						event = new JSONObject().put("event", jsonArray.getJSONObject(i));
+						event.put("VESuniqueId", uuid + "-" + i);
+						jsonArrayMod.put(event);
+					}
+
+					log.info("Modified jsonarray:" + jsonArrayMod.toString());
+
+				}
+				// reject anything that's not JSON
+				if (!ctx.request().getContentType().equalsIgnoreCase("application/json")) {
+					log.info("Rejecting request with content type " + ctx.request().getContentType() + " Message:"
+							+ jsonObject);
+					respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest,
+							"Incorrect message content-type; only accepts application/json messages");
+					return;
+				}
+
+				CommonStartup.handleEvents(jsonArrayMod);
+			} else {
+				log.info(
+						"Unauthorized request " + ctx.request().getContentType() + " Message:" + jsonObject.toString());
+				respondWithCustomMsginJson(ctx, HttpStatusCodes.k401_unauthorized, "Unauthorized user");
+				return;
+			}
+		} catch (JSONException | NullPointerException | IOException x) {
+			log.error("Couldn't parse JSON Array - HttpStatusCodes.k400_badRequest" + HttpStatusCodes.k400_badRequest
+					+ " Message:" + x.getMessage());
+			CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: Invalid user request " + x.toString());
+			respondWithCustomMsginJson(ctx, HttpStatusCodes.k400_badRequest, "Couldn't parse JSON object");
+			return;
+		} catch (QueueFullException e) {
+			e.printStackTrace();
+			log.error("Collector internal queue full :" + e.getMessage());
+			CommonStartup.eplog.info("EVENT_RECEIPT_FAILURE: QueueFull" + e.toString());
+			respondWithCustomMsginJson(ctx, HttpStatusCodes.k503_serviceUnavailable, "Queue full");
+			return;
+		} finally {
+			if (fr != null) {
+				safeClose(fr);
+			}
+
+			if (istr != null) {
+				safeClose(istr);
+			}
+		}
+		log.info("MessageAccepted and k200_ok to be sent");
+		ctx.response().sendErrorAndBody(HttpStatusCodes.k200_ok, "Message Accepted", MimeTypes.kAppJson);
+	}
+
+	public static void respondWithCustomMsginJson(DrumlinRequestContext ctx, int sc, String msg) {
+		String[] str = null;
+		String ExceptionType = "GeneralException";
+
+		str = CustomExceptionLoader.LookupMap(String.valueOf(sc), msg);
+		System.out.println("Post CustomExceptionLoader.LookupMap" + str);
+
+		if (str != null) {
+
+			if (str[0].matches("SVC")) {
+				ExceptionType = "ServiceException";
+			} else if (str[1].matches("POL")) {
+				ExceptionType = "PolicyException";
+			}
+
+			JSONObject jb = new JSONObject().put("requestError",
+					new JSONObject().put(ExceptionType, new JSONObject().put("MessagID", str[0]).put("text", str[1])));
+
+			log.debug("Constructed json error : " + jb.toString());
+			ctx.response().sendErrorAndBody(sc, jb.toString(), MimeTypes.kAppJson);
+		} else {
+			JSONObject jb = new JSONObject().put("requestError",
+					new JSONObject().put(ExceptionType, new JSONObject().put("Status", sc).put("Error", msg)));
+			ctx.response().sendErrorAndBody(sc, jb.toString(), MimeTypes.kAppJson);
+		}
+
+	}
+
+	public static void safeClose(FileReader fr) {
+		if (fr != null) {
+			try {
+				fr.close();
+			} catch (IOException e) {
+				log.error("Error closing file reader stream : " + e.toString());
+			}
+		}
+
+	}
+
+	public static void safeClose(InputStream is) {
+		if (is != null) {
+			try {
+				is.close();
+			} catch (IOException e) {
+				log.error("Error closing Input stream : " + e.toString());
+			}
+		}
+
+	}
+
+}
diff --git a/src/main/resources/routes.conf b/src/main/resources/routes.conf
index 5c2cadb..87c6edc 100644
--- a/src/main/resources/routes.conf
+++ b/src/main/resources/routes.conf
@@ -4,10 +4,13 @@ package org.openecomp.dcae.restapi.endpoints
 # We need to deprecate the original non-versioned paths and use /v1/ for them.
 # Non-versioned paths will be supported "permanently."
 #
-
 #
 # post events
 #
+POST	/eventListener/v5		EventReceipt.receiveSingleEvent
+POST	/eventListener/v5/eventBatch	EventReceipt.receiveMultipleEvents
+POST	/eventListener/v4		EventReceipt.receiveSingleEvent
+POST	/eventListener/v4/eventBatch	EventReceipt.receiveMultipleEvents
 POST	/eventListener/v3		EventReceipt.receiveSingleEvent
 POST	/eventListener/v3/eventBatch	EventReceipt.receiveMultipleEvents
 POST	/eventListener/v1.1		EventReceipt.receiveSingleEvent
@@ -27,7 +30,7 @@ POST	/eventListener/v1/eventBatch	EventReceipt.receiveMultipleEvents
 # UI
 #
 GET	/		Ui.hello
-
+GET	/healthcheck	Ui.hello
 # typical static file paths
 GET	/css/		staticDir:css
diff --git a/src/main/resources/templates/hello.html b/src/main/resources/templates/hello.html
index 3c2b806..84a5ba9 100644
--- a/src/main/resources/templates/hello.html
+++ b/src/main/resources/templates/hello.html
@@ -21,7 +21,7 @@
 #set($tab="")
#parse("header.html")
- <h1>RESTful Collector API</h1>
- <p>This is a RESTful Collector API server.</p>
+ <h1>VES Collector API</h1>
+ <p>This is a VES Collector API server.</p>
#parse("footer.html")
diff --git a/src/main/scripts/SErestfulCollector.sh b/src/main/scripts/VESrestfulCollector.sh
index 9d39c16..a6d2d27 100755
--- a/src/main/scripts/SErestfulCollector.sh
+++ b/src/main/scripts/VESrestfulCollector.sh
@@ -21,7 +21,7 @@
 ###
 usage() {
-	echo "SErestfulCollector.sh <start/stop>"
+	echo "VESrestfulCollector.sh <start/stop>"
 }
@@ -29,7 +29,7 @@ collector_start() {
 	collectorPid=`pgrep -f org.openecomp.dcae.commonFunction`
 	if [ ! -z "$collectorPid" ]; then
-		echo "WARNING: Restful Standard Event Collector already running as PID $collectorPid";
+		echo "WARNING: VES Restful Collector already running as PID $collectorPid";
 		echo "Startup Aborted!!!"
 		exit 1
 	fi
@@ -50,9 +50,9 @@ collector_start() {
 	# run java. The classpath is the etc dir for config files, and the lib dir
 	# for all the jars.
-	nohup $JAVA -cp "etc${PATHSEP}lib/*" $JAVA_OPTS $MAINCLASS $* &
+	nohup $JAVA -cp "etc${PATHSEP}lib/*" $JAVA_OPTS -Dhttps.protocols=TLSv1.1,TLSv1.2 $MAINCLASS $* &
 	if [ $? -ne 0 ]; then
-		echo "Restful Standard Event Collector has been started!!!"
+		echo "VES Restful Collector has been started!!!"
 	fi
@@ -66,12 +66,12 @@ collector_stop() {
 		kill -9 $collectorPid
 		sleep 5
 		if [ ! "$(pgrep -f org.openecomp.dcae.commonFunction)" ]; then
-			echo "Restful Standard Event Collector has been stopped!!!"
+			echo "VES Restful Collector has been stopped!!!"
 		else
-			echo "Restful Standard Event Collector is being stopped!!!"
+			echo "VES Restful Collector is being stopped!!!"
 		fi
 	else
-		echo "WARNING: No Restful Standard Event Collector is currently running";
+		echo "WARNING: No VES Collector instance is currently running";
 		exit 1
 	fi
diff --git a/src/main/scripts/VESrestfulCollector_Status.sh b/src/main/scripts/VESrestfulCollector_Status.sh
new file mode 100644
index 0000000..0365bc2
--- /dev/null
+++ b/src/main/scripts/VESrestfulCollector_Status.sh
@@ -0,0 +1,41 @@
+###
+# ============LICENSE_START=======================================================
+# PROJECT
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+#!/bin/sh
+
+#secPid=`pgrep -f com.att.dcae.commonFunction.CommonStartup` --> master
+secPid=`pgrep -f org.openecomp.dcae.commonFunction.CommonStartup`
+
+
+if [ "${secPid}" ]
+then
+	#errorcnt = `grep -c "CambriaSimplerBatchPublisher - Send failed" ../logs/collector.log`
+	errorcnt=`tail -1000 ../logs/collector.log | grep -c "CambriaSimplerBatchPublisher - Send failed"`
+
+	if [ $errorcnt -gt 10 ]
+	then
+		echo "VESCollecter_Is_HavingError to publish"
+	else
+		echo "VESCollecter_Is_Running as PID $secPid"
+	fi
+else
+	echo "VESCollecter_Is_Not_Running"
+fi
+exit
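For quick reference, the routes.conf hunks above expose new /eventListener/v4, /eventListener/v5 and /healthcheck paths. A minimal sketch of how a client might exercise them once the collector is running; the port, the basic-auth credentials and the event body are illustrative assumptions, not values defined by this change (a real event must conform to the CommonEventFormat schema used by the validator):
```
# Liveness probe via the new GET /healthcheck route (served by Ui.hello); port is an assumption
curl -i http://localhost:8080/healthcheck

# Single event to the new v5 listener; -u is only needed when the collector
# is started with authentication enabled (authflag=1), credentials are placeholders
curl -i -u sample-user:sample-password \
     -H "Content-Type: application/json" \
     -d '{"event":{"commonEventHeader":{}}}' \
     http://localhost:8080/eventListener/v5
```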