diff options
40 files changed, 4740 insertions, 961 deletions
diff --git a/datafile-app-server/config/application.yaml b/datafile-app-server/config/application.yaml index b66f7b6e..f2538578 100644 --- a/datafile-app-server/config/application.yaml +++ b/datafile-app-server/config/application.yaml @@ -16,6 +16,6 @@ logging: org.springframework.data: ERROR org.springframework.web.reactive.function.client.ExchangeFunctions: ERROR org.onap.dcaegen2.collectors.datafile: ERROR - file: opt/log/application.log + file: /var/log/ONAP/application.log app: - filepath: config/datafile_endpoints.json + filepath: /opt/app/datafile/config/datafile_endpoints.json diff --git a/datafile-app-server/dpo/blueprints/k8s-datafile.yaml b/datafile-app-server/dpo/blueprints/k8s-datafile.yaml new file mode 100644 index 00000000..9d13f04f --- /dev/null +++ b/datafile-app-server/dpo/blueprints/k8s-datafile.yaml @@ -0,0 +1,180 @@ +# -*- indent-tabs-mode: nil -*- # vi: set expandtab: +# +# ============LICENSE_START==================================================== +# ============================================================================= +# Copyright (C) 2019 Nordix Foundation. +# ============================================================================= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============LICENSE_END====================================================== + +tosca_definitions_version: cloudify_dsl_1_3 + +imports: + - "http://www.getcloudify.org/spec/cloudify/3.4/types.yaml" + - "https://nexus.onap.org/service/local/repositories/raw/content/org.onap.dcaegen2.platform.plugins/R3/k8splugin/1.4.4/k8splugin_types.yaml" + +inputs: + dmaap_mr_host: + type: string + description: dmaap messagerouter host + default: message-router.onap.svc.cluster.local + dmaap_mr_port: + type: integer + description: dmaap messagerouter port + default: 3904 + dmaap_mr_user: + type: string + description: dmaap messagerouter user name + default: "admin" + dmaap_mr_passwd: + type: string + description: dmaap messagerouter password + default: "admin" + dmaap_buscontroller_service_host: + type: string + description: DMAAP Bus Controller host address + default: "dmaap-bc.onap.svc.cluster.local" + dmaap_buscontroller_service_port: + type: string + description: DMAAP bus Controller host port + default: "6666" + dmaap_dr_feed_id: + type: string + description: ID of the data router feed that datafile will publish + default: "bulk_pm_feed" + dmaap_dr_host: + type: string + description: dmaap datarouter host + default: dmaap-dr-prov.onap.svc.cluster.local + dmaap_dr_port: + type: integer + description: dmaap datarouter port + default: 8443 + dmaap_dr_user: + type: string + description: dmaap datarouter user name + default: "dradmin" + dmaap_dr_passwd: + type: string + description: dmaap datarouter password + default: "dradmin" + tag_version: + type: string + default: "nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server:1.0-SNAPSHOT" + replicas: + type: integer + description: number of instances + default: 1 + host_port: + type: integer + description: port on Kubernetes host where datafile API will be exposed + default: 30223 + host_port_secure: + type: integer + description: secure port on Kubernetes host where datafile API will be exposed + 
default: 30224 + secureEnableCert: + type: boolean + description: enable certificate base connection with PNF and DMaap + default: false +node_templates: + datafile-collector: + interfaces: + cloudify.interfaces.lifecycle: + start: + inputs: + ports: + - concat: ["8100:", { get_input: host_port }] + - concat: ["8433:", { get_input: host_port_secure}] + properties: + application_config: + dmaap.dmaapBusControllerConfiguration.dmaapHostName: {get_input: dmaap_buscontroller_service_host} + dmaap.dmaapBusControllerConfiguration.dmaapPortNumber: {get_input: dmaap_buscontroller_service_port} + dmaap.dmaapBusControllerConfiguration.dmaapTopicName: "webapi/feeds" + dmaap.dmaapBusControllerConfiguration.dmaapDrFeedName: {get_input: dmaap_dr_feed_id} + dmaap.dmaapBusControllerConfiguration.dmaapProtocol: "https" + dmaap.dmaapBusControllerConfiguration.dmaapUserName: "dbcadmin" + dmaap.dmaapBusControllerConfiguration.dmaapUserPassword: "dbcadmin" + dmaap.dmaapBusControllerConfiguration.dmaapContentType: "application/json" + dmaap.ftp.ftpesConfiguration.keyCert: "/config/ftpKey.jks" + dmaap.ftp.ftpesConfiguration.keyPassword: "secret" + dmaap.ftp.ftpesConfiguration.trustedCA: "/config/cacerts" + dmaap.ftp.ftpesConfiguration.trustedCAPassword: "secret" + dmaap.security.trustStorePath: "/opt/app/datafile/etc/cert/trust.jks" + dmaap.security.trustStorePasswordPath: "/opt/app/datafile/etc/cert/trust.pass" + dmaap.security.keyStorePath: "/opt/app/datafile/etc/cert/key.p12" + dmaap.security.keyStorePasswordPath: "/opt/app/datafile/etc/cert/key.pass" + dmaap.security.enableDmaapCertAuth: { get_input: secureEnableCert } + streams_subscribes: + dmaap_subscriber: + type: + "message_router" + dmmap_info: + dmaapHostName: + get_input: dmaap_mr_host + dmaapPortNumber: + get_input: dmaap_mr_port + dmaapTopicName: + "/events/unauthenticated.VES_NOTIFICATION_OUTPUT" + dmaapProtocol: + "http" + dmaapUserName: + get_input: dmaap_mr_user + dmaapUserPassword: + get_input: dmaap_mr_passwd + 
dmaapContentType: + "application/json" + consumerId: + "C12" + consumerGroup: + "OpenDCAE-c12" + timeoutMs: + -1 + messageLimit: + -1 + streams_publishes: + dmaap_publisher: + type: + "data_router" + dmaap_info: + dmaapHostName: + get_input: dmaap_dr_host + dmaapPortNumber: + get_input: dmaap_dr_port + dmaapTopicName: + "publish" + dmaapProtocol: + "https" + dmaapUserName: + get_input: dmaap_dr_user + dmaapUserPassword: + get_input: dmaap_dr_passwd + dmaapContentType: + "application/octet-stream" + docker_config: + healthcheck: + endpoint: /heartbeat + interval: 15s + timeout: 1s + type: http + image: + get_input: tag_version + replicas: {get_input: replicas} + name: 'dcae-datafile-collector' + dns_name: 'dcae-datafile-collector' + log_info: + log_directory: "/opt/app/datafile/logs" + tls_info: + cert_directory: '/opt/app/datafile/etc/cert/' + use_tls: true + type: dcae.nodes.ContainerizedPlatformComponent
\ No newline at end of file diff --git a/datafile-app-server/dpo/data-formats/VES-7.30.1-dataformat.json b/datafile-app-server/dpo/data-formats/VES-7.30.1-dataformat.json new file mode 100644 index 00000000..ba6810ac --- /dev/null +++ b/datafile-app-server/dpo/data-formats/VES-7.30.1-dataformat.json @@ -0,0 +1,2754 @@ +{ + "self": { + "name": "VES_specification", + "version": "7.30.1", + "description": "VES spec for v7.0.1" + }, + "dataformatversion": "1.0.0", + "jsonschema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "VES Event Listener Common Event Format", + "type": "object", + "properties": { + "event": { + "$ref": "#/definitions/event" + }, + "eventList": { + "$ref": "#/definitions/eventList" + } + }, + "definitions": { + "schemaHeaderBlock": { + "description": "schema date, version, author and associated API", + "type": "object", + "properties": { + "associatedApi": { + "description": "VES Event Listener", + "type": "string" + }, + "lastUpdatedBy": { + "description": "re2947", + "type": "string" + }, + "schemaDate": { + "description": "July 31, 2018", + "type": "string" + }, + "schemaVersion": { + "description": "30.0.1", + "type": "number" + } + } + }, + "schemaLicenseAndCopyrightNotice": { + "description": "Copyright (c) 2018, AT&T Intellectual Property. All rights reserved", + "type": "object", + "properties": { + "apacheLicense2.0": { + "description": "Licensed under the Apache License, Version 2.0 (the 'License'); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at:", + "type": "string" + }, + "licenseUrl": { + "description": "http://www.apache.org/licenses/LICENSE-2.0", + "type": "string" + }, + "asIsClause": { + "description": "Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", + "type": "string" + }, + "permissionsAndLimitations": { + "description": "See the License for the specific language governing permissions and limitations under the License.", + "type": "string" + } + } + }, + "arrayOfJsonObject": { + "description": "array of json objects described by name, schema and other meta-information", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObject" + } + }, + "arrayOfNamedHashMap": { + "description": "array of named hashMaps", + "type": "array", + "items": { + "$ref": "#/definitions/namedHashMap" + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { + "type": "string" + }, + "numberInUse": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "codecIdentifier", + "numberInUse" + ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurement", + "mobileFlow", + "notification", + "other", + "pnfRegistration", + "sipSignaling", + "stateChange", + "syslog", + "thresholdCrossingAlert", + "voiceQuality" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventName": { + "description": "unique event name", + "type": "string" + }, + "eventType": { + "description": "for example - applicationNf, 
guestOS, hostOS, platform", + "type": "string" + }, + "internalHeaderFields": { + "$ref": "#/definitions/internalHeaderFields" + }, + "lastEpochMicrosec": { + "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "nfcNamingCode": { + "description": "3 character network function component type, aligned with vfc naming standards", + "type": "string" + }, + "nfNamingCode": { + "description": "4 character network function type, aligned with nf naming standards", + "type": "string" + }, + "nfVendorName": { + "description": "network function vendor name", + "type": "string" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an EMS name; may be the same as sourceName", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "timeZoneOffset": { + "description": "UTC offset for the local time zone of the device as UTC+/-hh.mm", + "type": "string" + }, + "version": { + 
"description": "version of the event header", + "type": "string", + "enum": [ + "4.0.1" + ] + }, + "vesEventListenerVersion": { + "description": "version of the VES Event Listener API", + "type": "string", + "enum": [ + "7.0.1" + ] + } + }, + "additionalProperties": false, + "required": [ + "domain", + "eventId", + "eventName", + "lastEpochMicrosec", + "priority", + "reportingEntityName", + "sequence", + "sourceName", + "startEpochMicrosec", + "version", + "vesEventListenerVersion" + ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { + "type": "string", + "enum": [ + "CRIT", + "MAJ" + ] + }, + "hashMap": { + "$ref": "#/definitions/hashMap" + }, + "thresholdCrossed": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "criticality", + "hashMap", + "thresholdCrossed" + ] + }, + "cpuUsage": { + "description": "usage of an identified CPU", + "type": "object", + "properties": { + "cpuCapacityContention": { + "description": "the amount of time the CPU cannot run due to contention, in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuDemandAvg": { + "description": "the total CPU time that the NF/NFC/VM could use if there was no contention, in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuDemandMhz": { + "description": "CPU demand in megahertz", + "type": "number" + }, + "cpuDemandPct": { + "description": "CPU demand as a percentage of the provisioned capacity", + "type": "number" + }, + "cpuIdentifier": { + "description": "cpu identifer", + "type": "string" + }, + "cpuIdle": { + "description": "percentage of CPU time spent in the idle task", + "type": "number" + }, + "cpuLatencyAvg": { + "description": "percentage of time the VM is unable to run because it is contending for access to the physical CPUs", + "type": "number" + }, + "cpuOverheadAvg": { + "description": "the overhead demand above available allocations and 
reservations, in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuSwapWaitTime": { + "description": "swap wait time. in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuUsageInterrupt": { + "description": "percentage of time spent servicing interrupts", + "type": "number" + }, + "cpuUsageNice": { + "description": "percentage of time spent running user space processes that have been niced", + "type": "number" + }, + "cpuUsageSoftIrq": { + "description": "percentage of time spent handling soft irq interrupts", + "type": "number" + }, + "cpuUsageSteal": { + "description": "percentage of time spent in involuntary wait which is neither user, system or idle time and is effectively time that went missing", + "type": "number" + }, + "cpuUsageSystem": { + "description": "percentage of time spent on system tasks running the kernel", + "type": "number" + }, + "cpuUsageUser": { + "description": "percentage of time spent running un-niced user space processes", + "type": "number" + }, + "cpuWait": { + "description": "percentage of CPU time spent waiting for I/O operations to complete", + "type": "number" + }, + "percentUsage": { + "description": "aggregate cpu usage of the virtual machine on which the xNFC reporting the event is running", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "cpuIdentifier", + "percentUsage" + ] + }, + "diskUsage": { + "description": "usage of an identified disk", + "type": "object", + "properties": { + "diskBusResets": { + "description": "number of bus resets over the measurementInterval", + "type": "number" + }, + "diskCommandsAborted": { + "description": "number of disk commands aborted over the measurementInterval", + "type": "number" + }, + "diskCommandsAvg": { + "description": "average number of commands per second over the measurementInterval", + "type": "number" + }, + "diskFlushRequests": { + "description": "total flush requests of the disk cache over the 
measurementInterval", + "type": "number" + }, + "diskFlushTime": { + "description": "milliseconds spent on disk cache flushing over the measurementInterval", + "type": "number" + }, + "diskIdentifier": { + "description": "disk identifier", + "type": "string" + }, + "diskIoTimeAvg": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the average over the measurement interval", + "type": "number" + }, + "diskIoTimeLast": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMax": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMin": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadAvg": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadLast": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMax": { + "description": "number of logical read operations that were merged 
into physical read operations, e.g., two logical reads were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMin": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteAvg": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteLast": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMax": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMin": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadAvg": { + "description": "number of octets per second read from a disk or partition; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadLast": { + "description": "number of octets per second read from a disk or partition; provide the last measurement within the measurement interval", + "type": 
"number" + }, + "diskOctetsReadMax": { + "description": "number of octets per second read from a disk or partition; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadMin": { + "description": "number of octets per second read from a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteAvg": { + "description": "number of octets per second written to a disk or partition; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteLast": { + "description": "number of octets per second written to a disk or partition; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMax": { + "description": "number of octets per second written to a disk or partition; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMin": { + "description": "number of octets per second written to a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadAvg": { + "description": "number of read operations per second issued to the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadLast": { + "description": "number of read operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMax": { + "description": "number of read operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMin": { + "description": "number of read operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteAvg": { + "description": "number of write operations per second 
issued to the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteLast": { + "description": "number of write operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMax": { + "description": "number of write operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMin": { + "description": "number of write operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsAvg": { + "description": "queue size of pending I/O operations per second; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsLast": { + "description": "queue size of pending I/O operations per second; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMax": { + "description": "queue size of pending I/O operations per second; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMin": { + "description": "queue size of pending I/O operations per second; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskReadCommandsAvg": { + "description": "average number of read commands issued per second to the disk over the measurementInterval", + "type": "number" + }, + "diskTime": { + "description": "nanoseconds spent on disk cache reads/writes within the measurement interval", + "type": "number" + }, + "diskTimeReadAvg": { + "description": "milliseconds a read operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadLast": { + "description": "milliseconds a read operation took to complete; 
provide the last measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMax": { + "description": "milliseconds a read operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMin": { + "description": "milliseconds a read operation took to complete; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteAvg": { + "description": "milliseconds a write operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteLast": { + "description": "milliseconds a write operation took to complete; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteMax": { + "description": "milliseconds a write operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteMin": { + "description": "milliseconds a write operation took to complete; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskTotalReadLatencyAvg": { + "description": "average read time from the perspective of a Guest OS: sum of the Kernel Read Latency and Physical Device Read Latency in milliseconds over the measurement interval", + "type": "number" + }, + "diskTotalWriteLatencyAvg": { + "description": "average write time from the perspective of a Guest OS: sum of the Kernel Write Latency and Physical Device Write Latency in milliseconds over the measurement interval", + "type": "number" + }, + "diskWeightedIoTimeAvg": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the average within the collection interval", + "type": "number" + }, + "diskWeightedIoTimeLast": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be 
accumulating; value is the last within the collection interval", + "type": "number" + }, + "diskWeightedIoTimeMax": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the maximum within the collection interval", + "type": "number" + }, + "diskWeightedIoTimeMin": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the minimum within the collection interval", + "type": "number" + }, + "diskWriteCommandsAvg": { + "description": "average number of write commands issued per second to the disk over the measurementInterval", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "diskIdentifier" + ] + }, + "endOfCallVqmSummaries": { + "description": "provides end of call voice quality metrics", + "type": "object", + "properties": { + "adjacencyName": { + "description": " adjacency name", + "type": "string" + }, + "endpointAverageJitter": { + "description": "endpoint average jitter", + "type": "number" + }, + "endpointDescription": { + "description": "either Caller or Callee", + "type": "string", + "enum": [ + "Caller", + "Callee" + ] + }, + "endpointMaxJitter": { + "description": "endpoint maximum jitter", + "type": "number" + }, + "endpointRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsLost": { + "description": "endpoint RTP octets lost", + "type": "number" + }, + "endpointRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsSent": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsLost": { + "description": "endpoint RTP packets lost", + "type": "number" + }, + "endpointRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsSent": { + "description": "", + "type": "number" + }, + "localAverageJitter": { 
+ "description": "Local average jitter", + "type": "number" + }, + "localAverageJitterBufferDelay": { + "description": "Local average jitter delay", + "type": "number" + }, + "localMaxJitter": { + "description": "Local maximum jitter", + "type": "number" + }, + "localMaxJitterBufferDelay": { + "description": "Local maximum jitter delay", + "type": "number" + }, + "localRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpOctetsLost": { + "description": "Local RTP octets lost", + "type": "number" + }, + "localRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "localRtpOctetsSent": { + "description": "", + "type": "number" + }, + "localRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpPacketsLost": { + "description": "Local RTP packets lost", + "type": "number" + }, + "localRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "localRtpPacketsSent": { + "description": "", + "type": "number" + }, + "mosCqe": { + "description": "1-5 1dp", + "type": "number" + }, + "oneWayDelay": { + "description": "one-way path delay in milliseconds", + "type": "number" + }, + "packetLossPercent": { + "description": "Calculated percentage packet loss based on Endpoint RTP packets lost (as reported in RTCP) and Local RTP packets sent. Direction is based on Endpoint description (Caller, Callee). 
Decimal (2 dp)", + "type": "number" + }, + "rFactor": { + "description": "0-100", + "type": "number" + }, + "roundTripDelay": { + "description": "millisecs", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "adjacencyName", + "endpointDescription" + ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { + "$ref": "#/definitions/commonEventHeader" + }, + "faultFields": { + "$ref": "#/definitions/faultFields" + }, + "heartbeatFields": { + "$ref": "#/definitions/heartbeatFields" + }, + "measurementFields": { + "$ref": "#/definitions/measurementFields" + }, + "mobileFlowFields": { + "$ref": "#/definitions/mobileFlowFields" + }, + "notificationFields": { + "$ref": "#/definitions/notificationFields" + }, + "otherFields": { + "$ref": "#/definitions/otherFields" + }, + "pnfRegistrationFields": { + "$ref": "#/definitions/pnfRegistrationFields" + }, + "sipSignalingFields": { + "$ref": "#/definitions/sipSignalingFields" + }, + "stateChangeFields": { + "$ref": "#/definitions/stateChangeFields" + }, + "syslogFields": { + "$ref": "#/definitions/syslogFields" + }, + "thresholdCrossingAlertFields": { + "$ref": "#/definitions/thresholdCrossingAlertFields" + }, + "voiceQualityFields": { + "$ref": "#/definitions/voiceQualityFields" + } + }, + "additionalProperties": false, + "required": [ + "commonEventHeader" + ] + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { + "$ref": "#/definitions/hashMap" + }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + 
"eventCategory": { + "description": "Event category, for example: license, link, routing, security, signaling", + "type": "string" + }, + "eventSeverity": { + "description": "event severity", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: card, host, other, port, portThreshold, router, slotThreshold, switch, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "string", + "enum": [ + "4.0" + ] + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "additionalProperties": false, + "required": [ + "alarmCondition", + "eventSeverity", + "eventSourceType", + "faultFieldsVersion", + "specificProblem", + "vfStatus" + ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { + "type": "number" + }, + "blockIops": { + "type": "number" + }, + "blockUsed": { + "type": "number" + }, + "ephemeralConfigured": { + "type": "number" + }, + "ephemeralIops": { + "type": "number" + }, + "ephemeralUsed": { + "type": "number" + }, + "filesystemName": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "blockConfigured", + "blockIops", + "blockUsed", + "ephemeralConfigured", + "ephemeralIops", + "ephemeralUsed", + "filesystemName" + ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + 
"type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + "avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in 
integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { + "$ref": "#/definitions/hashMap" + }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { + "$ref": "#/definitions/hashMap" + }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number 
of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": 
"number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { + "$ref": "#/definitions/hashMap" + }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "avgBitErrorRate", + "avgPacketDelayVariation", + "avgPacketLatency", + "avgReceiveThroughput", + "avgTransmitThroughput", + "flowActivationEpoch", + "flowActivationMicrosec", + "flowDeactivationEpoch", + "flowDeactivationMicrosec", + "flowDeactivationTime", + "flowStatus", + "maxPacketDelayVariation", + "numActivationFailures", + "numBitErrors", + "numBytesReceived", + "numBytesTransmitted", + "numDroppedPackets", + "numL7BytesReceived", + "numL7BytesTransmitted", + "numLostPackets", + "numOutOfOrderPackets", + "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", + "numTimeouts", + "numTunneledL7BytesReceived", + "roundTripTime", + "timeToFirstByte" + ] + }, + "hashMap": { + "description": "an associative array which is an array of key:value pairs", + "type": "object", + "additionalProperties": { + "type": "string" + }, + "default": { + + } + }, + "heartbeatFields": { + "description": "optional field block for fields specific to heartbeat events", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + 
"heartbeatFieldsVersion": { + "description": "version of the heartbeatFields block", + "type": "string", + "enum": [ + "3.0" + ] + }, + "heartbeatInterval": { + "description": "current heartbeat interval in seconds", + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "heartbeatFieldsVersion", + "heartbeatInterval" + ] + }, + "hugePages": { + "description": "metrics on system hugepages", + "type": "object", + "properties": { + "bytesFree": { + "description": "number of free hugepages in bytes", + "type": "number" + }, + "bytesUsed": { + "description": "number of used hugepages in bytes", + "type": "number" + }, + "hugePagesIdentifier": { + "description": "hugePages identifier", + "type": "number" + }, + "percentFree": { + "description": "number of free hugepages in percent", + "type": "number" + }, + "percentUsed": { + "description": "number of free hugepages in percent", + "type": "number" + }, + "vmPageNumberFree": { + "description": "number of free vmPages in numbers", + "type": "number" + }, + "vmPageNumberUsed": { + "description": "number of used vmPages in numbers", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "hugePagesIdentifier" + ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "ipmi": { + "description": "intelligent platform management interface metrics", + "type": "object", + "properties": { + "exitAirTemperature": { + "description": "system fan exit air flow temperature in celsius", + "type": "number" + }, + "frontPanelTemperature": { + "description": "front panel temperature in celsius", + "type": "number" + }, + "ioModuleTemperature": { + "description": "io module temperature in celsius", + "type": "number" + }, + "ipmiBaseboardTemperatureArray": { + "description": "array of ipmiBaseboardTemperature objects", + "type": "array", + "items": { + "$ref": 
"#/definitions/ipmiBaseboardTemperature" + } + }, + "ipmiBaseboardVoltageRegulatorArray": { + "description": "array of ipmiBaseboardVoltageRegulator objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiBaseboardVoltageRegulator" + } + }, + "ipmiBatteryArray": { + "description": "array of ipmiBattery objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiBattery" + } + }, + "ipmiFanArray": { + "description": "array of ipmiFan objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiFan" + } + }, + "ipmiHsbpArray": { + "description": "array of ipmiHsbp objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiHsbp" + } + }, + "ipmiGlobalAggregateTemperatureMarginArray": { + "description": "array of ipmiGlobalAggregateTemperatureMargin objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiGlobalAggregateTemperatureMargin" + } + }, + "ipmiNicArray": { + "description": "array of ipmiNic objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiNic" + } + }, + "ipmiPowerSupplyArray": { + "description": "array of ipmiPowerSupply objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiPowerSupply" + } + }, + "ipmiProcessorArray": { + "description": "array of ipmiProcessor objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiProcessor" + } + }, + "systemAirflow": { + "description": "airfflow in cubic feet per minute (cfm)", + "type": "number" + } + }, + "additionalProperties": false + }, + "ipmiBaseboardTemperature": { + "description": "intelligent platform management interface (ipmi) baseboard temperature metrics", + "type": "object", + "properties": { + "baseboardTemperatureIdentifier": { + "description": "identifier for the location where the temperature is taken", + "type": "string" + }, + "baseboardTemperature": { + "description": "baseboard temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + 
"baseboardTemperatureIdentifier" + ] + }, + "ipmiBaseboardVoltageRegulator": { + "description": "intelligent platform management interface (ipmi) baseboard voltage regulator metrics", + "type": "object", + "properties": { + "baseboardVoltageRegulatorIdentifier": { + "description": "identifier for the baseboard voltage regulator", + "type": "string" + }, + "voltageRegulatorTemperature": { + "description": "voltage regulator temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "baseboardVoltageRegulatorIdentifier" + ] + }, + "ipmiBattery": { + "description": "intelligent platform management interface (ipmi) battery metrics", + "type": "object", + "properties": { + "batteryIdentifier": { + "description": "identifier for the battery", + "type": "string" + }, + "batteryType": { + "description": "type of battery", + "type": "string" + }, + "batteryVoltageLevel": { + "description": "battery voltage level", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "batteryIdentifier" + ] + }, + "ipmiFan": { + "description": "intelligent platform management interface (ipmi) fan metrics", + "type": "object", + "properties": { + "fanIdentifier": { + "description": "identifier for the fan", + "type": "string" + }, + "fanSpeed": { + "description": "fan speed in revolutions per minute (rpm)", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "fanIdentifier" + ] + }, + "ipmiGlobalAggregateTemperatureMargin": { + "description": "intelligent platform management interface (ipmi) global aggregate temperature margin", + "type": "object", + "properties": { + "ipmiGlobalAggregateTemperatureMarginIdentifier": { + "description": "identifier for the ipmi global aggregate temperature margin metrics", + "type": "string" + }, + "globalAggregateTemperatureMargin": { + "description": "the difference between the current global aggregate temperature, in celsius, and the global aggregate throttling 
thermal trip point", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "ipmiGlobalAggregateTemperatureMarginIdentifier", + "globalAggregateTemperatureMargin" + ] + }, + "ipmiHsbp": { + "description": "intelligent platform management interface (ipmi) hot swap backplane power metrics", + "type": "object", + "properties": { + "hsbpIdentifier": { + "description": "identifier for the hot swap backplane power unit", + "type": "string" + }, + "hsbpTemperature": { + "description": "hot swap backplane power temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "hsbpIdentifier" + ] + }, + "ipmiNic": { + "description": "intelligent platform management interface (ipmi) network interface control card (nic) metrics", + "type": "object", + "properties": { + "nicIdentifier": { + "description": "identifier for the network interface control card", + "type": "string" + }, + "nicTemperature": { + "description": "nic temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "nicIdentifier" + ] + }, + "ipmiPowerSupply": { + "description": "intelligent platform management interface (ipmi) power supply metrics", + "type": "object", + "properties": { + "powerSupplyIdentifier": { + "description": "identifier for the power supply", + "type": "string" + }, + "powerSupplyInputPower": { + "description": "input power in watts", + "type": "number" + }, + "powerSupplyCurrentOutputPercent": { + "description": "current output voltage as a percentage of the design specified level", + "type": "number" + }, + "powerSupplyTemperature": { + "description": "power supply temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "powerSupplyIdentifier" + ] + }, + "ipmiProcessor": { + "description": "intelligent platform management interface processor metrics", + "type": "object", + "properties": { + "processorIdentifier": { + 
"description": "identifier for an ipmi processor", + "type": "string" + }, + "processorThermalControlPercent": { + "description": "io module temperature in celsius", + "type": "number" + }, + "processorDtsThermalMargin": { + "description": "front panel temperature in celsius", + "type": "number" + }, + "processorDimmAggregateThermalMarginArray": { + "description": "array of processorDimmAggregateThermalMargin objects", + "type": "array", + "items": { + "$ref": "#/definitions/processorDimmAggregateThermalMargin" + } + } + }, + "additionalProperties": false, + "required": [ + "processorIdentifier" + ] + }, + "jsonObject": { + "description": "json object schema, name and other meta-information along with one or more object instances", + "type": "object", + "properties": { + "objectInstances": { + "description": "one or more instances of the jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObjectInstance" + } + }, + "objectName": { + "description": "name of the JSON Object", + "type": "string" + }, + "objectSchema": { + "description": "json schema for the object", + "type": "string" + }, + "objectSchemaUrl": { + "description": "Url to the json schema for the object", + "type": "string" + }, + "nfSubscribedObjectName": { + "description": "name of the object associated with the nfSubscriptonId", + "type": "string" + }, + "nfSubscriptionId": { + "description": "identifies an openConfig telemetry subscription on a network function, which configures the network function to send complex object data associated with the jsonObject", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "objectInstances", + "objectName" + ] + }, + "jsonObjectInstance": { + "description": "meta-information about an instance of a jsonObject along with the actual object instance", + "type": "object", + "properties": { + "jsonObject": { + "$ref": "#/definitions/jsonObject" + }, + "objectInstance": { + "description": "an instance conforming to the 
jsonObject objectSchema", + "type": "object" + }, + "objectInstanceEpochMicrosec": { + "description": "the unix time aka epoch time associated with this objectInstance--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "objectKeys": { + "description": "an ordered set of keys that identifies this particular instance of jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/key" + } + } + }, + "additionalProperties": false + }, + "key": { + "description": "tuple which provides the name of a key along with its value and relative order", + "type": "object", + "properties": { + "keyName": { + "description": "name of the key", + "type": "string" + }, + "keyOrder": { + "description": "relative sequence or order of the key with respect to other keys", + "type": "integer" + }, + "keyValue": { + "description": "value of the key", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "keyName" + ] + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { + "type": "number" + }, + "highEndOfLatencyBucket": { + "type": "number" + }, + "lowEndOfLatencyBucket": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "countsInTheBucket" + ] + }, + "load": { + "description": "/proc/loadavg cpu utilization and io utilization metrics", + "type": "object", + "properties": { + "longTerm": { + "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 15 minutes using /proc/loadavg", + "type": "number" + }, + "midTerm": { + "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 5 minutes using /proc/loadavg", + "type": "number" + }, + "shortTerm": { + "description": "number of jobs in the run queue 
(state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 1 minute using /proc/loadavg", + "type": "number" + } + }, + "additionalProperties": false + }, + "machineCheckException": { + "description": "metrics on vm machine check exceptions", + "type": "object", + "properties": { + "correctedMemoryErrors": { + "description": "total hardware errors that were corrected by the hardware (e.g. data corruption corrected via � ECC) over the measurementInterval", + "type": "number" + }, + "correctedMemoryErrorsIn1Hr": { + "description": "total hardware errors that were corrected by the hardware over the last one hour", + "type": "number" + }, + "uncorrectedMemoryErrors": { + "description": "total uncorrected hardware errors that were detected by the hardware (e.g., causing data corruption) over the measurementInterval", + "type": "number" + }, + "uncorrectedMemoryErrorsIn1Hr": { + "description": "total uncorrected hardware errors that were detected by the hardware over the last one hour", + "type": "number" + }, + "vmIdentifier": { + "description": "virtual machine identifier associated with the machine check exception", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "vmIdentifier" + ] + }, + "measurementFields": { + "description": "measurement fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "additionalMeasurements": { + "$ref": "#/definitions/arrayOfNamedHashMap" + }, + "additionalObjects": { + "$ref": "#/definitions/arrayOfJsonObject" + }, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or xNF over the measurementInterval", + "type": "integer" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, 
adjacencies, etc., for the VM, or subscribers, devices, etc., for the xNF", + "type": "integer" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "diskUsageArray": { + "description": "usage of an array of disks", + "type": "array", + "items": { + "$ref": "#/definitions/diskUsage" + } + }, + "featureUsageArray": { + "$ref": "#/definitions/hashMap" + }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the xNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "hugePagesArray": { + "description": "array of metrics on hugepPages", + "type": "array", + "items": { + "$ref": "#/definitions/hugePages" + } + }, + "ipmi": { + "$ref": "#/definitions/ipmi" + }, + "latencyDistribution": { + "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-xNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "loadArray": { + "description": "array of system load metrics", + "type": "array", + "items": { + "$ref": "#/definitions/load" + } + }, + "machineCheckExceptionArray": { + "description": "array of machine check exceptions", + "type": "array", + "items": { + "$ref": "#/definitions/machineCheckException" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the xNFC reporting the event is running", + "type": "number" + }, + "measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementFieldsVersion": { + "description": "version of the measurementFields block", + "type": "string", + "enum": [ + "4.0" + ] + }, + "memoryUsageArray": { + "description": "memory usage of an array of VMs", + "type": "array", + "items": { + "$ref": 
"#/definitions/memoryUsage" + } + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "integer" + }, + "requestRate": { + "description": "peak rate of service requests per second to the xNF over the measurementInterval", + "type": "number" + }, + "nfcScalingMetric": { + "description": "represents busy-ness of the network function from 0 to 100 as reported by the xNFC", + "type": "integer" + }, + "nicPerformanceArray": { + "description": "usage of an array of network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/nicPerformance" + } + }, + "processStatsArray": { + "description": "array of metrics on system processes", + "type": "array", + "items": { + "$ref": "#/definitions/processStats" + } + } + }, + "additionalProperties": false, + "required": [ + "measurementInterval", + "measurementFieldsVersion" + ] + }, + "memoryUsage": { + "description": "memory usage of an identified virtual machine", + "type": "object", + "properties": { + "memoryBuffered": { + "description": "kibibytes of temporary storage for raw disk blocks", + "type": "number" + }, + "memoryCached": { + "description": "kibibytes of memory used for cache", + "type": "number" + }, + "memoryConfigured": { + "description": "kibibytes of memory configured in the virtual machine on which the xNFC reporting the event is running", + "type": "number" + }, + "memoryDemand": { + "description": "host demand in kibibytes", + "type": "number" + }, + "memoryFree": { + "description": "kibibytes of physical RAM left unused by the system", + "type": "number" + }, + "memoryLatencyAvg": { + "description": "Percentage of time the VM is waiting to access swapped or compressed memory", + "type": "number" + }, + "memorySharedAvg": { + "description": "shared memory in kilobytes", + "type": "number" + }, + "memorySlabRecl": { + "description": "the part of the slab that can be reclaimed such as caches measured in kibibytes", + "type": "number" + }, + 
"memorySlabUnrecl": { + "description": "the part of the slab that cannot be reclaimed even when lacking memory measured in kibibytes", + "type": "number" + }, + "memorySwapInAvg": { + "description": "Amount of memory swapped-in from host cache in kibibytes", + "type": "number" + }, + "memorySwapInRateAvg": { + "description": "rate at which memory is swapped from disk into active memory during the interval in kilobytes per second", + "type": "number" + }, + "memorySwapOutAvg": { + "description": "Amount of memory swapped-out to host cache in kibibytes", + "type": "number" + }, + "memorySwapOutRateAvg": { + "description": "rate at which memory is being swapped from active memory to disk during the current interval in kilobytes per second", + "type": "number" + }, + "memorySwapUsedAvg": { + "description": "space used for caching swapped pages in the host cache in kibibytes", + "type": "number" + }, + "memoryUsed": { + "description": "total memory minus the sum of free, buffered, cached and slab memory measured in kibibytes", + "type": "number" + }, + "percentMemoryUsage": { + "description": "Percentage of memory usage; value = (memoryUsed / (memoryUsed + memoryFree) x 100 if denomintor is nonzero, or 0, if otherwise", + "type": "number" + }, + "vmIdentifier": { + "description": "virtual machine identifier associated with the memory metrics", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "memoryFree", + "memoryUsed", + "vmIdentifier" + ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + 
}, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { + "$ref": "#/definitions/gtpPerFlowMetrics" + }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "string", + "enum": [ + "4.0" + ] + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used 
for the flow being reported on", + "type": "integer" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "integer" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "integer" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "flowDirection", + "gtpPerFlowMetrics", + "ipProtocolType", + "ipVersion", + "mobileFlowFieldsVersion", + "otherEndpointIpAddress", + "otherEndpointPort", + "reportingEndpointIpAddr", + "reportingEndpointPort" + ] + }, + "namedHashMap": { + "description": "a hashMap which is associated with and described by a name", + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "hashMap": { + "$ref": "#/definitions/hashMap" + } + }, + "additionalProperties": false, + "required": [ + "name", + "hashMap" + ] + }, + "nicPerformance": { + "description": "describes the performance and errors of an identified network interface card", + 
"type": "object", + "properties": { + "administrativeState": { + "description": "administrative state", + "type": "string", + "enum": [ + "inService", + "outOfService" + ] + }, + "nicIdentifier": { + "description": "nic identification", + "type": "string" + }, + "operationalState": { + "description": "operational state", + "type": "string", + "enum": [ + "inService", + "outOfService" + ] + }, + "receivedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedBroadcastPacketsDelta": { + "description": "Count of broadcast packets received within the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsDelta": { + "description": "Count of discarded packets received within the measurement interval", + "type": "number" + }, + "receivedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedErrorPacketsDelta": { + "description": "Count of error packets received within the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsDelta": { + "description": "Count of multicast packets received within the measurement interval", + "type": "number" + }, + "receivedOctetsAccumulated": { + "description": "Cumulative count of octets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedOctetsDelta": { + "description": "Count of octets received within the measurement interval", + "type": "number" + }, + 
"receivedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedPercentDiscard": { + "description": "Percentage of discarded packets received; value = (receivedDiscardedPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "receivedPercentError": { + "description": "Percentage of error packets received; value = (receivedErrorPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise.", + "type": "number" + }, + "receivedTotalPacketsDelta": { + "description": "Count of all packets received within the measurement interval", + "type": "number" + }, + "receivedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedUnicastPacketsDelta": { + "description": "Count of unicast packets received within the measurement interval", + "type": "number" + }, + "receivedUtilization": { + "description": "Percentage of utilization received; value = (receivedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "speed": { + "description": "Speed configured in mbps", + "type": "number" + }, + "transmittedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedBroadcastPacketsDelta": { + "description": "Count of broadcast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsDelta": { + 
"description": "Count of discarded packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsDelta": { + "description": "Count of error packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsDelta": { + "description": "Count of multicast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedOctetsAccumulated": { + "description": "Cumulative count of octets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedOctetsDelta": { + "description": "Count of octets transmitted within the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsDelta": { + "description": "Count of all packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsDelta": { + "description": "Count of unicast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedPercentDiscard": { + "description": "Percentage of discarded packets transmitted; value = (transmittedDiscardedPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + 
"transmittedPercentError": { + "description": "Percentage of error packets received; value = (transmittedErrorPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "transmittedUtilization": { + "description": "Percentage of utilization transmitted; value = (transmittedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise.", + "type": "number" + }, + "valuesAreSuspect": { + "description": "Indicates whether vNicPerformance values are likely inaccurate due to counter overflow or other condtions", + "type": "string", + "enum": [ + "true", + "false" + ] + } + }, + "additionalProperties": false, + "required": [ + "nicIdentifier", + "valuesAreSuspect" + ] + }, + "notificationFields": { + "description": "notification fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "arrayOfNamedHashMap": { + "$ref": "#/definitions/arrayOfNamedHashMap" + }, + "changeContact": { + "description": "identifier for a contact related to the change", + "type": "string" + }, + "changeIdentifier": { + "description": "system or session identifier associated with the change", + "type": "string" + }, + "changeType": { + "description": "describes what has changed for the entity", + "type": "string" + }, + "newState": { + "description": "new state of the entity", + "type": "string" + }, + "oldState": { + "description": "previous state of the entity", + "type": "string" + }, + "notificationFieldsVersion": { + "description": "version of the notificationFields block", + "type": "string", + "enum": [ + "2.0" + ] + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "changeIdentifier", + "changeType", + "notificationFieldsVersion" + ] + }, + "otherFields": { + "description": "fields for 
events belonging to the 'other' domain of the commonEventHeader domain enumeration", + "type": "object", + "properties": { + "arrayOfNamedHashMap": { + "$ref": "#/definitions/arrayOfNamedHashMap" + }, + "hashMap": { + "$ref": "#/definitions/hashMap" + }, + "jsonObjects": { + "$ref": "#/definitions/arrayOfJsonObject" + }, + "otherFieldsVersion": { + "description": "version of the otherFields block", + "type": "string", + "enum": [ + "3.0" + ] + } + }, + "additionalProperties": false, + "required": [ + "otherFieldsVersion" + ] + }, + "pnfRegistrationFields": { + "description": "hardware device registration fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "lastServiceDate": { + "description": "TS 32.692 dateOfLastService = date of last service; e.g. 15022017", + "type": "string" + }, + "macAddress": { + "description": "MAC address of OAM interface of the unit", + "type": "string" + }, + "manufactureDate": { + "description": "TS 32.692 dateOfManufacture = manufacture date of the unit; 24032016", + "type": "string" + }, + "modelNumber": { + "description": "TS 32.692 versionNumber = version of the unit from vendor; e.g. AJ02. Maps to AAI equip-model", + "type": "string" + }, + "oamV4IpAddress": { + "description": "IPv4 m-plane IP address to be used by the manager to contact the PNF", + "type": "string" + }, + "oamV6IpAddress": { + "description": "IPv6 m-plane IP address to be used by the manager to contact the PNF", + "type": "string" + }, + "pnfRegistrationFieldsVersion": { + "description": "version of the pnfRegistrationFields block", + "type": "string", + "enum": [ + "2.0" + ] + }, + "serialNumber": { + "description": "TS 32.692 serialNumber = serial number of the unit; e.g. 6061ZW3", + "type": "string" + }, + "softwareVersion": { + "description": "TS 32.692 swName = active SW running on the unit; e.g. 
5gDUv18.05.201", + "type": "string" + }, + "unitFamily": { + "description": "TS 32.692 vendorUnitFamilyType = general type of HW unit; e.g. BBU", + "type": "string" + }, + "unitType": { + "description": "TS 32.692 vendorUnitTypeNumber = vendor name for the unit; e.g. Airscale", + "type": "string" + }, + "vendorName": { + "description": "TS 32.692 vendorName = name of manufacturer; e.g. Nokia. Maps to AAI equip-vendor", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "pnfRegistrationFieldsVersion" + ] + }, + "processorDimmAggregateThermalMargin": { + "description": "intelligent platform management interface (ipmi) processor dual inline memory module aggregate thermal margin metrics", + "type": "object", + "properties": { + "processorDimmAggregateThermalMarginIdentifier": { + "description": "identifier for the aggregate thermal margin metrics from the processor dual inline memory module", + "type": "string" + }, + "thermalMargin": { + "description": "the difference between the DIMM's current temperature, in celsius, and the DIMM's throttling thermal trip point", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "processorDimmAggregateThermalMarginIdentifier", + "thermalMargin" + ] + }, + "processStats": { + "description": "metrics on system processes", + "type": "object", + "properties": { + "forkRate": { + "description": "the number of threads created since the last reboot", + "type": "number" + }, + "processIdentifier": { + "description": "processIdentifier", + "type": "string" + }, + "psStateBlocked": { + "description": "the number of processes in a blocked state", + "type": "number" + }, + "psStatePaging": { + "description": "the number of processes in a paging state", + "type": "number" + }, + "psStateRunning": { + "description": "the number of processes in a running state", + "type": "number" + }, + "psStateSleeping": { + "description": "the number of processes in a sleeping state", + "type": "number" 
+ }, + "psStateStopped": { + "description": "the number of processes in a stopped state", + "type": "number" + }, + "psStateZombie": { + "description": "the number of processes in a zombie state", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "processIdentifier" + ] + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "messageId", + "text" + ] + }, + "sipSignalingFields": { + "description": "sip signaling fields", + "type": "object", + "properties": { + "additionalInformation": { + "$ref": "#/definitions/hashMap" + }, + "compressedSip": { + "description": "the full SIP request/response including headers and bodies", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "localIpAddress": { + "description": "IP address on xNF", + "type": "string" + }, + "localPort": { + "description": "port on xNF", + "type": "string" + }, + "remoteIpAddress": { + "description": "IP address of peer endpoint", + "type": "string" + }, + "remotePort": { + "description": "port of peer endpoint", + "type": "string" + }, + "sipSignalingFieldsVersion": { + "description": "version of the sipSignalingFields 
block", + "type": "string", + "enum": [ + "3.0" + ] + }, + "summarySip": { + "description": "the SIP Method or Response ('INVITE', '200 OK', 'BYE', etc)", + "type": "string" + }, + "vendorNfNameFields": { + "$ref": "#/definitions/vendorNfNameFields" + } + }, + "additionalProperties": false, + "required": [ + "correlator", + "localIpAddress", + "localPort", + "remoteIpAddress", + "remotePort", + "sipSignalingFieldsVersion", + "vendorNfNameFields" + ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "string", + "enum": [ + "4.0" + ] + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "newState", + "oldState", + "stateChangeFieldsVersion", + "stateInterface" + ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "integer" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + 
"type": "string", + "enum": [ + "4.0" + ] + }, + "syslogMsg": { + "description": "syslog message", + "type": "string" + }, + "syslogMsgHost": { + "description": "hostname parsed from non-VES syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "integer" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8", + "type": "string", + "enum": [ + "Alert", + "Critical", + "Debug", + "Emergency", + "Error", + "Info", + "Notice", + "Warning" + ] + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogTs": { + "description": "timestamp parsed from non-VES syslog message", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "eventSourceType", + "syslogFieldsVersion", + "syslogMsg", + "syslogTag" + ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { + "$ref": "#/definitions/hashMap" + }, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + 
"$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of eventIds associated with the event being reported", + "type": "array", + "items": { + "type": "string" + } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": "string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "string", + "enum": [ + "4.0" + ] + } + }, + "additionalProperties": false, + "required": [ 
+ "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp", + "thresholdCrossingFieldsVersion" + ] + }, + "vendorNfNameFields": { + "description": "provides vendor, nf and nfModule identifying information", + "type": "object", + "properties": { + "vendorName": { + "description": "network function vendor name", + "type": "string" + }, + "nfModuleName": { + "description": "name of the nfModule generating the event", + "type": "string" + }, + "nfName": { + "description": "name of the network function generating the event", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "vendorName" + ] + }, + "voiceQualityFields": { + "description": "provides statistics related to customer facing voice products", + "type": "object", + "properties": { + "additionalInformation": { + "$ref": "#/definitions/hashMap" + }, + "calleeSideCodec": { + "description": "callee codec for the call", + "type": "string" + }, + "callerSideCodec": { + "description": "caller codec for the call", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "endOfCallVqmSummaries": { + "$ref": "#/definitions/endOfCallVqmSummaries" + }, + "phoneNumber": { + "description": "phone number associated with the correlator", + "type": "string" + }, + "midCallRtcp": { + "description": "Base64 encoding of the binary RTCP data excluding Eth/IP/UDP headers", + "type": "string" + }, + "vendorNfNameFields": { + "$ref": "#/definitions/vendorNfNameFields" + }, + "voiceQualityFieldsVersion": { + "description": "version of the voiceQualityFields block", + "type": "string", + "enum": [ + "4.0" + ] + } + }, + "additionalProperties": false, + "required": [ + "calleeSideCodec", + "callerSideCodec", + "correlator", + "midCallRtcp", + "vendorNfNameFields", + "voiceQualityFieldsVersion" + ] + } + } + } +} diff --git 
a/datafile-app-server/dpo/data-formats/XML-1.0.0-dataformat.json b/datafile-app-server/dpo/data-formats/XML-1.0.0-dataformat.json new file mode 100644 index 00000000..8e8550fc --- /dev/null +++ b/datafile-app-server/dpo/data-formats/XML-1.0.0-dataformat.json @@ -0,0 +1,11 @@ +{ + "self": { + "name": "3GPP_XML", + "description": "Unstructured 3GPP XML", + "version": "1.0.0" + }, + "dataformatversion": "1.0.0", + "unstructured": { + "encoding": "UTF-8" + } +}
\ No newline at end of file diff --git a/datafile-app-server/dpo/policy.yaml b/datafile-app-server/dpo/policy.yaml new file mode 100644 index 00000000..28f9db15 --- /dev/null +++ b/datafile-app-server/dpo/policy.yaml @@ -0,0 +1,26 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +node_types: + policy.nodes.Root: + derived_from: tosca.nodes.Root + properties: + policyDescription: + required: false + type: string + policyName: + required: true + type: string + policyScope: + required: true + type: string + policyVersion: + required: true + type: string + policy.nodes.dcaegen2.collectors.datafile.datafile-app-server: + derived_from: policy.nodes.Root + properties: + buscontroller_feed_publishing_endpoint: + type: string + description: DMAAP Bus Controller feed endpoint + datafile.policy: + type: string + description: datafile Policy JSON as string diff --git a/datafile-app-server/dpo/spec/datafile-component-spec.json b/datafile-app-server/dpo/spec/datafile-component-spec.json new file mode 100644 index 00000000..43ac27b5 --- /dev/null +++ b/datafile-app-server/dpo/spec/datafile-component-spec.json @@ -0,0 +1,96 @@ +{ + "self": { + "name": "dcaegen2.collectors.datafile.datafile-app-server", + "version": "1.0.0", + "description": "Docker application to collect log file from PNF", + "component_type": "docker" + }, + "streams": { + "subscribes": [ + { + "type": "message_router", + "config_key": "datafile_subscribe_mr", + "format": "VES_specification", + "version": "7.30.1" + } + ], + "publishes": [ + { + "type": "data_router", + "config_key": "datafile_publish_dr", + "format": "3GPP_XML", + "version": "1.0.0" + } + ] + }, + "services": { + "calls": [], + "provides": [] + }, + "auxilary": { + "healthcheck": { + "type": "http", + "interval": "15s", + "timeout": "1s", + "endpoint": "/heartbeat" + } + }, + "artifacts": [ + { + "uri": "nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server:latest", + "type": "docker image" + } + ], + 
"parameters": [ + { + "name": "service_name", + "value": "datafile", + "description": "Name of the service", + "designer_editable": true, + "sourced_at_deployment": false, + "policy_editable": false, + "type": "string", + "required": true + }, + { + "name": "buscontroller_feed_publishing_endpoint", + "value": "http://dmaap-bc.onap.svc.cluster.local:8080/webapi/feeds", + "description": "DMAAP Bus Controller feed endpoint", + "designer_editable": true, + "sourced_at_deployment": false, + "policy_editable": true, + "type": "string", + "required": true + }, + { + "name": "dmaap_dr_feed_id", + "value": "bulk_pm_feed", + "description": "ID of the data router feed that the datafile collector will publish", + "designer_editable": true, + "sourced_at_deployment": false, + "policy_editable": false, + "type": "string", + "required": true + }, + { + "name": "streams_consumer.datafile_consume_mr.message_router_topic", + "value": "/events/unauthenticated.VES_NOTIFICATION_OUTPUT", + "description": "datafile collector consume VES event to message router topic ", + "designer_editable": true, + "sourced_at_deployment": false, + "policy_editable": false, + "type": "string", + "required": true + }, + { + "name": "datafile.policy", + "value": "", + "description": "datafile Policy JSON as string", + "designer_editable": false, + "sourced_at_deployment": false, + "policy_editable": true, + "type": "string", + "required": true + } + ] +} diff --git a/datafile-app-server/dpo/spec/dmaap.json b/datafile-app-server/dpo/spec/dmaap.json new file mode 100644 index 00000000..64ceb880 --- /dev/null +++ b/datafile-app-server/dpo/spec/dmaap.json @@ -0,0 +1,14 @@ +{ + "datafile_subscribe_mr": { + "type": "message_router", + "dmaap_info": { + "topic_url": "http://message-router.onap.svc.cluster.local:3904/events/unauthenticated.VES_NOTIFICATION_OUTPUT" + } + }, + "datafile_publish_dr": { + "type": "data_router", + "dmaap_info": { + "publish_url": "" + } + } +} diff --git 
a/datafile-app-server/dpo/tosca_models/schema.yaml b/datafile-app-server/dpo/tosca_models/schema.yaml new file mode 100644 index 00000000..4b02f8b7 --- /dev/null +++ b/datafile-app-server/dpo/tosca_models/schema.yaml @@ -0,0 +1,568 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +capability_types: + dcae.capabilities.cdapHost: + derived_from: tosca.capabilities.Root + dcae.capabilities.composition.host: + derived_from: tosca.capabilities.Root + properties: + location_id: + type: string + service_id: + type: string + dcae.capabilities.dmmap.feed: + derived_from: tosca.capabilities.Root + properties: + feed_name: + type: string + location: + type: string + node_name: + type: string + dcae.capabilities.dmmap.topic: + derived_from: tosca.capabilities.Root + properties: + aaf_password: + type: string + aaf_username: + type: string + client_role: + type: string + location: + type: string + node_name: + type: string + topic_name: + type: string + dcae.capabilities.dockerHost: + derived_from: tosca.capabilities.Root + dcae.capabilities.policy: + derived_from: tosca.capabilities.Root + dcae.capabilities.service.provide: + derived_from: tosca.capabilities.Root + properties: + request_format: + type: string + request_version: + type: string + response_format: + type: string + response_version: + type: string + service_endpoint: + type: string + service_name: + type: string + verb: + type: string + dcae.capabilities.stream.subscribe: + derived_from: tosca.capabilities.Root + properties: + format: + type: string + route: + type: string + version: + type: string +relationship_types: + cloudify.relationships.depends_on: + derived_from: tosca.relationships.Root + dcae.relationships.component_contained_in: + derived_from: tosca.relationships.Root + dcae.relationships.publish_events: + derived_from: tosca.relationships.Root + dcae.relationships.publish_files: + derived_from: tosca.relationships.Root + dcae.relationships.rework_connected_to: + derived_from: 
tosca.relationships.Root + dcae.relationships.subscribe_to_events: + derived_from: tosca.relationships.Root + dcae.relationships.subscribe_to_files: + derived_from: tosca.relationships.Root +node_types: + cloudify.dcae.nodes.Root: + derived_from: tosca.nodes.Root + dcae.nodes.ContainerizedComponent: + derived_from: cloudify.dcae.nodes.Root + properties: + application_config: + required: true + type: map + docker_config: + type: map + image: + required: true + type: string + dcae.nodes.ContainerizedServiceComponent: + attributes: + service_component_name: + type: string + derived_from: dcae.nodes.ContainerizedComponent + properties: + location_id: + required: true + type: string + service_component_type: + required: true + type: string + dcae.nodes.ContainerizedServiceComponentUsingDmaap: + derived_from: dcae.nodes.ContainerizedServiceComponent + properties: + streams_publishes: + type: list + streams_subscribes: + type: list + dcae.nodes.DockerContainerForComponents: + attributes: + service_component_name: + type: string + derived_from: cloudify.dcae.nodes.Root + interfaces: + cloudify.interfaces.lifecycle: + start: + inputs: + host_config: + type: map + stop: + inputs: + cleanup_image: + type: boolean + type: tosca.interfaces.Root + properties: + application_config: + required: true + type: map + docker_config: + type: map + image: + required: true + type: string + location_id: + required: true + type: string + service_component_type: + required: true + type: string + requirements: + - host: + capability: dcae.capabilities.dockerHost + relationship: dcae.relationships.component_contained_in + dcae.nodes.DockerContainerForComponentsUsingDmaap: + derived_from: dcae.nodes.DockerContainerForComponents + properties: + application_config: + required: true + type: map + docker_config: + type: map + image: + required: true + type: string + location_id: + required: true + type: string + service_component_type: + required: true + type: string + streams_publishes: + type: 
list + streams_subscribes: + type: list + attributes: + service_component_name: + type: string + requirements: + - host: + capability: dcae.capabilities.dockerHost + relationship: dcae.relationships.component_contained_in + - stream_subscribe_0: + capability: dcae.capabilities.dmmap.topic + relationship: dcae.relationships.subscribe_to_events + - stream_publish_0: + capability: dcae.capabilities.dmmap.feed + relationship: dcae.relationships.publish_files + - policy: + capability: dcae.capabilities.policy + relationship: cloudify.relationships.depends_on + interfaces: + cloudify.interfaces.lifecycle: + start: + inputs: + host_config: + type: map + stop: + inputs: + cleanup_image: + type: boolean + type: tosca.interfaces.Root + dcae.nodes.ExistingFeed: + capabilities: + feed: + type: dcae.capabilities.dmmap.feed + derived_from: cloudify.dcae.nodes.Root + properties: + feed_id: + type: string + dcae.nodes.ExistingTopic: + capabilities: + topic: + type: dcae.capabilities.dmmap.topic + derived_from: cloudify.dcae.nodes.Root + properties: + fqtn: + type: string + dcae.nodes.ExternalTargetFeed: + capabilities: + feed: + type: dcae.capabilities.dmmap.feed + derived_from: cloudify.dcae.nodes.Root + properties: + url: + type: string + username: + type: string + userpw: + type: string + dcae.nodes.Feed: + capabilities: + feed: + type: dcae.capabilities.dmmap.feed + derived_from: cloudify.dcae.nodes.Root + properties: + feed_name: + type: string + dcae.nodes.MicroService.cdap: + attributes: + service_component_name: + type: string + derived_from: cloudify.dcae.nodes.Root + interfaces: + cloudify.interfaces.lifecycle: + create: + inputs: + connected_broker_dns_name: + type: string + type: tosca.interfaces.Root + properties: + app_config: + required: false + type: map + app_preferences: + required: false + type: map + artifact_name: + required: false + type: string + artifact_version: + required: false + type: string + connections: + required: false + type: map + jar_url: + 
type: string + namespace: + required: false + type: string + program_preferences: + required: false + type: list + programs: + required: false + type: list + service_component_type: + type: string + service_endpoints: + required: false + type: list + streamname: + required: false + type: string + requirements: + - host: + capability: dcae.capabilities.cdapHost + relationship: dcae.relationships.component_contained_in + dcae.nodes.SelectedDockerHost: + capabilities: + host: + type: dcae.capabilities.dockerHost + derived_from: cloudify.dcae.nodes.Root + properties: + docker_host_override: + type: string + location_id: + required: true + type: string + dcae.nodes.Topic: + capabilities: + topic: + type: dcae.capabilities.dmmap.topic + derived_from: cloudify.dcae.nodes.Root + properties: + topic_name: + type: string + dcae.nodes.composition.virtual: + capabilities: + host: + type: dcae.capabilities.composition.host + derived_from: tosca.nodes.Root + properties: + location_id: + required: true + type: string + service_id: + required: true + type: string + dcae.nodes.policies: + capabilities: + policy: + type: dcae.capabilities.policy + derived_from: cloudify.dcae.nodes.Root + properties: + policy_filter: + type: map + dcae.nodes.policy: + capabilities: + policy: + type: dcae.capabilities.policy + derived_from: cloudify.dcae.nodes.Root + properties: + policy_id: + required: true + type: string + policy.nodes.Root: + derived_from: tosca.nodes.Root + properties: + policyDescription: + required: false + type: string + policyName: + required: true + type: string + policyScope: + required: true + type: string + policyVersion: + required: true + type: string + tosca.dcae.nodes.Root: + derived_from: tosca.nodes.Root + tosca.dcae.nodes.cdapApp: + attributes: + service_component_name: + type: string + derived_from: tosca.dcae.nodes.Root + properties: + connected_broker_dns_name: + default: cdap_broker + required: true + type: string + jar_url: + required: true + type: string + 
service_component_type: + required: true + type: string + requirements: + - host: + capability: dcae.capabilities.cdapHost + relationship: dcae.relationships.component_contained_in + tosca.dcae.nodes.dmaap.existingFeed: + capabilities: + feed: + type: dcae.capabilities.dmmap.feed + derived_from: tosca.dcae.nodes.Root + properties: + feed_id: + type: string + location: + type: string + node_name: + type: string + requirements: + - composition: + capability: dcae.capabilities.composition.host + tosca.dcae.nodes.dmaap.existingTopic: + capabilities: + topic: + type: dcae.capabilities.dmmap.topic + derived_from: tosca.dcae.nodes.Root + properties: + aaf_password: + type: string + aaf_username: + type: string + client_role: + type: string + fqtn: + type: string + location: + type: string + node_name: + type: string + requirements: + - composition: + capability: dcae.capabilities.composition.host + tosca.dcae.nodes.dmaap.externalTargetFeed: + capabilities: + feed: + type: dcae.capabilities.dmmap.feed + derived_from: tosca.dcae.nodes.Root + properties: + location: + type: string + node_name: + type: string + url: + type: string + username: + type: string + userpw: + type: string + requirements: + - composition: + capability: dcae.capabilities.composition.host + tosca.dcae.nodes.dmaap.feed: + capabilities: + feed: + type: dcae.capabilities.dmmap.feed + derived_from: tosca.dcae.nodes.Root + properties: + feed_name: + type: string + location: + type: string + node_name: + type: string + requirements: + - composition: + capability: dcae.capabilities.composition.host + tosca.dcae.nodes.dmaap.topic: + capabilities: + topic: + type: dcae.capabilities.dmmap.topic + derived_from: tosca.dcae.nodes.Root + properties: + aaf_password: + type: string + aaf_username: + type: string + client_role: + type: string + location: + type: string + node_name: + type: string + topic_name: + type: string + requirements: + - composition: + capability: dcae.capabilities.composition.host + 
tosca.dcae.nodes.dockerApp: + attributes: + service_component_name: + type: string + derived_from: tosca.dcae.nodes.Root + properties: + cpu_period: + default: 10000 + required: true + type: integer + cpu_quota: + default: 30000 + required: true + type: integer + cpu_shares: + default: 256 + required: true + type: integer + image: + required: true + type: string + location_id: + required: true + type: string + mem_limit: + default: 500m + required: true + type: string + restart_policy.Name: + default: on-failure + required: true + type: string + restart_policy.max_retry_counts: + default: 3 + required: true + type: integer + service_component_type: + required: true + type: string + requirements: + - host: + capability: dcae.capabilities.dockerHost + relationship: dcae.relationships.component_contained_in + - composition: + capability: dcae.capabilities.composition.host + tosca.dcae.nodes.dockerApp.dcaegen2.collectors.datafile.datafile-app-server: + derived_from: tosca.dcae.nodes.dockerApp + properties: + buscontroller_feed_publishing_endpoint: + type: string + datafile.policy: + type: string + dmaap_dr_feed_id: + type: string + service_name: + type: string + streams_consumer.datafile_consume_mr.message_router_topic: + type: string + requirements: + - stream_subscribe_0: + capability: dcae.capabilities.dmmap.topic + relationship: dcae.relationships.subscribe_to_events + - stream_publish_0: + capability: dcae.capabilities.dmmap.feed + relationship: dcae.relationships.publish_files + - policy: + capability: dcae.capabilities.policy + relationship: cloudify.relationships.depends_on + tosca.dcae.nodes.dockerHost: + capabilities: + host: + type: dcae.capabilities.dockerHost + derived_from: tosca.dcae.nodes.Root + properties: + docker_host_override: + type: string + location_id: + required: true + type: string + requirements: + - composition: + capability: dcae.capabilities.composition.host + tosca.dcae.nodes.policies: + capabilities: + policy: + type: 
dcae.capabilities.policy + derived_from: tosca.dcae.nodes.Root + properties: + configAttributes: + type: string + configName: + type: string + onapName: + type: string + policyName: + type: string + unique: + type: boolean + tosca.dcae.nodes.policy: + capabilities: + policy: + type: dcae.capabilities.policy + derived_from: tosca.dcae.nodes.Root + properties: + policy_id: + required: true + type: string + policy_name: + type: string diff --git a/datafile-app-server/dpo/tosca_models/template.yaml b/datafile-app-server/dpo/tosca_models/template.yaml new file mode 100644 index 00000000..89955b2e --- /dev/null +++ b/datafile-app-server/dpo/tosca_models/template.yaml @@ -0,0 +1,103 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +metadata: + template_name: dcaegen2.collectors.datafile.datafile-app-server +imports: +- schema: schema.yaml +topology_template: + inputs: + topic0_aaf_password: + type: string + topic0_aaf_username: + type: string + topic0_client_role: + type: string + node_templates: + dcaegen2.collectors.datafile.datafile-app-server: + type: tosca.dcae.nodes.dockerApp.dcaegen2.collectors.datafile.datafile-app-server + properties: + buscontroller_feed_publishing_endpoint: http://dmaap-bc.onap.svc.cluster.local:8080/webapi/feeds + datafile.policy: '' + dmaap_dr_feed_id: bulk_pm_feed + location_id: + get_property: + - SELF + - composition + - location_id + service_name: datafile + streams_consumer.datafile_consume_mr.message_router_topic: /events/unauthenticated.VES_NOTIFICATION_OUTPUT + requirements: + - stream_subscribe_0: topic0 + - stream_publish_0: feed1 + - policy: policy_0 + feed1: + type: tosca.dcae.nodes.dmaap.feed + properties: + feed_name: '' + location: + get_property: + - SELF + - composition + - location_id + node_name: __GET_NODE_NAME__ + capabilities: + feed: + properties: + feed_name: + get_property: + - SELF + - feed_name + location: + get_property: + - SELF + - location + node_name: + get_property: + - SELF + - node_name + policy_0: + 
type: tosca.dcae.nodes.policy + properties: + policy_name: policy.nodes.dcaegen2.collectors.datafile.datafile-app-server + topic0: + type: tosca.dcae.nodes.dmaap.topic + properties: + aaf_password: + get_input: topic0_aaf_password + aaf_username: + get_input: topic0_aaf_username + client_role: + get_input: topic0_client_role + location: + get_property: + - SELF + - composition + - location_id + node_name: __GET_NODE_NAME__ + topic_name: '' + capabilities: + topic: + properties: + aaf_password: + get_property: + - SELF + - aaf_password + aaf_username: + get_property: + - SELF + - aaf_username + client_role: + get_property: + - SELF + - client_role + location: + get_property: + - SELF + - location + node_name: + get_property: + - SELF + - node_name + topic_name: + get_property: + - SELF + - topic_name diff --git a/datafile-app-server/dpo/tosca_models/translate.yaml b/datafile-app-server/dpo/tosca_models/translate.yaml new file mode 100644 index 00000000..ef9b40f0 --- /dev/null +++ b/datafile-app-server/dpo/tosca_models/translate.yaml @@ -0,0 +1,153 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +metadata: + template_name: dcaegen2.collectors.datafile.datafile-app-server_translate +imports: +- schema: schema.yaml +topology_template: + inputs: + buscontroller_feed_publishing_endpoint: + type: string + cpu_period: + type: integer + default: 10000 + cpu_quota: + type: integer + default: 30000 + cpu_shares: + type: integer + default: 256 + datafile.policy: + type: string + dmaap_dr_feed_id: + type: string + image: + type: string + location_id: + type: string + mem_limit: + type: string + default: 500m + restart_policy.Name: + type: string + default: on-failure + restart_policy.max_retry_counts: + type: integer + default: 3 + service_component_type: + type: string + service_name: + type: string + streams_consumer.datafile_consume_mr.message_router_topic: + type: string + substitution_mappings: + node_type: 
tosca.dcae.nodes.dockerApp.dcaegen2.collectors.datafile.datafile-app-server + capabilities: {} + requirements: + host: + - dcaegen2.collectors.datafile.datafile-app-server + - host + policy: + - dcaegen2.collectors.datafile.datafile-app-server + - policy + stream_publish_0: + - dcaegen2.collectors.datafile.datafile-app-server + - stream_publish_0 + stream_subscribe_0: + - dcaegen2.collectors.datafile.datafile-app-server + - stream_subscribe_0 + node_templates: + dcaegen2.collectors.datafile.datafile-app-server: + type: dcae.nodes.DockerContainerForComponentsUsingDmaap + properties: + application_config: + buscontroller_feed_publishing_endpoint: + get_input: buscontroller_feed_publishing_endpoint + datafile.policy: + get_input: datafile.policy + dmaap_dr_feed_id: + get_input: dmaap_dr_feed_id + service_name: + get_input: service_name + services_calls: {} + streams_consumer.datafile_consume_mr.message_router_topic: + get_input: streams_consumer.datafile_consume_mr.message_router_topic + streams_publishes: + datafile_publish_dr: + dmaap_info: + concat: + - '<<' + - get_property: + - SELF + - stream_publish_0 + - node_name + - '>>' + type: data_router + streams_subscribes: + datafile_subscribe_mr: + aaf_password: + get_property: + - SELF + - stream_subscribe_0 + - aaf_password + aaf_username: + get_property: + - SELF + - stream_subscribe_0 + - aaf_username + dmaap_info: + concat: + - '<<' + - get_property: + - SELF + - stream_subscribe_0 + - node_name + - '>>' + type: message_router + docker_config: + healthcheck: + endpoint: /heartbeat + interval: 15s + timeout: 1s + type: http + image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server:latest + location_id: + get_input: location_id + service_component_type: dcaegen2.collectors.datafile.datafile-app-server + streams_publishes: + - location: + get_property: + - SELF + - stream_publish_0 + - location + name: + get_property: + - SELF + - stream_publish_0 + - node_name + type: data_router 
+ streams_subscribes: + - client_role: + get_property: + - SELF + - stream_subscribe_0 + - client_role + location: + get_property: + - SELF + - stream_subscribe_0 + - location + name: + get_property: + - SELF + - stream_subscribe_0 + - node_name + type: message_router + interfaces: + cloudify.interfaces.lifecycle: + type: tosca.interfaces.Root + start: + inputs: + host_config: null + stop: + inputs: + cleanup_image: null diff --git a/datafile-app-server/pom.xml b/datafile-app-server/pom.xml index 4e8f5c58..ace0389c 100644 --- a/datafile-app-server/pom.xml +++ b/datafile-app-server/pom.xml @@ -1,7 +1,7 @@ <?xml version="1.0" encoding="UTF-8"?> <!-- ~ ============LICENSE_START======================================================= - ~ Copyright (C) 2018 NOKIA Intellectual Property, 2018 Nordix Foundation. All rights reserved. + ~ Copyright (C) 2018 NOKIA Intellectual Property, 2018-2019 Nordix Foundation. All rights reserved. ~ ================================================================================ ~ Licensed under the Apache License, Version 2.0 (the "License"); ~ you may not use this file except in compliance with the License. 
@@ -61,7 +61,7 @@ <imageTags> <tag>latest</tag> </imageTags> - <baseImage>openjdk:8-jre-alpine</baseImage> + <dockerDirectory>${project.basedir}/src/main/docker</dockerDirectory> <resources> <resource> <directory>${project.build.directory}</directory> @@ -74,11 +74,6 @@ <include>*</include> </resource> </resources> - <exposes> - <expose>8100</expose> - <expose>8433</expose> - </exposes> - <cmd>["java", "-jar", "/target/${project.artifactId}.jar"]</cmd> </configuration> <executions> <execution> @@ -215,5 +210,10 @@ <groupId>io.springfox</groupId> <artifactId>springfox-swagger-ui</artifactId> </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-configuration-processor</artifactId> + <optional>true</optional> + </dependency> </dependencies> </project> diff --git a/datafile-app-server/src/main/docker/Dockerfile b/datafile-app-server/src/main/docker/Dockerfile new file mode 100644 index 00000000..6ab30cc2 --- /dev/null +++ b/datafile-app-server/src/main/docker/Dockerfile @@ -0,0 +1,41 @@ +# +# ============LICENSE_START======================================================= +# Copyright (C) 2019 Nordix Foundation. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 +# ============LICENSE_END========================================================= +# +FROM openjdk:8-jre-alpine + +WORKDIR /opt/app/datafile +RUN mkdir -p /var/log/ONAP + +ADD /target/datafile-app-server.jar /opt/app/datafile/ + +ADD /config/application.yaml /opt/app/datafile/config/ +ADD /config/cacerts /opt/app/datafile/config/ +ADD /config/datafile_endpoints.json /opt/app/datafile/config/ +ADD /config/ftpKey.jks /opt/app/datafile/config/ +ADD /config/keystore /opt/app/datafile/config/ + +EXPOSE 8100 8433 + +RUN addgroup -S onap && adduser -S datafile -G onap +RUN chown -R datafile:onap /opt/app/datafile +RUN chown -R datafile:onap /var/log/ONAP + +USER datafile + +ENTRYPOINT ["/usr/bin/java", "-jar", "/opt/app/datafile/datafile-app-server.jar"]
\ No newline at end of file diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/AppConfig.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/AppConfig.java index 5bbacb14..40de33dd 100644 --- a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/AppConfig.java +++ b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/AppConfig.java @@ -20,14 +20,21 @@ package org.onap.dcaegen2.collectors.datafile.configuration; import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.DmaapConsumerConfiguration; import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.DmaapPublisherConfiguration; -import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.ImmutableDmaapConsumerConfiguration; -import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.ImmutableDmaapPublisherConfiguration; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; +import java.io.*; +import java.util.ServiceLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.stereotype.Component; - -import java.util.Optional; -import java.util.function.Predicate; +import javax.validation.constraints.NotEmpty; +import javax.validation.constraints.NotNull; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.gson.JsonSyntaxException; +import com.google.gson.TypeAdapterFactory; /** * @author <a href="mailto:przemyslaw.wasala@nokia.com">Przemysław Wąsala</a> on 3/23/18 @@ -35,199 +42,95 @@ import java.util.function.Predicate; */ @Component 
-@Configuration -public class AppConfig extends DatafileAppConfig { - - private static Predicate<String> isEmpty = String::isEmpty; - @Value("${dmaap.dmaapConsumerConfiguration.dmaapHostName:}") - public String consumerDmaapHostName; - - @Value("${dmaap.dmaapConsumerConfiguration.dmaapPortNumber:}") - public Integer consumerDmaapPortNumber; - - @Value("${dmaap.dmaapConsumerConfiguration.dmaapTopicName:}") - public String consumerDmaapTopicName; - - @Value("${dmaap.dmaapConsumerConfiguration.dmaapProtocol:}") - public String consumerDmaapProtocol; - - @Value("${dmaap.dmaapConsumerConfiguration.dmaapUserName:}") - public String consumerDmaapUserName; - - @Value("${dmaap.dmaapConsumerConfiguration.dmaapUserPassword:}") - public String consumerDmaapUserPassword; - - @Value("${dmaap.dmaapConsumerConfiguration.dmaapContentType:}") - public String consumerDmaapContentType; - - @Value("${dmaap.dmaapConsumerConfiguration.consumerId:}") - public String consumerId; - - @Value("${dmaap.dmaapConsumerConfiguration.consumerGroup:}") - public String consumerGroup; - - @Value("${dmaap.dmaapConsumerConfiguration.timeoutMs:}") - public Integer consumerTimeoutMs; - - @Value("${dmaap.dmaapConsumerConfiguration.message-limit:}") - public Integer consumerMessageLimit; - - @Value("${dmaap.dmaapProducerConfiguration.dmaapHostName:}") - public String producerDmaapHostName; - - @Value("${dmaap.dmaapProducerConfiguration.dmaapPortNumber:}") - public Integer producerDmaapPortNumber; - - @Value("${dmaap.dmaapProducerConfiguration.dmaapTopicName:}") - public String producerDmaapTopicName; +@EnableConfigurationProperties +@ConfigurationProperties("app") +public class AppConfig { - @Value("${dmaap.dmaapProducerConfiguration.dmaapProtocol:}") - public String producerDmaapProtocol; + private static final String CONFIG = "configs"; + private static final String DMAAP = "dmaap"; + private static final String DMAAP_PRODUCER = "dmaapProducerConfiguration"; + private static final String DMAAP_CONSUMER = 
"dmaapConsumerConfiguration"; + private static final String FTP = "ftp"; + private static final String FTPES_CONFIGURATION = "ftpesConfiguration"; + private static final String SECURITY = "security"; + private static final Logger logger = LoggerFactory.getLogger(AppConfig.class); - @Value("${dmaap.dmaapProducerConfiguration.dmaapUserName:}") - public String producerDmaapUserName; + DmaapConsumerConfiguration dmaapConsumerConfiguration; - @Value("${dmaap.dmaapProducerConfiguration.dmaapUserPassword:}") - public String producerDmaapUserPassword; + DmaapPublisherConfiguration dmaapPublisherConfiguration; - @Value("${dmaap.dmaapProducerConfiguration.dmaapContentType:}") - public String producerDmaapContentType; + FtpesConfig ftpesConfig; - @Value("${ftp.ftpesConfiguration.keyCert:}") - public String keyCert; + @NotEmpty + private String filepath; - @Value("${ftp.ftpesConfiguration.keyPassword:}") - public String keyPassword; - - @Value("${ftp.ftpesConfiguration.trustedCA:}") - public String trustedCA; + public DmaapConsumerConfiguration getDmaapConsumerConfiguration() { + return dmaapConsumerConfiguration; + } - @Value("${ftp.ftpesConfiguration.trustedCAPassword:}") - public String trustedCAPassword; + public DmaapPublisherConfiguration getDmaapPublisherConfiguration() { + return dmaapPublisherConfiguration; + } - @Value("${security.trustStorePath:}") - public String trustStorePath; + public FtpesConfig getFtpesConfiguration() { + return ftpesConfig; + } - @Value("${security.trustStorePasswordPath:}") - public String trustStorePasswordPath; + public void initFileStreamReader() { + + GsonBuilder gsonBuilder = new GsonBuilder(); + ServiceLoader.load(TypeAdapterFactory.class).forEach(gsonBuilder::registerTypeAdapterFactory); + JsonParser parser = new JsonParser(); + JsonObject jsonObject; + try (InputStream inputStream = getInputStream(filepath)) { + JsonElement rootElement = getJsonElement(parser, inputStream); + if (rootElement.isJsonObject()) { + jsonObject = 
rootElement.getAsJsonObject(); + ftpesConfig = deserializeType(gsonBuilder, + jsonObject.getAsJsonObject(CONFIG).getAsJsonObject(FTP).getAsJsonObject(FTPES_CONFIGURATION), + FtpesConfig.class); + dmaapConsumerConfiguration = deserializeType(gsonBuilder, concatenateJsonObjects( + jsonObject.getAsJsonObject(CONFIG).getAsJsonObject(DMAAP).getAsJsonObject(DMAAP_CONSUMER), + rootElement.getAsJsonObject().getAsJsonObject(CONFIG).getAsJsonObject(SECURITY)), + DmaapConsumerConfiguration.class); + + dmaapPublisherConfiguration = deserializeType(gsonBuilder, concatenateJsonObjects( + jsonObject.getAsJsonObject(CONFIG).getAsJsonObject(DMAAP).getAsJsonObject(DMAAP_PRODUCER), + rootElement.getAsJsonObject().getAsJsonObject(CONFIG).getAsJsonObject(SECURITY)), + DmaapPublisherConfiguration.class); + } + } catch (IOException e) { + logger.error("Problem with file loading, file: {}", filepath, e); + } catch (JsonSyntaxException e) { + logger.error("Problem with Json deserialization", e); + } + } - @Value("${security.keyStorePath:}") - public String keyStorePath; + JsonElement getJsonElement(JsonParser parser, InputStream inputStream) { + return parser.parse(new InputStreamReader(inputStream)); + } - @Value("${security.keyStorePasswordPath:}") - public String keyStorePasswordPath; + private <T> T deserializeType(@NotNull GsonBuilder gsonBuilder, @NotNull JsonObject jsonObject, + @NotNull Class<T> type) { + return gsonBuilder.create().fromJson(jsonObject, type); + } - @Value("${security.enableDmaapCertAuth:}") - public Boolean enableDmaapCertAuth; + InputStream getInputStream(@NotNull String filepath) throws IOException { + return new BufferedInputStream(new FileInputStream(filepath)); + } - @Override - public DmaapConsumerConfiguration getDmaapConsumerConfiguration() { - return new ImmutableDmaapConsumerConfiguration.Builder() - .dmaapUserPassword( - Optional.ofNullable(consumerDmaapUserPassword).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.dmaapUserPassword())) - 
.dmaapUserName( - Optional.ofNullable(consumerDmaapUserName).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.dmaapUserName())) - .dmaapHostName( - Optional.ofNullable(consumerDmaapHostName).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.dmaapHostName())) - .dmaapPortNumber( - Optional.ofNullable(consumerDmaapPortNumber).filter(p -> !p.toString().isEmpty()) - .orElse(dmaapConsumerConfiguration.dmaapPortNumber())) - .dmaapProtocol( - Optional.ofNullable(consumerDmaapProtocol).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.dmaapProtocol())) - .dmaapContentType( - Optional.ofNullable(consumerDmaapContentType).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.dmaapContentType())) - .dmaapTopicName( - Optional.ofNullable(consumerDmaapTopicName).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.dmaapTopicName())) - .messageLimit( - Optional.ofNullable(consumerMessageLimit).filter(p -> !p.toString().isEmpty()) - .orElse(dmaapConsumerConfiguration.messageLimit())) - .timeoutMs(Optional.ofNullable(consumerTimeoutMs).filter(p -> !p.toString().isEmpty()) - .orElse(dmaapConsumerConfiguration.timeoutMs())) - .consumerGroup(Optional.ofNullable(consumerGroup).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.consumerGroup())) - .consumerId(Optional.ofNullable(consumerId).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.consumerId())) - .trustStorePath( - Optional.ofNullable(trustStorePath).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.trustStorePath())) - .trustStorePasswordPath( - Optional.ofNullable(trustStorePasswordPath).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.trustStorePasswordPath())) - .keyStorePath( - Optional.ofNullable(keyStorePath).filter(isEmpty.negate()) - .orElse(dmaapConsumerConfiguration.keyStorePath())) - .keyStorePasswordPath( - Optional.ofNullable(keyStorePasswordPath).filter(isEmpty.negate()) - 
.orElse(dmaapConsumerConfiguration.keyStorePasswordPath())) - .enableDmaapCertAuth( - Optional.ofNullable(enableDmaapCertAuth).filter(p -> !p.toString().isEmpty()) - .orElse(dmaapConsumerConfiguration.enableDmaapCertAuth())) - .build(); + String getFilepath() { + return this.filepath; } - @Override - public DmaapPublisherConfiguration getDmaapPublisherConfiguration() { - return new ImmutableDmaapPublisherConfiguration.Builder() - .dmaapContentType( - Optional.ofNullable(producerDmaapContentType).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.dmaapContentType())) - .dmaapHostName( - Optional.ofNullable(producerDmaapHostName).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.dmaapHostName())) - .dmaapPortNumber( - Optional.ofNullable(producerDmaapPortNumber).filter(p -> !p.toString().isEmpty()) - .orElse(dmaapPublisherConfiguration.dmaapPortNumber())) - .dmaapProtocol( - Optional.ofNullable(producerDmaapProtocol).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.dmaapProtocol())) - .dmaapTopicName( - Optional.ofNullable(producerDmaapTopicName).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.dmaapTopicName())) - .dmaapUserName( - Optional.ofNullable(producerDmaapUserName).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.dmaapUserName())) - .dmaapUserPassword( - Optional.ofNullable(producerDmaapUserPassword).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.dmaapUserPassword())) - .trustStorePath( - Optional.ofNullable(trustStorePath).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.trustStorePath())) - .trustStorePasswordPath( - Optional.ofNullable(trustStorePasswordPath).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.trustStorePasswordPath())) - .keyStorePath( - Optional.ofNullable(keyStorePath).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.keyStorePath())) - .keyStorePasswordPath( - 
Optional.ofNullable(keyStorePasswordPath).filter(isEmpty.negate()) - .orElse(dmaapPublisherConfiguration.keyStorePasswordPath())) - .enableDmaapCertAuth( - Optional.ofNullable(enableDmaapCertAuth).filter(p -> !p.toString().isEmpty()) - .orElse(dmaapPublisherConfiguration.enableDmaapCertAuth())) - .build(); + public void setFilepath(String filepath) { + this.filepath = filepath; } - @Override - public FtpesConfig getFtpesConfiguration() { - return new ImmutableFtpesConfig.Builder() - .keyCert( - Optional.ofNullable(keyCert).filter(isEmpty.negate()) - .orElse(ftpesConfig.keyCert())) - .keyPassword( - Optional.ofNullable(keyPassword).filter(isEmpty.negate()) - .orElse(ftpesConfig.keyPassword())) - .trustedCA( - Optional.ofNullable(trustedCA).filter(isEmpty.negate()) - .orElse(ftpesConfig.trustedCA())) - .trustedCAPassword( - Optional.ofNullable(trustedCAPassword).filter(isEmpty.negate()) - .orElse(ftpesConfig.trustedCAPassword())) - .build(); + private JsonObject concatenateJsonObjects(JsonObject target, JsonObject source) { + source.entrySet() + .forEach(entry -> target.add(entry.getKey(), entry.getValue())); + return target; } + } diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/Config.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/Config.java deleted file mode 100644 index 7fe2561c..00000000 --- a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/Config.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * ============LICENSE_START====================================================================== - * Copyright (C) 2018 NOKIA Intellectual Property, 2018 Nordix Foundation. All rights reserved. - * =============================================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END======================================================================== - */ - -package org.onap.dcaegen2.collectors.datafile.configuration; - - -import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.DmaapConsumerConfiguration; -import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.DmaapPublisherConfiguration; - -/** - * @author <a href="mailto:przemyslaw.wasala@nokia.com">Przemysław Wąsala</a> on 4/9/18 - * @author <a href="mailto:henrik.b.andersson@est.tech">Henrik Andersson</a> - */ -public interface Config { - - DmaapConsumerConfiguration getDmaapConsumerConfiguration(); - - DmaapPublisherConfiguration getDmaapPublisherConfiguration(); - - FtpesConfig getFtpesConfiguration(); - - void initFileStreamReader(); - -} diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/DatafileAppConfig.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/DatafileAppConfig.java deleted file mode 100644 index 59bb259d..00000000 --- a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/DatafileAppConfig.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * ============LICENSE_START====================================================================== - * Copyright (C) 2018 NOKIA Intellectual Property, 2018-2019 Nordix Foundation. All rights reserved. 
- * =============================================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - * ============LICENSE_END======================================================================== - */ - -package org.onap.dcaegen2.collectors.datafile.configuration; - -import com.google.gson.GsonBuilder; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; -import com.google.gson.JsonSyntaxException; -import com.google.gson.TypeAdapterFactory; - -import java.io.BufferedInputStream; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.util.ServiceLoader; - -import javax.validation.constraints.NotEmpty; -import javax.validation.constraints.NotNull; - -import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.DmaapConsumerConfiguration; -import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.DmaapPublisherConfiguration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.boot.context.properties.EnableConfigurationProperties; -import org.springframework.context.annotation.Configuration; - -/** - * @author <a href="mailto:przemyslaw.wasala@nokia.com">Przemysław Wąsala</a> on 4/9/18 - * @author <a href="mailto:henrik.b.andersson@est.tech">Henrik 
Andersson</a> - */ -@Configuration -@EnableConfigurationProperties -@ConfigurationProperties("app") -public abstract class DatafileAppConfig implements Config { - - private static final String CONFIG = "configs"; - private static final String DMAAP = "dmaap"; - private static final String DMAAP_PRODUCER = "dmaapProducerConfiguration"; - private static final String DMAAP_CONSUMER = "dmaapConsumerConfiguration"; - private static final String FTP = "ftp"; - private static final String FTPES_CONFIGURATION = "ftpesConfiguration"; - private static final String SECURITY = "security"; - private static final Logger logger = LoggerFactory.getLogger(DatafileAppConfig.class); - - DmaapConsumerConfiguration dmaapConsumerConfiguration; - - DmaapPublisherConfiguration dmaapPublisherConfiguration; - - FtpesConfig ftpesConfig; - - @NotEmpty - private String filepath; - - - @Override - public DmaapConsumerConfiguration getDmaapConsumerConfiguration() { - return dmaapConsumerConfiguration; - } - - @Override - public DmaapPublisherConfiguration getDmaapPublisherConfiguration() { - return dmaapPublisherConfiguration; - } - - @Override - public FtpesConfig getFtpesConfiguration() { - return ftpesConfig; - } - - @Override - public void initFileStreamReader() { - - GsonBuilder gsonBuilder = new GsonBuilder(); - ServiceLoader.load(TypeAdapterFactory.class).forEach(gsonBuilder::registerTypeAdapterFactory); - JsonParser parser = new JsonParser(); - JsonObject jsonObject; - try (InputStream inputStream = getInputStream(filepath)) { - JsonElement rootElement = getJsonElement(parser, inputStream); - if (rootElement.isJsonObject()) { - jsonObject = rootElement.getAsJsonObject(); - ftpesConfig = deserializeType(gsonBuilder, - jsonObject.getAsJsonObject(CONFIG).getAsJsonObject(FTP).getAsJsonObject(FTPES_CONFIGURATION), - FtpesConfig.class); - dmaapConsumerConfiguration = deserializeType(gsonBuilder, concatenateJsonObjects( - 
jsonObject.getAsJsonObject(CONFIG).getAsJsonObject(DMAAP).getAsJsonObject(DMAAP_CONSUMER), - rootElement.getAsJsonObject().getAsJsonObject(CONFIG).getAsJsonObject(SECURITY)), - DmaapConsumerConfiguration.class); - - dmaapPublisherConfiguration = deserializeType(gsonBuilder, concatenateJsonObjects( - jsonObject.getAsJsonObject(CONFIG).getAsJsonObject(DMAAP).getAsJsonObject(DMAAP_PRODUCER), - rootElement.getAsJsonObject().getAsJsonObject(CONFIG).getAsJsonObject(SECURITY)), - DmaapPublisherConfiguration.class); - } - } catch (IOException e) { - logger.error("Problem with file loading, file: {}", filepath, e); - } catch (JsonSyntaxException e) { - logger.error("Problem with Json deserialization", e); - } - } - - JsonElement getJsonElement(JsonParser parser, InputStream inputStream) { - return parser.parse(new InputStreamReader(inputStream)); - } - - private <T> T deserializeType(@NotNull GsonBuilder gsonBuilder, @NotNull JsonObject jsonObject, - @NotNull Class<T> type) { - return gsonBuilder.create().fromJson(jsonObject, type); - } - - InputStream getInputStream(@NotNull String filepath) throws IOException { - return new BufferedInputStream(new FileInputStream(filepath)); - } - - String getFilepath() { - return this.filepath; - } - - public void setFilepath(String filepath) { - this.filepath = filepath; - } - - private JsonObject concatenateJsonObjects(JsonObject target, JsonObject source) { - source.entrySet() - .forEach(entry -> target.add(entry.getKey(), entry.getValue())); - return target; - } -} diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/SchedulerConfig.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/SchedulerConfig.java index 478ae309..bc21f96c 100644 --- a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/SchedulerConfig.java +++ 
b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/configuration/SchedulerConfig.java @@ -21,7 +21,9 @@ import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ScheduledFuture; + import javax.annotation.PostConstruct; + import org.onap.dcaegen2.collectors.datafile.tasks.ScheduledTasks; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; @@ -29,6 +31,7 @@ import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.scheduling.TaskScheduler; import org.springframework.scheduling.annotation.EnableScheduling; + import io.swagger.annotations.ApiOperation; import reactor.core.publisher.Mono; @@ -37,10 +40,11 @@ import reactor.core.publisher.Mono; */ @Configuration @EnableScheduling -public class SchedulerConfig extends DatafileAppConfig { +public class SchedulerConfig { - private static final int SCHEDULING_DELAY_FOR_DATAFILE_COLLECTOR_TASKS = 15; - private static final int SCHEDULING_REQUEST_FOR_CONFIGURATION_DELAY = 5; + private static final Duration SCHEDULING_DELAY_FOR_DATAFILE_COLLECTOR_TASKS = Duration.ofSeconds(15); + private static final Duration SCHEDULING_REQUEST_FOR_CONFIGURATION_DELAY = Duration.ofMinutes(5); + private static final Duration SCHEDULING_DELAY_FOR_DATAFILE_PURGE_CACHE = Duration.ofHours(1); private static volatile List<ScheduledFuture<?>> scheduledFutureList = new ArrayList<>(); private final TaskScheduler taskScheduler; @@ -77,11 +81,13 @@ public class SchedulerConfig extends DatafileAppConfig { @ApiOperation(value = "Start task if possible") public synchronized boolean tryToStartTask() { if (scheduledFutureList.isEmpty()) { - scheduledFutureList.add(taskScheduler - .scheduleAtFixedRate(cloudConfiguration::runTask, Instant.now(), - Duration.ofMinutes(SCHEDULING_REQUEST_FOR_CONFIGURATION_DELAY))); + 
scheduledFutureList.add(taskScheduler.scheduleAtFixedRate(cloudConfiguration::runTask, Instant.now(), + SCHEDULING_REQUEST_FOR_CONFIGURATION_DELAY)); scheduledFutureList.add(taskScheduler.scheduleWithFixedDelay(scheduledTask::scheduleMainDatafileEventTask, - Duration.ofSeconds(SCHEDULING_DELAY_FOR_DATAFILE_COLLECTOR_TASKS))); + SCHEDULING_DELAY_FOR_DATAFILE_COLLECTOR_TASKS)); + scheduledFutureList.add(taskScheduler.scheduleWithFixedDelay(() -> scheduledTask.purgeCachedInformation(Instant.now()), + SCHEDULING_DELAY_FOR_DATAFILE_PURGE_CACHE)); + return true; } else { return false; diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/service/JsonMessageParser.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/service/JsonMessageParser.java index 3c606deb..a8f79ea1 100644 --- a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/service/JsonMessageParser.java +++ b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/service/JsonMessageParser.java @@ -123,12 +123,11 @@ public class JsonMessageParser { } private Flux<FileReadyMessage> createMessages(Flux<JsonObject> jsonObject) { - return jsonObject.flatMap(monoJsonP -> !containsNotificationFields(monoJsonP) - ? logErrorAndReturnEmptyMessageFlux("Incorrect JsonObject - missing header. " + jsonObject) - : transformMessages(monoJsonP)); + return jsonObject.flatMap(monoJsonP -> containsNotificationFields(monoJsonP) ? transformMessages(monoJsonP) + : logErrorAndReturnEmptyMessageFlux("Incorrect JsonObject - missing header. 
" + jsonObject)); } - private Flux<FileReadyMessage> transformMessages(JsonObject message) { + private Mono<FileReadyMessage> transformMessages(JsonObject message) { Optional<MessageMetaData> optionalMessageMetaData = getMessageMetaData(message); if (optionalMessageMetaData.isPresent()) { JsonObject notificationFields = message.getAsJsonObject(EVENT).getAsJsonObject(NOTIFICATION_FIELDS); @@ -138,22 +137,22 @@ public class JsonMessageParser { if (!allFileDataFromJson.isEmpty()) { MessageMetaData messageMetaData = optionalMessageMetaData.get(); // @formatter:off - return Flux.just(ImmutableFileReadyMessage.builder() + return Mono.just(ImmutableFileReadyMessage.builder() .pnfName(messageMetaData.sourceName()) .messageMetaData(messageMetaData) .files(allFileDataFromJson) .build()); // @formatter:on } else { - return Flux.empty(); + return Mono.empty(); } } logger.error("Unable to collect file from xNF. Missing arrayOfNamedHashMap in message. {}", message); - return Flux.empty(); + return Mono.empty(); } logger.error("Unable to collect file from xNF. FileReady event has incorrect JsonObject. {}", message); - return Flux.empty(); + return Mono.empty(); } private Optional<MessageMetaData> getMessageMetaData(JsonObject message) { diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/service/PublishedFileCache.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/service/PublishedFileCache.java new file mode 100644 index 00000000..2cb84112 --- /dev/null +++ b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/service/PublishedFileCache.java @@ -0,0 +1,59 @@ +/* + * ============LICENSE_START====================================================================== + * Copyright (C) 2019 Nordix Foundation. All rights reserved. 
+ * =============================================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + * ============LICENSE_END======================================================================== + */ +package org.onap.dcaegen2.collectors.datafile.service; + +import java.nio.file.Path; +import java.time.Instant; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +/** + * A cache of all files that already has been published. Key is the local file path and the value is + * a time stamp, when the key was last used. 
+ */ +public class PublishedFileCache { + private final Map<Path, Instant> publishedFiles = Collections.synchronizedMap(new HashMap<Path, Instant>()); + + public Instant put(Path path) { + return publishedFiles.put(path, Instant.now()); + } + + public void remove(Path localFileName) { + publishedFiles.remove(localFileName); + } + + public void purge(Instant now) { + for (Iterator<Map.Entry<Path, Instant>> it = publishedFiles.entrySet().iterator(); it.hasNext();) { + Map.Entry<Path, Instant> pair = it.next(); + if (isCachedPublishedFileOutdated(now, pair.getValue())) { + it.remove(); + } + } + } + + public int size() { + return publishedFiles.size(); + } + + private boolean isCachedPublishedFileOutdated(Instant now, Instant then) { + final int timeToKeepInfoInSeconds = 60 * 60 * 24; + return now.getEpochSecond() - then.getEpochSecond() > timeToKeepInfoInSeconds; + } + + +} diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/DMaaPMessageConsumerTask.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/DMaaPMessageConsumerTask.java index c41dce5b..f6daf733 100644 --- a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/DMaaPMessageConsumerTask.java +++ b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/DMaaPMessageConsumerTask.java @@ -22,7 +22,6 @@ package org.onap.dcaegen2.collectors.datafile.tasks; import org.onap.dcaegen2.collectors.datafile.configuration.AppConfig; -import org.onap.dcaegen2.collectors.datafile.configuration.Config; import org.onap.dcaegen2.collectors.datafile.model.FileReadyMessage; import org.onap.dcaegen2.collectors.datafile.service.DmaapReactiveWebClient; import org.onap.dcaegen2.collectors.datafile.service.JsonMessageParser; @@ -31,9 +30,8 @@ import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.service.consume import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.springframework.web.reactive.function.client.WebClient; - -import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import reactor.core.publisher.Flux; /** * @author <a href="mailto:henrik.b.andersson@est.tech">Henrik Andersson</a> @@ -41,7 +39,7 @@ import reactor.core.publisher.Mono; public class DMaaPMessageConsumerTask { private static final Logger logger = LoggerFactory.getLogger(DMaaPMessageConsumerTask.class); - private Config datafileAppConfig; + private AppConfig datafileAppConfig; private JsonMessageParser jsonMessageParser; private DMaaPConsumerReactiveHttpClient dmaaPConsumerReactiveHttpClient; diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/DataRouterPublisher.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/DataRouterPublisher.java index b65ddd63..4c0dcce5 100644 --- a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/DataRouterPublisher.java +++ b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/DataRouterPublisher.java @@ -19,7 +19,6 @@ package org.onap.dcaegen2.collectors.datafile.tasks; import java.time.Duration; import org.onap.dcaegen2.collectors.datafile.configuration.AppConfig; -import org.onap.dcaegen2.collectors.datafile.configuration.Config; import org.onap.dcaegen2.collectors.datafile.model.ConsumerDmaapModel; import org.onap.dcaegen2.collectors.datafile.service.HttpUtils; import org.onap.dcaegen2.collectors.datafile.service.producer.DmaapProducerReactiveHttpClient; @@ -28,7 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; -import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; /** * @author <a href="mailto:przemyslaw.wasala@nokia.com">Przemysław Wąsala</a> on 4/13/18 @@ -37,7 +36,7 @@ import reactor.core.publisher.Flux; public class DataRouterPublisher { private static final Logger logger = 
LoggerFactory.getLogger(DataRouterPublisher.class); - private final Config datafileAppConfig; + private final AppConfig datafileAppConfig; public DataRouterPublisher(AppConfig datafileAppConfig) { this.datafileAppConfig = datafileAppConfig; @@ -51,27 +50,27 @@ public class DataRouterPublisher { * @param firstBackoffTimeout the time to delay the first retry * @return the HTTP response status as a string */ - public Flux<ConsumerDmaapModel> execute(ConsumerDmaapModel model, long numRetries, Duration firstBackoff) { + public Mono<ConsumerDmaapModel> execute(ConsumerDmaapModel model, long numRetries, Duration firstBackoff) { logger.trace("Method called with arg {}", model); DmaapProducerReactiveHttpClient dmaapProducerReactiveHttpClient = resolveClient(); //@formatter:off - return Flux.just(model) - .cache(1) + return Mono.just(model) + .cache() .flatMap(dmaapProducerReactiveHttpClient::getDmaapProducerResponse) .flatMap(httpStatus -> handleHttpResponse(httpStatus, model)) .retryBackoff(numRetries, firstBackoff); //@formatter:on } - private Flux<ConsumerDmaapModel> handleHttpResponse(HttpStatus response, ConsumerDmaapModel model) { + private Mono<ConsumerDmaapModel> handleHttpResponse(HttpStatus response, ConsumerDmaapModel model) { if (HttpUtils.isSuccessfulResponseCode(response.value())) { logger.trace("Publish to DR successful!"); - return Flux.just(model); + return Mono.just(model); } else { - logger.warn("Publish to DR unsuccessful, response code: " + response); - return Flux.error(new Exception("Publish to DR unsuccessful, response code: " + response)); + logger.warn("Publish to DR unsuccessful, response code: {}", response); + return Mono.error(new Exception("Publish to DR unsuccessful, response code: " + response)); } } diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/FileCollector.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/FileCollector.java index db18ac2a..0b647bf5 100644 --- 
a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/FileCollector.java +++ b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/FileCollector.java @@ -20,7 +20,6 @@ import java.nio.file.Path; import java.time.Duration; import org.onap.dcaegen2.collectors.datafile.configuration.AppConfig; -import org.onap.dcaegen2.collectors.datafile.configuration.Config; import org.onap.dcaegen2.collectors.datafile.configuration.FtpesConfig; import org.onap.dcaegen2.collectors.datafile.exceptions.DatafileTaskException; import org.onap.dcaegen2.collectors.datafile.ftp.FileCollectClient; @@ -41,7 +40,7 @@ import reactor.core.publisher.Mono; public class FileCollector { private static final Logger logger = LoggerFactory.getLogger(FileCollector.class); - private Config datafileAppConfig; + private AppConfig datafileAppConfig; private final FtpsClient ftpsClient; private final SftpClient sftpClient; diff --git a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/ScheduledTasks.java b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/ScheduledTasks.java index f22c7bf9..783c699c 100644 --- a/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/ScheduledTasks.java +++ b/datafile-app-server/src/main/java/org/onap/dcaegen2/collectors/datafile/tasks/ScheduledTasks.java @@ -20,11 +20,9 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Duration; +import java.time.Instant; import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; import java.util.List; -import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.onap.dcaegen2.collectors.datafile.configuration.AppConfig; @@ -34,6 +32,7 @@ import org.onap.dcaegen2.collectors.datafile.model.ConsumerDmaapModel; import org.onap.dcaegen2.collectors.datafile.model.FileData; import 
org.onap.dcaegen2.collectors.datafile.model.FileReadyMessage; import org.onap.dcaegen2.collectors.datafile.model.MessageMetaData; +import org.onap.dcaegen2.collectors.datafile.service.PublishedFileCache; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -41,22 +40,23 @@ import org.springframework.stereotype.Component; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import reactor.core.scheduler.Scheduler; import reactor.core.scheduler.Schedulers; /** - * @author <a href="mailto:przemyslaw.wasala@nokia.com">Przemysław Wąsala</a> on 3/23/18 - * @author <a href="mailto:henrik.b.andersson@est.tech">Henrik Andersson</a> + * This implements the main flow of the data file collector. Fetch file ready events from the + * message router, fetch new files from the PNF publish these in the data router. */ @Component public class ScheduledTasks { private static final int MAX_NUMBER_OF_CONCURRENT_TASKS = 200; + private static final int MAX_ILDLE_THREAD_TIME_TO_LIVE_SECONDS = 10; - /** Data needed for fetching of files from one PNF */ + /** Data needed for fetching of one file */ private class FileCollectionData { final FileData fileData; - final FileCollector collectorTask; // Same object, ftp session etc. 
can be used for each file in one VES - // event + final FileCollector collectorTask; final MessageMetaData metaData; FileCollectionData(FileData fd, FileCollector collectorTask, MessageMetaData metaData) { @@ -68,15 +68,15 @@ public class ScheduledTasks { private static final Logger logger = LoggerFactory.getLogger(ScheduledTasks.class); private final AppConfig applicationConfiguration; - private final AtomicInteger taskCounter = new AtomicInteger(); - private final Set<Path> alreadyPublishedFiles = Collections.synchronizedSet(new HashSet<Path>()); + private final AtomicInteger currentNumberOfTasks = new AtomicInteger(); + private final Scheduler scheduler = + Schedulers.newElastic("DataFileCollector", MAX_ILDLE_THREAD_TIME_TO_LIVE_SECONDS); + PublishedFileCache alreadyPublishedFiles = new PublishedFileCache(); /** * Constructor for task registration in Datafile Workflow. * * @param applicationConfiguration - application configuration - * @param xnfCollectorTask - second task - * @param dmaapPublisherTask - third task */ @Autowired public ScheduledTasks(AppConfig applicationConfiguration) { @@ -84,52 +84,67 @@ public class ScheduledTasks { } /** - * Main function for scheduling Datafile Workflow. + * Main function for scheduling for the file collection Workflow. 
*/ public void scheduleMainDatafileEventTask() { logger.trace("Execution of tasks was registered"); applicationConfiguration.initFileStreamReader(); - //@formatter:off - consumeMessagesFromDmaap() - .parallel() // Each FileReadyMessage in a separate thread - .runOn(Schedulers.parallel()) - .flatMap(this::createFileCollectionTask) - .filter(this::shouldBePublished) - .doOnNext(fileData -> taskCounter.incrementAndGet()) - .flatMap(this::collectFileFromXnf) - .flatMap(this::publishToDataRouter) - .flatMap(model -> deleteFile(Paths.get(model.getInternalLocation()))) - .doOnNext(model -> taskCounter.decrementAndGet()) - .sequential() - .subscribe(this::onSuccess, this::onError, this::onComplete); - //@formatter:on + createMainTask().subscribe(this::onSuccess, this::onError, this::onComplete); + } + + Flux<ConsumerDmaapModel> createMainTask() { + return fetchMoreFileReadyMessages() // + .parallel(getParallelism()) // Each FileReadyMessage in a separate thread + .runOn(scheduler) // + .flatMap(this::createFileCollectionTask) // + .filter(this::shouldBePublished) // + .doOnNext(fileData -> currentNumberOfTasks.incrementAndGet()) // + .flatMap(this::collectFileFromXnf) // + .flatMap(this::publishToDataRouter) // + .doOnNext(model -> deleteFile(Paths.get(model.getInternalLocation()))) // + .doOnNext(model -> currentNumberOfTasks.decrementAndGet()) // + .sequential(); + } + + /** + * called in regular intervals to remove out-dated cached information + */ + public void purgeCachedInformation(Instant now) { + alreadyPublishedFiles.purge(now); } private void onComplete() { logger.info("Datafile tasks have been completed"); } - private void onSuccess(Path localFile) { - logger.info("Datafile consumed tasks." 
+ localFile); + private void onSuccess(ConsumerDmaapModel model) { + logger.info("Datafile consumed tasks {}", model.getInternalLocation()); } private void onError(Throwable throwable) { logger.error("Chain of tasks have been aborted due to errors in Datafile workflow {}", throwable); } + private int getParallelism() { + if (MAX_NUMBER_OF_CONCURRENT_TASKS - getCurrentNumberOfTasks() > 0) { + return MAX_NUMBER_OF_CONCURRENT_TASKS - getCurrentNumberOfTasks(); + } else { + return 1; // We need at least one rail/thread + } + } + private Flux<FileCollectionData> createFileCollectionTask(FileReadyMessage availableFiles) { List<FileCollectionData> fileCollects = new ArrayList<>(); for (FileData fileData : availableFiles.files()) { - FileCollector task = new FileCollector(applicationConfiguration, - new FtpsClient(fileData.fileServerData()), new SftpClient(fileData.fileServerData())); - fileCollects.add(new FileCollectionData(fileData, task, availableFiles.messageMetaData())); + fileCollects.add( + new FileCollectionData(fileData, createFileCollector(fileData), availableFiles.messageMetaData())); } return Flux.fromIterable(fileCollects); } private boolean shouldBePublished(FileCollectionData task) { - return alreadyPublishedFiles.add(task.fileData.getLocalFileName()); + return alreadyPublishedFiles.put(task.fileData.getLocalFileName()) == null; } private Mono<ConsumerDmaapModel> collectFileFromXnf(FileCollectionData fileCollect) { @@ -138,48 +153,49 @@ public class ScheduledTasks { return fileCollect.collectorTask .execute(fileCollect.fileData, fileCollect.metaData, maxNUmberOfRetries, initialRetryTimeout) - .onErrorResume(exception -> handleCollectFailure(fileCollect.fileData, exception)); + .onErrorResume(exception -> handleCollectFailure(fileCollect.fileData)); } - private Mono<ConsumerDmaapModel> handleCollectFailure(FileData fileData, Throwable exception) { - logger.error("File fetching failed: {}, reason: {}", fileData.name(), exception.getMessage()); - 
deleteFile(fileData.getLocalFileName()); - alreadyPublishedFiles.remove(fileData.getLocalFileName()); - taskCounter.decrementAndGet(); + private Mono<ConsumerDmaapModel> handleCollectFailure(FileData fileData) { + Path localFileName = fileData.getLocalFileName(); + logger.error("File fetching failed: {}", localFileName); + deleteFile(localFileName); + alreadyPublishedFiles.remove(localFileName); + currentNumberOfTasks.decrementAndGet(); return Mono.empty(); } - private Flux<ConsumerDmaapModel> publishToDataRouter(ConsumerDmaapModel model) { + private Mono<ConsumerDmaapModel> publishToDataRouter(ConsumerDmaapModel model) { final long maxNumberOfRetries = 3; final Duration initialRetryTimeout = Duration.ofSeconds(5); - DataRouterPublisher publisherTask = new DataRouterPublisher(applicationConfiguration); + DataRouterPublisher publisherTask = createDataRouterPublisher(); return publisherTask.execute(model, maxNumberOfRetries, initialRetryTimeout) .onErrorResume(exception -> handlePublishFailure(model, exception)); - } - private Flux<ConsumerDmaapModel> handlePublishFailure(ConsumerDmaapModel model, Throwable exception) { + private Mono<ConsumerDmaapModel> handlePublishFailure(ConsumerDmaapModel model, Throwable exception) { logger.error("File publishing failed: {}, exception: {}", model.getName(), exception); Path internalFileName = Paths.get(model.getInternalLocation()); deleteFile(internalFileName); alreadyPublishedFiles.remove(internalFileName); - taskCounter.decrementAndGet(); - return Flux.empty(); + currentNumberOfTasks.decrementAndGet(); + return Mono.empty(); } - private Flux<FileReadyMessage> consumeMessagesFromDmaap() { - final int currentNumberOfTasks = taskCounter.get(); - logger.trace("Consuming new file ready messages, current number of tasks: {}", currentNumberOfTasks); - if (currentNumberOfTasks > MAX_NUMBER_OF_CONCURRENT_TASKS) { + /** + * Fetch more messages from the message router. This is done in a polling/blocking fashion. 
+ */ + private Flux<FileReadyMessage> fetchMoreFileReadyMessages() { + logger.trace("Consuming new file ready messages, current number of tasks: {}", getCurrentNumberOfTasks()); + if (getCurrentNumberOfTasks() > MAX_NUMBER_OF_CONCURRENT_TASKS) { return Flux.empty(); } - final DMaaPMessageConsumerTask messageConsumerTask = - new DMaaPMessageConsumerTask(this.applicationConfiguration); - return messageConsumerTask.execute() - .onErrorResume(exception -> handleConsumeMessageFailure(exception)); + return createConsumerTask() // + .execute() // + .onErrorResume(this::handleConsumeMessageFailure); } private Flux<FileReadyMessage> handleConsumeMessageFailure(Throwable exception) { @@ -187,13 +203,30 @@ public class ScheduledTasks { return Flux.empty(); } - private Flux<Path> deleteFile(Path localFile) { + private void deleteFile(Path localFile) { logger.trace("Deleting file: {}", localFile); try { Files.delete(localFile); } catch (Exception e) { - logger.warn("Could not delete file: {}, {}", localFile, e); + logger.trace("Could not delete file: {}", localFile); } - return Flux.just(localFile); } + + int getCurrentNumberOfTasks() { + return currentNumberOfTasks.get(); + } + + DMaaPMessageConsumerTask createConsumerTask() { + return new DMaaPMessageConsumerTask(this.applicationConfiguration); + } + + FileCollector createFileCollector(FileData fileData) { + return new FileCollector(applicationConfiguration, new FtpsClient(fileData.fileServerData()), + new SftpClient(fileData.fileServerData())); + } + + DataRouterPublisher createDataRouterPublisher() { + return new DataRouterPublisher(applicationConfiguration); + } + } diff --git a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/configuration/DatafileAppConfigTest.java b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/configuration/DatafileAppConfigTest.java index 2cd854af..443ddae7 100644 --- 
a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/configuration/DatafileAppConfigTest.java +++ b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/configuration/DatafileAppConfigTest.java @@ -46,33 +46,32 @@ import org.onap.dcaegen2.collectors.datafile.integration.junit5.mockito.MockitoE * @author <a href="mailto:henrik.b.andersson@est.tech">Henrik Andersson</a> */ @ExtendWith({MockitoExtension.class}) -class DatafileAppConfigTest { - +class AppConfigTest { + private static final String DATAFILE_ENDPOINTS = "datafile_endpoints.json"; private static final boolean CORRECT_JSON = true; private static final boolean INCORRECT_JSON = false; - private static DatafileAppConfig datafileAppConfig; - private static AppConfig appConfig; + private static AppConfig appConfigUnderTest; + private static String filePath = Objects - .requireNonNull(DatafileAppConfigTest.class.getClassLoader().getResource(DATAFILE_ENDPOINTS)).getFile(); + .requireNonNull(AppConfigTest.class.getClassLoader().getResource(DATAFILE_ENDPOINTS)).getFile(); @BeforeEach public void setUp() { - datafileAppConfig = spy(DatafileAppConfig.class); - appConfig = spy(new AppConfig()); + appConfigUnderTest = spy(AppConfig.class); } @Test public void whenApplicationWasStarted_FilePathIsSet() { // When - datafileAppConfig.setFilepath(filePath); + appConfigUnderTest.setFilepath(filePath); // Then - verify(datafileAppConfig, times(1)).setFilepath(anyString()); - verify(datafileAppConfig, times(0)).initFileStreamReader(); - Assertions.assertEquals(filePath, datafileAppConfig.getFilepath()); + verify(appConfigUnderTest, times(1)).setFilepath(anyString()); + verify(appConfigUnderTest, times(0)).initFileStreamReader(); + Assertions.assertEquals(filePath, appConfigUnderTest.getFilepath()); } @Test @@ -82,23 +81,23 @@ class DatafileAppConfigTest { new ByteArrayInputStream((getJsonConfig(CORRECT_JSON).getBytes(StandardCharsets.UTF_8))); // When - 
datafileAppConfig.setFilepath(filePath); - doReturn(inputStream).when(datafileAppConfig).getInputStream(any()); - datafileAppConfig.initFileStreamReader(); - appConfig.dmaapConsumerConfiguration = datafileAppConfig.getDmaapConsumerConfiguration(); - appConfig.dmaapPublisherConfiguration = datafileAppConfig.getDmaapPublisherConfiguration(); - appConfig.ftpesConfig = datafileAppConfig.getFtpesConfiguration(); + appConfigUnderTest.setFilepath(filePath); + doReturn(inputStream).when(appConfigUnderTest).getInputStream(any()); + appConfigUnderTest.initFileStreamReader(); + appConfigUnderTest.dmaapConsumerConfiguration = appConfigUnderTest.getDmaapConsumerConfiguration(); + appConfigUnderTest.dmaapPublisherConfiguration = appConfigUnderTest.getDmaapPublisherConfiguration(); + appConfigUnderTest.ftpesConfig = appConfigUnderTest.getFtpesConfiguration(); // Then - verify(datafileAppConfig, times(1)).setFilepath(anyString()); - verify(datafileAppConfig, times(1)).initFileStreamReader(); - Assertions.assertNotNull(datafileAppConfig.getDmaapConsumerConfiguration()); - Assertions.assertNotNull(datafileAppConfig.getDmaapPublisherConfiguration()); - Assertions.assertEquals(appConfig.getDmaapPublisherConfiguration(), - datafileAppConfig.getDmaapPublisherConfiguration()); - Assertions.assertEquals(appConfig.getDmaapConsumerConfiguration(), - datafileAppConfig.getDmaapConsumerConfiguration()); - Assertions.assertEquals(appConfig.getFtpesConfiguration(), datafileAppConfig.getFtpesConfiguration()); + verify(appConfigUnderTest, times(1)).setFilepath(anyString()); + verify(appConfigUnderTest, times(1)).initFileStreamReader(); + Assertions.assertNotNull(appConfigUnderTest.getDmaapConsumerConfiguration()); + Assertions.assertNotNull(appConfigUnderTest.getDmaapPublisherConfiguration()); + Assertions.assertEquals(appConfigUnderTest.getDmaapPublisherConfiguration(), + appConfigUnderTest.getDmaapPublisherConfiguration()); + 
Assertions.assertEquals(appConfigUnderTest.getDmaapConsumerConfiguration(), + appConfigUnderTest.getDmaapConsumerConfiguration()); + Assertions.assertEquals(appConfigUnderTest.getFtpesConfiguration(), appConfigUnderTest.getFtpesConfiguration()); } @@ -106,17 +105,17 @@ class DatafileAppConfigTest { public void whenFileIsNotExist_ThrowIoException() { // Given filePath = "/temp.json"; - datafileAppConfig.setFilepath(filePath); + appConfigUnderTest.setFilepath(filePath); // When - datafileAppConfig.initFileStreamReader(); + appConfigUnderTest.initFileStreamReader(); // Then - verify(datafileAppConfig, times(1)).setFilepath(anyString()); - verify(datafileAppConfig, times(1)).initFileStreamReader(); - Assertions.assertNull(datafileAppConfig.getDmaapConsumerConfiguration()); - Assertions.assertNull(datafileAppConfig.getDmaapPublisherConfiguration()); - Assertions.assertNull(datafileAppConfig.getFtpesConfiguration()); + verify(appConfigUnderTest, times(1)).setFilepath(anyString()); + verify(appConfigUnderTest, times(1)).initFileStreamReader(); + Assertions.assertNull(appConfigUnderTest.getDmaapConsumerConfiguration()); + Assertions.assertNull(appConfigUnderTest.getDmaapPublisherConfiguration()); + Assertions.assertNull(appConfigUnderTest.getFtpesConfiguration()); } @@ -127,16 +126,16 @@ class DatafileAppConfigTest { new ByteArrayInputStream((getJsonConfig(INCORRECT_JSON).getBytes(StandardCharsets.UTF_8))); // When - datafileAppConfig.setFilepath(filePath); - doReturn(inputStream).when(datafileAppConfig).getInputStream(any()); - datafileAppConfig.initFileStreamReader(); + appConfigUnderTest.setFilepath(filePath); + doReturn(inputStream).when(appConfigUnderTest).getInputStream(any()); + appConfigUnderTest.initFileStreamReader(); // Then - verify(datafileAppConfig, times(1)).setFilepath(anyString()); - verify(datafileAppConfig, times(1)).initFileStreamReader(); - Assertions.assertNotNull(datafileAppConfig.getDmaapConsumerConfiguration()); - 
Assertions.assertNull(datafileAppConfig.getDmaapPublisherConfiguration()); - Assertions.assertNotNull(datafileAppConfig.getFtpesConfiguration()); + verify(appConfigUnderTest, times(1)).setFilepath(anyString()); + verify(appConfigUnderTest, times(1)).initFileStreamReader(); + Assertions.assertNotNull(appConfigUnderTest.getDmaapConsumerConfiguration()); + Assertions.assertNull(appConfigUnderTest.getDmaapPublisherConfiguration()); + Assertions.assertNotNull(appConfigUnderTest.getFtpesConfiguration()); } @@ -147,22 +146,22 @@ class DatafileAppConfigTest { InputStream inputStream = new ByteArrayInputStream((getJsonConfig(CORRECT_JSON).getBytes(StandardCharsets.UTF_8))); // When - datafileAppConfig.setFilepath(filePath); - doReturn(inputStream).when(datafileAppConfig).getInputStream(any()); + appConfigUnderTest.setFilepath(filePath); + doReturn(inputStream).when(appConfigUnderTest).getInputStream(any()); JsonElement jsonElement = mock(JsonElement.class); when(jsonElement.isJsonObject()).thenReturn(false); - doReturn(jsonElement).when(datafileAppConfig).getJsonElement(any(JsonParser.class), any(InputStream.class)); - datafileAppConfig.initFileStreamReader(); - appConfig.dmaapConsumerConfiguration = datafileAppConfig.getDmaapConsumerConfiguration(); - appConfig.dmaapPublisherConfiguration = datafileAppConfig.getDmaapPublisherConfiguration(); - appConfig.ftpesConfig = datafileAppConfig.getFtpesConfiguration(); + doReturn(jsonElement).when(appConfigUnderTest).getJsonElement(any(JsonParser.class), any(InputStream.class)); + appConfigUnderTest.initFileStreamReader(); + appConfigUnderTest.dmaapConsumerConfiguration = appConfigUnderTest.getDmaapConsumerConfiguration(); + appConfigUnderTest.dmaapPublisherConfiguration = appConfigUnderTest.getDmaapPublisherConfiguration(); + appConfigUnderTest.ftpesConfig = appConfigUnderTest.getFtpesConfiguration(); // Then - verify(datafileAppConfig, times(1)).setFilepath(anyString()); - verify(datafileAppConfig, 
times(1)).initFileStreamReader(); - Assertions.assertNull(datafileAppConfig.getDmaapConsumerConfiguration()); - Assertions.assertNull(datafileAppConfig.getDmaapPublisherConfiguration()); - Assertions.assertNull(datafileAppConfig.getFtpesConfiguration()); + verify(appConfigUnderTest, times(1)).setFilepath(anyString()); + verify(appConfigUnderTest, times(1)).initFileStreamReader(); + Assertions.assertNull(appConfigUnderTest.getDmaapConsumerConfiguration()); + Assertions.assertNull(appConfigUnderTest.getDmaapPublisherConfiguration()); + Assertions.assertNull(appConfigUnderTest.getFtpesConfiguration()); } private String getJsonConfig(boolean correct) { diff --git a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/integration/ServiceMockProvider.java b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/integration/ServiceMockProvider.java index 05a4f515..0d5ea003 100644 --- a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/integration/ServiceMockProvider.java +++ b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/integration/ServiceMockProvider.java @@ -2,7 +2,7 @@ * ============LICENSE_START======================================================= * PROJECT * ================================================================================ - * Copyright (C) 2018 NOKIA Intellectual Property, 2018 Nordix Foundation. All rights reserved. + * Copyright (C) 2018-2019 NOKIA Intellectual Property, 2018 Nordix Foundation. All rights reserved. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,7 +22,7 @@ package org.onap.dcaegen2.collectors.datafile.integration; import static org.mockito.Mockito.mock; -import org.onap.dcaegen2.collectors.datafile.configuration.DatafileAppConfig; +import org.onap.dcaegen2.collectors.datafile.configuration.AppConfig; import org.onap.dcaegen2.collectors.datafile.model.ConsumerDmaapModel; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -34,8 +34,8 @@ import org.springframework.context.annotation.Configuration; class ServiceMockProvider { @Bean - public DatafileAppConfig getDatafileAppConfig() { - return mock(DatafileAppConfig.class); + public AppConfig getDatafileAppConfig() { + return mock(AppConfig.class); } @Bean diff --git a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/service/PublishedFileCacheTest.java b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/service/PublishedFileCacheTest.java new file mode 100644 index 00000000..7b38ee42 --- /dev/null +++ b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/service/PublishedFileCacheTest.java @@ -0,0 +1,64 @@ +/* + * ============LICENSE_START====================================================================== + * Copyright (C) 2019 Nordix Foundation. All rights reserved. + * =============================================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ * ============LICENSE_END======================================================================== + */ + +package org.onap.dcaegen2.collectors.datafile.service; + +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Instant; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +public class PublishedFileCacheTest { + + private static PublishedFileCache testObject; + + @BeforeAll + public static void setUp() { + testObject = new PublishedFileCache(); + } + + @Test + public void purgeFiles_timeNotExpired() { + Assertions.assertNull(testObject.put(Paths.get("A"))); + Assertions.assertNotNull(testObject.put(Paths.get("A"))); + testObject.put(Paths.get("B")); + + testObject.purge(Instant.now()); + Assertions.assertEquals(2, testObject.size()); + } + + @Test + public void purgeFiles_timeExpired() { + testObject.put(Paths.get("A")); + testObject.put(Paths.get("B")); + testObject.put(Paths.get("C")); + + testObject.purge(Instant.MAX); + Assertions.assertEquals(0, testObject.size()); + } + + @Test + public void purgeFiles_remove() { + Path path = Paths.get("A"); + testObject.put(path); + Assertions.assertEquals(1, testObject.size()); + testObject.remove(path); + Assertions.assertEquals(0, testObject.size()); + } +} diff --git a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/DataRouterPublisherTest.java b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/DataRouterPublisherTest.java index 73511d19..24b82fe6 100644 --- a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/DataRouterPublisherTest.java +++ b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/DataRouterPublisherTest.java @@ -37,7 +37,7 @@ import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.DmaapPub import 
org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.ImmutableDmaapPublisherConfiguration; import org.springframework.http.HttpStatus; -import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; import reactor.test.StepVerifier; /** @@ -61,42 +61,43 @@ class DataRouterPublisherTest { @BeforeAll public static void setUp() { - //@formatter:off + dmaapPublisherConfiguration = new ImmutableDmaapPublisherConfiguration.Builder() - .dmaapContentType("application/json") - .dmaapHostName("54.45.33.2") - .dmaapPortNumber(1234) - .dmaapProtocol("https") - .dmaapUserName("DFC") - .dmaapUserPassword("DFC") - .dmaapTopicName("unauthenticated.VES_NOTIFICATION_OUTPUT") - .trustStorePath("trustStorePath") - .trustStorePasswordPath("trustStorePasswordPath") - .keyStorePath("keyStorePath") - .keyStorePasswordPath("keyStorePasswordPath") - .enableDmaapCertAuth(true) - .build(); + .dmaapContentType("application/json") // + .dmaapHostName("54.45.33.2") // + .dmaapPortNumber(1234) // + .dmaapProtocol("https") // + .dmaapUserName("DFC") // + .dmaapUserPassword("DFC") // + .dmaapTopicName("unauthenticated.VES_NOTIFICATION_OUTPUT") // + .trustStorePath("trustStorePath") // + .trustStorePasswordPath("trustStorePasswordPath") // + .keyStorePath("keyStorePath") // + .keyStorePasswordPath("keyStorePasswordPath") // + .enableDmaapCertAuth(true) // + .build(); // consumerDmaapModel = ImmutableConsumerDmaapModel.builder() - .productName(PRODUCT_NAME) - .vendorName(VENDOR_NAME) - .lastEpochMicrosec(LAST_EPOCH_MICROSEC) - .sourceName(SOURCE_NAME) - .startEpochMicrosec(START_EPOCH_MICROSEC) - .timeZoneOffset(TIME_ZONE_OFFSET) - .name(PM_FILE_NAME) - .location("ftpes://192.168.0.101:22/ftp/rop/" + PM_FILE_NAME) - .internalLocation("target/" + PM_FILE_NAME) - .compression("gzip") - .fileFormatType("org.3GPP.32.435#measCollec") - .fileFormatVersion("V10") - .build(); + .productName(PRODUCT_NAME) // + .vendorName(VENDOR_NAME) // + .lastEpochMicrosec(LAST_EPOCH_MICROSEC) // + 
.sourceName(SOURCE_NAME) // + .startEpochMicrosec(START_EPOCH_MICROSEC) // + .timeZoneOffset(TIME_ZONE_OFFSET) // + .name(PM_FILE_NAME) // + .location("ftpes://192.168.0.101:22/ftp/rop/" + PM_FILE_NAME) // + .internalLocation("target/" + PM_FILE_NAME) // + .compression("gzip") // + .fileFormatType("org.3GPP.32.435#measCollec") // + .fileFormatVersion("V10") // + .build(); // appConfig = mock(AppConfig.class); - //@formatter:on + + doReturn(dmaapPublisherConfiguration).when(appConfig).getDmaapPublisherConfiguration(); } @Test public void whenPassedObjectFits_ReturnsCorrectStatus() { - prepareMocksForTests(Flux.just(HttpStatus.OK)); + prepareMocksForTests(Mono.just(HttpStatus.OK)); StepVerifier.create(dmaapPublisherTask.execute(consumerDmaapModel, 1, Duration.ofSeconds(0))) .expectNext(consumerDmaapModel).verifyComplete(); @@ -107,7 +108,7 @@ class DataRouterPublisherTest { @Test public void whenPassedObjectFits_firstFailsThenSucceeds() { - prepareMocksForTests(Flux.just(HttpStatus.BAD_GATEWAY), Flux.just(HttpStatus.OK)); + prepareMocksForTests(Mono.just(HttpStatus.BAD_GATEWAY), Mono.just(HttpStatus.OK)); StepVerifier.create(dmaapPublisherTask.execute(consumerDmaapModel, 1, Duration.ofSeconds(0))) .expectNext(consumerDmaapModel).verifyComplete(); @@ -118,7 +119,7 @@ class DataRouterPublisherTest { @Test public void whenPassedObjectFits_firstFailsThenFails() { - prepareMocksForTests(Flux.just(HttpStatus.BAD_GATEWAY), Flux.just(HttpStatus.BAD_GATEWAY)); + prepareMocksForTests(Mono.just(HttpStatus.BAD_GATEWAY), Mono.just(HttpStatus.BAD_GATEWAY)); StepVerifier.create(dmaapPublisherTask.execute(consumerDmaapModel, 1, Duration.ofSeconds(0))) .expectErrorMessage("Retries exhausted: 1/1").verify(); @@ -128,11 +129,11 @@ class DataRouterPublisherTest { } @SafeVarargs - final void prepareMocksForTests(Flux<HttpStatus> firstResponse, Flux<HttpStatus>... nextHttpResponses) { + final void prepareMocksForTests(Mono<HttpStatus> firstResponse, Mono<HttpStatus>... 
nextHttpResponses) { dMaaPProducerReactiveHttpClient = mock(DmaapProducerReactiveHttpClient.class); when(dMaaPProducerReactiveHttpClient.getDmaapProducerResponse(any())).thenReturn(firstResponse, nextHttpResponses); - when(appConfig.getDmaapPublisherConfiguration()).thenReturn(dmaapPublisherConfiguration); + dmaapPublisherTask = spy(new DataRouterPublisher(appConfig)); when(dmaapPublisherTask.resolveConfiguration()).thenReturn(dmaapPublisherConfiguration); doReturn(dMaaPProducerReactiveHttpClient).when(dmaapPublisherTask).resolveClient(); diff --git a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/ScheduledTasksTest.java b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/ScheduledTasksTest.java new file mode 100644 index 00000000..0662216b --- /dev/null +++ b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/ScheduledTasksTest.java @@ -0,0 +1,285 @@ +/* + * ============LICENSE_START====================================================================== + * Copyright (C) 2019 Nordix Foundation. All rights reserved. + * =============================================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ * ============LICENSE_END======================================================================== + */ + +package org.onap.dcaegen2.collectors.datafile.tasks; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.notNull; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +import java.time.Duration; +import java.util.LinkedList; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.onap.dcaegen2.collectors.datafile.configuration.AppConfig; +import org.onap.dcaegen2.collectors.datafile.ftp.Scheme; +import org.onap.dcaegen2.collectors.datafile.model.ConsumerDmaapModel; +import org.onap.dcaegen2.collectors.datafile.model.FileData; +import org.onap.dcaegen2.collectors.datafile.model.FileReadyMessage; +import org.onap.dcaegen2.collectors.datafile.model.ImmutableConsumerDmaapModel; +import org.onap.dcaegen2.collectors.datafile.model.ImmutableFileData; +import org.onap.dcaegen2.collectors.datafile.model.ImmutableFileReadyMessage; +import org.onap.dcaegen2.collectors.datafile.model.ImmutableMessageMetaData; +import org.onap.dcaegen2.collectors.datafile.model.MessageMetaData; +import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.DmaapPublisherConfiguration; +import org.onap.dcaegen2.services.sdk.rest.services.dmaap.client.config.ImmutableDmaapPublisherConfiguration; + +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.test.StepVerifier; + +public class ScheduledTasksTest { + + private static final String PM_FILE_NAME = "A20161224.1030-1045.bin.gz"; + + private AppConfig appConfig = 
mock(AppConfig.class); + private ScheduledTasks testedObject = spy(new ScheduledTasks(appConfig)); + + private int uniqueValue = 0; + private DMaaPMessageConsumerTask consumerMock; + private FileCollector fileCollectorMock; + private DataRouterPublisher dataRouterMock; + + @BeforeEach + private void setUp() { + DmaapPublisherConfiguration dmaapPublisherConfiguration = new ImmutableDmaapPublisherConfiguration.Builder() // + .dmaapContentType("application/json") // + .dmaapHostName("54.45.33.2") // + .dmaapPortNumber(1234) // + .dmaapProtocol("https") // + .dmaapUserName("DFC") // + .dmaapUserPassword("DFC") // + .dmaapTopicName("unauthenticated.VES_NOTIFICATION_OUTPUT") // + .trustStorePath("trustStorePath") // + .trustStorePasswordPath("trustStorePasswordPath") // + .keyStorePath("keyStorePath") // + .keyStorePasswordPath("keyStorePasswordPath") // + .enableDmaapCertAuth(true) // + .build(); // + + consumerMock = mock(DMaaPMessageConsumerTask.class); + fileCollectorMock = mock(FileCollector.class); + dataRouterMock = mock(DataRouterPublisher.class); + + doReturn(dmaapPublisherConfiguration).when(appConfig).getDmaapPublisherConfiguration(); + doReturn(consumerMock).when(testedObject).createConsumerTask(); + doReturn(fileCollectorMock).when(testedObject).createFileCollector(notNull()); + doReturn(dataRouterMock).when(testedObject).createDataRouterPublisher(); + } + + private MessageMetaData messageMetaData() { + return ImmutableMessageMetaData.builder() // + .productName("productName") // + .vendorName("") // + .lastEpochMicrosec("") // + .sourceName("") // + .startEpochMicrosec("") // + .timeZoneOffset("") // + .changeIdentifier("") // + .changeType("") // + .build(); + } + + private FileData fileData(int instanceNumber) { + return ImmutableFileData.builder() // + .name("name" + instanceNumber) // + .fileFormatType("") // + .fileFormatVersion("") // + .location("ftpes://192.168.0.101/ftp/rop/" + PM_FILE_NAME + instanceNumber) // + .scheme(Scheme.FTPS) // + 
.compression("") // + .build(); + } + + private List<FileData> files(int size, boolean uniqueNames) { + List<FileData> list = new LinkedList<FileData>(); + for (int i = 0; i < size; ++i) { + if (uniqueNames) { + ++uniqueValue; + } + list.add(fileData(uniqueValue)); + } + return list; + } + + private FileReadyMessage createFileReadyMessage(int numberOfFiles, boolean uniqueNames) { + MessageMetaData md = messageMetaData(); + return ImmutableFileReadyMessage.builder().pnfName(md.sourceName()).messageMetaData(md) + .files(files(numberOfFiles, uniqueNames)).build(); + } + + private Flux<FileReadyMessage> fileReadyMessageFlux(int numberOfEvents, int filesPerEvent, boolean uniqueNames) { + List<FileReadyMessage> list = new LinkedList<FileReadyMessage>(); + for (int i = 0; i < numberOfEvents; ++i) { + list.add(createFileReadyMessage(filesPerEvent, uniqueNames)); + } + return Flux.fromIterable(list); + } + + private ConsumerDmaapModel consumerData() { + return ImmutableConsumerDmaapModel // + .builder() // + .productName("") // + .vendorName("") // + .lastEpochMicrosec("") // + .sourceName("") // + .startEpochMicrosec("") // + .timeZoneOffset("") // + .name("") // + .location("") // + .internalLocation("internalLocation") // + .compression("") // + .fileFormatType("") // + .fileFormatVersion("") // + .build(); + } + + @Test + public void notingToConsume() { + doReturn(consumerMock).when(testedObject).createConsumerTask(); + doReturn(Flux.empty()).when(consumerMock).execute(); + + testedObject.scheduleMainDatafileEventTask(); + + assertEquals(0, testedObject.getCurrentNumberOfTasks()); + verify(consumerMock, times(1)).execute(); + verifyNoMoreInteractions(consumerMock); + } + + @Test + public void consume_successfulCase() { + final int noOfEvents = 200; + final int noOfFilesPerEvent = 200; + final int noOfFiles = noOfEvents * noOfFilesPerEvent; + + Flux<FileReadyMessage> fileReadyMessages = fileReadyMessageFlux(noOfEvents, noOfFilesPerEvent, true); + 
doReturn(fileReadyMessages).when(consumerMock).execute(); + + Mono<ConsumerDmaapModel> collectedFile = Mono.just(consumerData()); + doReturn(collectedFile).when(fileCollectorMock).execute(notNull(), notNull(), anyLong(), notNull()); + doReturn(collectedFile).when(dataRouterMock).execute(notNull(), anyLong(), notNull()); + + StepVerifier.create(testedObject.createMainTask()).expectSubscription() // + .expectNextCount(noOfFiles) // + .expectComplete() // + .verify(); // + + assertEquals(0, testedObject.getCurrentNumberOfTasks()); + verify(consumerMock, times(1)).execute(); + verify(fileCollectorMock, times(noOfFiles)).execute(notNull(), notNull(), anyLong(), notNull()); + verify(dataRouterMock, times(noOfFiles)).execute(notNull(), anyLong(), notNull()); + verifyNoMoreInteractions(dataRouterMock); + verifyNoMoreInteractions(fileCollectorMock); + verifyNoMoreInteractions(consumerMock); + } + + @Test + public void consume_fetchFailedOnce() { + Flux<FileReadyMessage> fileReadyMessages = fileReadyMessageFlux(2, 2, true); // 4 files + doReturn(fileReadyMessages).when(consumerMock).execute(); + + Mono<ConsumerDmaapModel> collectedFile = Mono.just(consumerData()); + Mono<Object> error = Mono.error(new Exception("problem")); + + // First file collect will fail, 3 will succeed + doReturn(error, collectedFile, collectedFile, collectedFile) // + .when(fileCollectorMock) // + .execute(any(FileData.class), any(MessageMetaData.class), anyLong(), any(Duration.class)); + + doReturn(collectedFile).when(dataRouterMock).execute(notNull(), anyLong(), notNull()); + doReturn(collectedFile).when(dataRouterMock).execute(notNull(), anyLong(), notNull()); + + StepVerifier.create(testedObject.createMainTask()).expectSubscription() // + .expectNextCount(3) // + .expectComplete() // + .verify(); // + + assertEquals(0, testedObject.getCurrentNumberOfTasks()); + verify(consumerMock, times(1)).execute(); + verify(fileCollectorMock, times(4)).execute(notNull(), notNull(), anyLong(), notNull()); + 
verify(dataRouterMock, times(3)).execute(notNull(), anyLong(), notNull()); + verifyNoMoreInteractions(dataRouterMock); + verifyNoMoreInteractions(fileCollectorMock); + verifyNoMoreInteractions(consumerMock); + } + + @Test + public void consume_publishFailedOnce() { + + Flux<FileReadyMessage> fileReadyMessages = fileReadyMessageFlux(2, 2, true); // 4 files + doReturn(fileReadyMessages).when(consumerMock).execute(); + + Mono<ConsumerDmaapModel> collectedFile = Mono.just(consumerData()); + doReturn(collectedFile).when(fileCollectorMock).execute(notNull(), notNull(), anyLong(), notNull()); + + Mono<Object> error = Mono.error(new Exception("problem")); + // One publish will fail, the rest will succeed + doReturn(collectedFile, error, collectedFile, collectedFile) // + .when(dataRouterMock) // + .execute(notNull(), anyLong(), notNull()); + + StepVerifier.create(testedObject.createMainTask()).expectSubscription() // + .expectNextCount(3) // 3 completed files + .expectComplete() // + .verify(); // + + assertEquals(0, testedObject.getCurrentNumberOfTasks()); + verify(consumerMock, times(1)).execute(); + verify(fileCollectorMock, times(4)).execute(notNull(), notNull(), anyLong(), notNull()); + verify(dataRouterMock, times(4)).execute(notNull(), anyLong(), notNull()); + verifyNoMoreInteractions(dataRouterMock); + verifyNoMoreInteractions(fileCollectorMock); + verifyNoMoreInteractions(consumerMock); + } + + @Test + public void consume_successfulCase_sameFileNames() { + final int noOfEvents = 1; + final int noOfFilesPerEvent = 100; + + // 100 files with the same name + Flux<FileReadyMessage> fileReadyMessages = fileReadyMessageFlux(noOfEvents, noOfFilesPerEvent, false); + doReturn(fileReadyMessages).when(consumerMock).execute(); + + Mono<ConsumerDmaapModel> collectedFile = Mono.just(consumerData()); + doReturn(collectedFile).when(fileCollectorMock).execute(notNull(), notNull(), anyLong(), notNull()); + doReturn(collectedFile).when(dataRouterMock).execute(notNull(), anyLong(), 
notNull()); + + StepVerifier.create(testedObject.createMainTask()).expectSubscription() // + .expectNextCount(1) // 99 is skipped + .expectComplete() // + .verify(); // + + assertEquals(0, testedObject.getCurrentNumberOfTasks()); + verify(consumerMock, times(1)).execute(); + verify(fileCollectorMock, times(1)).execute(notNull(), notNull(), anyLong(), notNull()); + verify(dataRouterMock, times(1)).execute(notNull(), anyLong(), notNull()); + verifyNoMoreInteractions(dataRouterMock); + verifyNoMoreInteractions(fileCollectorMock); + verifyNoMoreInteractions(consumerMock); + } + + +} diff --git a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/XnfCollectorTaskImplTest.java b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/XnfCollectorTaskImplTest.java index 10c5b167..804b46e9 100644 --- a/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/XnfCollectorTaskImplTest.java +++ b/datafile-app-server/src/test/java/org/onap/dcaegen2/collectors/datafile/tasks/XnfCollectorTaskImplTest.java @@ -67,7 +67,12 @@ public class XnfCollectorTaskImplTest { private static final String PWD = "pwd"; private static final String FTPES_LOCATION = FTPES_SCHEME + USER + ":" + PWD + "@" + SERVER_ADDRESS + ":" + PORT_22 + REMOTE_FILE_LOCATION; + + private static final String FTPES_LOCATION_NO_PORT = + FTPES_SCHEME + USER + ":" + PWD + "@" + SERVER_ADDRESS + REMOTE_FILE_LOCATION; private static final String SFTP_LOCATION = SFTP_SCHEME + SERVER_ADDRESS + ":" + PORT_22 + REMOTE_FILE_LOCATION; + private static final String SFTP_LOCATION_NO_PORT = SFTP_SCHEME + SERVER_ADDRESS + REMOTE_FILE_LOCATION; + private static final String GZIP_COMPRESSION = "gzip"; private static final String MEAS_COLLECT_FILE_FORMAT_TYPE = "org.3GPP.32.435#measCollec"; private static final String FILE_FORMAT_VERSION = "V10"; @@ -100,11 +105,11 @@ public class XnfCollectorTaskImplTest { // @formatter:on } - private FileData createFileData() { + 
private FileData createFileData(String location) { // @formatter:off return ImmutableFileData.builder() .name(PM_FILE_NAME) - .location(FTPES_LOCATION) + .location(location) .compression(GZIP_COMPRESSION) .fileFormatType(MEAS_COLLECT_FILE_FORMAT_TYPE) .fileFormatVersion(FILE_FORMAT_VERSION) @@ -113,7 +118,7 @@ public class XnfCollectorTaskImplTest { // @formatter:on } - private ConsumerDmaapModel createExpectedConsumerDmaapModel() { + private ConsumerDmaapModel createExpectedConsumerDmaapModel(String location) { // @formatter:off return ImmutableConsumerDmaapModel.builder() .productName(PRODUCT_NAME) @@ -123,7 +128,7 @@ public class XnfCollectorTaskImplTest { .startEpochMicrosec(START_EPOCH_MICROSEC) .timeZoneOffset(TIME_ZONE_OFFSET) .name(PM_FILE_NAME) - .location(FTPES_LOCATION) + .location(location) .internalLocation(LOCAL_FILE_LOCATION.toString()) .compression(GZIP_COMPRESSION) .fileFormatType(MEAS_COLLECT_FILE_FORMAT_TYPE) @@ -146,9 +151,9 @@ public class XnfCollectorTaskImplTest { FileCollector collectorUndetTest = new FileCollector(appConfigMock, ftpsClientMock, sftpClientMock); - FileData fileData = createFileData(); + FileData fileData = createFileData(FTPES_LOCATION_NO_PORT); - ConsumerDmaapModel expectedConsumerDmaapModel = createExpectedConsumerDmaapModel(); + ConsumerDmaapModel expectedConsumerDmaapModel = createExpectedConsumerDmaapModel(FTPES_LOCATION_NO_PORT); StepVerifier.create(collectorUndetTest.execute(fileData, createMessageMetaData(), 3, Duration.ofSeconds(0))) .expectNext(expectedConsumerDmaapModel).verifyComplete(); @@ -168,7 +173,7 @@ public class XnfCollectorTaskImplTest { // @formatter:off FileData fileData = ImmutableFileData.builder() .name(PM_FILE_NAME) - .location(SFTP_LOCATION) + .location(SFTP_LOCATION_NO_PORT) .compression(GZIP_COMPRESSION) .fileFormatType(MEAS_COLLECT_FILE_FORMAT_TYPE) .fileFormatVersion(FILE_FORMAT_VERSION) @@ -183,7 +188,7 @@ public class XnfCollectorTaskImplTest { .startEpochMicrosec(START_EPOCH_MICROSEC) 
.timeZoneOffset(TIME_ZONE_OFFSET) .name(PM_FILE_NAME) - .location(SFTP_LOCATION) + .location(SFTP_LOCATION_NO_PORT) .internalLocation(LOCAL_FILE_LOCATION.toString()) .compression(GZIP_COMPRESSION) .fileFormatType(MEAS_COLLECT_FILE_FORMAT_TYPE) @@ -202,7 +207,7 @@ public class XnfCollectorTaskImplTest { public void whenFtpesFileAlwaysFail_retryAndFail() throws Exception { FileCollector collectorUndetTest = new FileCollector(appConfigMock, ftpsClientMock, sftpClientMock); - FileData fileData = createFileData(); + FileData fileData = createFileData(FTPES_LOCATION); doThrow(new DatafileTaskException("Unable to collect file.")).when(ftpsClientMock) .collectFile(REMOTE_FILE_LOCATION, LOCAL_FILE_LOCATION); @@ -219,9 +224,9 @@ public class XnfCollectorTaskImplTest { doThrow(new DatafileTaskException("Unable to collect file.")).doNothing().when(ftpsClientMock) .collectFile(REMOTE_FILE_LOCATION, LOCAL_FILE_LOCATION); - ConsumerDmaapModel expectedConsumerDmaapModel = createExpectedConsumerDmaapModel(); + ConsumerDmaapModel expectedConsumerDmaapModel = createExpectedConsumerDmaapModel(FTPES_LOCATION_NO_PORT); - FileData fileData = createFileData(); + FileData fileData = createFileData(FTPES_LOCATION_NO_PORT); StepVerifier.create(collectorUndetTest.execute(fileData, createMessageMetaData(), 3, Duration.ofSeconds(0))) .expectNext(expectedConsumerDmaapModel).verifyComplete(); diff --git a/datafile-commons/src/main/java/org/onap/dcaegen2/collectors/datafile/exceptions/DatafileTaskException.java b/datafile-commons/src/main/java/org/onap/dcaegen2/collectors/datafile/exceptions/DatafileTaskException.java index ae1435ca..442b766b 100644 --- a/datafile-commons/src/main/java/org/onap/dcaegen2/collectors/datafile/exceptions/DatafileTaskException.java +++ b/datafile-commons/src/main/java/org/onap/dcaegen2/collectors/datafile/exceptions/DatafileTaskException.java @@ -32,4 +32,8 @@ public class DatafileTaskException extends Exception { public DatafileTaskException(String message) { 
super(message); } + + public DatafileTaskException(String message, Exception e) { + super(message, e); + } } diff --git a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FTPSClientWrapper.java b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FTPSClientWrapper.java deleted file mode 100644 index 29160c94..00000000 --- a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FTPSClientWrapper.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * ============LICENSE_START====================================================================== - * Copyright (C) 2018-2019 Nordix Foundation. All rights reserved. - * =============================================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. 
- * ============LICENSE_END======================================================================== - */ - -package org.onap.dcaegen2.collectors.datafile.ftp; - -import java.io.IOException; -import java.io.OutputStream; -import javax.net.ssl.KeyManager; -import javax.net.ssl.TrustManager; -import org.apache.commons.net.ftp.FTPSClient; -import org.onap.dcaegen2.collectors.datafile.exceptions.DatafileTaskException; - -public class FTPSClientWrapper implements IFTPSClient { - private FTPSClient ftpsClient = new FTPSClient(); - - @Override - public void setNeedClientAuth(boolean isNeedClientAuth) { - ftpsClient.setNeedClientAuth(isNeedClientAuth); - } - - @Override - public void setKeyManager(KeyManager keyManager) { - ftpsClient.setKeyManager(keyManager); - } - - @Override - public void setTrustManager(TrustManager trustManager) { - ftpsClient.setTrustManager(trustManager); - } - - @Override - public void connect(String hostName, int port) throws IOException { - ftpsClient.connect(hostName, port); - } - - @Override - public boolean login(String username, String password) throws IOException { - return ftpsClient.login(username, password); - } - - @Override - public boolean logout() throws IOException { - return ftpsClient.logout(); - } - - @Override - public int getReplyCode() { - return ftpsClient.getReplyCode(); - } - - @Override - public void disconnect() throws IOException { - ftpsClient.disconnect(); - } - - @Override - public void enterLocalPassiveMode() { - ftpsClient.enterLocalPassiveMode(); - } - - @Override - public void setFileType(int fileType) throws IOException { - ftpsClient.setFileType(fileType); - } - - @Override - public void execPBSZ(int psbz) throws IOException { - ftpsClient.execPBSZ(psbz); - } - - @Override - public void execPROT(String prot) throws IOException { - ftpsClient.execPROT(prot); - } - - @Override - public void retrieveFile(String remote, OutputStream local) throws DatafileTaskException { - try { - if (!ftpsClient.retrieveFile(remote, 
local)) { - throw new DatafileTaskException("could not retrieve file"); - } - } catch (IOException e) { - throw new DatafileTaskException(e); - } - } - - @Override - public void setTimeout(Integer t) { - this.ftpsClient.setDefaultTimeout(t); - } - - @Override - public boolean isConnected() { - return ftpsClient.isConnected(); - } - - @Override - public void setBufferSize(int bufSize) { - ftpsClient.setBufferSize(bufSize); - } -} diff --git a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FileCollectClient.java b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FileCollectClient.java index f330b673..bedae43a 100644 --- a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FileCollectClient.java +++ b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FileCollectClient.java @@ -17,11 +17,13 @@ package org.onap.dcaegen2.collectors.datafile.ftp; import java.nio.file.Path; + import org.onap.dcaegen2.collectors.datafile.exceptions.DatafileTaskException; /** * @author <a href="mailto:henrik.b.andersson@est.tech">Henrik Andersson</a> */ +@FunctionalInterface public interface FileCollectClient { public void collectFile(String remoteFile, Path localFile) throws DatafileTaskException; } diff --git a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FtpsClient.java b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FtpsClient.java index 461b2200..c3b7990f 100644 --- a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FtpsClient.java +++ b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/FtpsClient.java @@ -16,6 +16,8 @@ package org.onap.dcaegen2.collectors.datafile.ftp; +import java.io.File; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -27,12 +29,10 @@ import java.util.Optional; 
import org.apache.commons.net.ftp.FTP; import org.apache.commons.net.ftp.FTPReply; +import org.apache.commons.net.ftp.FTPSClient; import org.onap.dcaegen2.collectors.datafile.exceptions.DatafileTaskException; import org.onap.dcaegen2.collectors.datafile.io.FileSystemResourceWrapper; -import org.onap.dcaegen2.collectors.datafile.io.FileWrapper; -import org.onap.dcaegen2.collectors.datafile.io.IFile; import org.onap.dcaegen2.collectors.datafile.io.IFileSystemResource; -import org.onap.dcaegen2.collectors.datafile.io.IOutputStream; import org.onap.dcaegen2.collectors.datafile.ssl.IKeyManagerUtils; import org.onap.dcaegen2.collectors.datafile.ssl.IKeyManagerUtils.KeyManagerException; import org.onap.dcaegen2.collectors.datafile.ssl.IKeyStore; @@ -55,13 +55,11 @@ public class FtpsClient implements FileCollectClient { private Path trustedCAPath; private String trustedCAPassword; - private IFTPSClient realFtpsClient = new FTPSClientWrapper(); + private FTPSClient realFtpsClient = new FTPSClient(); private IKeyManagerUtils keyManagerUtils = new KeyManagerUtilsWrapper(); private IKeyStore keyStore; private ITrustManagerFactory trustManagerFactory; - private IFile localFile = new FileWrapper(); private IFileSystemResource fileSystemResource = new FileSystemResourceWrapper(); - private IOutputStream outputStream; private boolean keyManagerSet = false; private boolean trustManagerSet = false; private final FileServerData fileServerData; @@ -83,7 +81,7 @@ public class FtpsClient implements FileCollectClient { getFileFromxNF(realFtpsClient, remoteFile, localFile); } catch (IOException e) { logger.trace("", e); - throw new DatafileTaskException("Could not open connection: " + e); + throw new DatafileTaskException("Could not open connection: ", e); } catch (KeyManagerException e) { logger.trace("", e); throw new DatafileTaskException(e); @@ -93,7 +91,7 @@ public class FtpsClient implements FileCollectClient { logger.trace("collectFile fetched: {}", localFile); } - private void 
setUpKeyManager(IFTPSClient ftps) throws KeyManagerException { + private void setUpKeyManager(FTPSClient ftps) throws KeyManagerException { if (keyManagerSet) { logger.trace("keyManager already set!"); } else { @@ -104,7 +102,7 @@ public class FtpsClient implements FileCollectClient { logger.trace("complete setUpKeyManager"); } - private void setUpTrustedCA(IFTPSClient ftps) throws DatafileTaskException { + private void setUpTrustedCA(FTPSClient ftps) throws DatafileTaskException { if (trustManagerSet) { logger.trace("trustManager already set!"); } else { @@ -130,7 +128,7 @@ public class FtpsClient implements FileCollectClient { return port.isPresent() ? port.get() : FTPS_DEFAULT_PORT; } - private void setUpConnection(IFTPSClient ftps) throws DatafileTaskException, IOException { + private void setUpConnection(FTPSClient ftps) throws DatafileTaskException, IOException { if (!ftps.isConnected()) { ftps.connect(fileServerData.serverAddress(), getPort(fileServerData.port())); logger.trace("after ftp connect"); @@ -155,23 +153,26 @@ public class FtpsClient implements FileCollectClient { logger.trace("setUpConnection successfully!"); } - private void getFileFromxNF(IFTPSClient ftps, String remoteFileName, Path localFileName) - throws IOException, DatafileTaskException { + private void getFileFromxNF(FTPSClient ftps, String remoteFileName, Path localFileName) + throws IOException { logger.trace("starting to getFile"); - this.localFile.setPath(localFileName); - this.localFile.createNewFile(); - - OutputStream output = this.outputStream.getOutputStream(this.localFile.getFile()); + File localFile = localFileName.toFile(); + if (localFile.createNewFile()) { + logger.warn("Local file {} already created", localFileName); + } + OutputStream output = new FileOutputStream(localFile); logger.trace("begin to retrieve from xNF."); - ftps.retrieveFile(remoteFileName, output); + if (!ftps.retrieveFile(remoteFileName, output)) { + throw new IOException("Could not retrieve file"); + } 
logger.trace("end retrieve from xNF."); output.close(); logger.debug("File {} Download Successfull from xNF", localFileName); } - private void closeDownConnection(IFTPSClient ftps) { + private void closeDownConnection(FTPSClient ftps) { logger.trace("starting to closeDownConnection"); if (ftps != null && ftps.isConnected()) { try { @@ -220,7 +221,7 @@ public class FtpsClient implements FileCollectClient { return keyStore; } - void setFtpsClient(IFTPSClient ftpsClient) { + void setFtpsClient(FTPSClient ftpsClient) { this.realFtpsClient = ftpsClient; } @@ -236,14 +237,6 @@ public class FtpsClient implements FileCollectClient { trustManagerFactory = tmf; } - void setFile(IFile file) { - localFile = file; - } - - void setOutputStream(IOutputStream outputStream) { - this.outputStream = outputStream; - } - void setFileSystemResource(IFileSystemResource fileSystemResource) { this.fileSystemResource = fileSystemResource; } diff --git a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/IFTPSClient.java b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/IFTPSClient.java deleted file mode 100644 index 3dcaa656..00000000 --- a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/ftp/IFTPSClient.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * ============LICENSE_START====================================================================== - * Copyright (C) 2018-2019 Nordix Foundation. All rights reserved. - * =============================================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. See the License for the specific language governing permissions and limitations under - * the License. - * ============LICENSE_END======================================================================== - */ - -package org.onap.dcaegen2.collectors.datafile.ftp; - -import java.io.IOException; -import java.io.OutputStream; -import javax.net.ssl.KeyManager; -import javax.net.ssl.TrustManager; -import org.onap.dcaegen2.collectors.datafile.exceptions.DatafileTaskException; - -public interface IFTPSClient { - public void setNeedClientAuth(boolean isNeedClientAuth); - - public void setKeyManager(KeyManager keyManager); - - public void setTrustManager(TrustManager trustManager); - - public void connect(String hostname, int port) throws IOException; - - public boolean login(String username, String password) throws IOException; - - public boolean logout() throws IOException; - - public int getReplyCode(); - - public void setBufferSize(int bufSize); - - public boolean isConnected(); - - public void disconnect() throws IOException; - - public void enterLocalPassiveMode(); - - public void setFileType(int fileType) throws IOException; - - public void execPBSZ(int newParam) throws IOException; - - public void execPROT(String prot) throws IOException; - - public void retrieveFile(String remote, OutputStream local) throws DatafileTaskException; - - void setTimeout(Integer t); -} diff --git a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/FileWrapper.java b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/FileWrapper.java deleted file mode 100644 index 203a5985..00000000 --- 
a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/FileWrapper.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * ============LICENSE_START====================================================================== - * Copyright (C) 2018-2019 Nordix Foundation. All rights reserved. - * =============================================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END======================================================================== - */ - -package org.onap.dcaegen2.collectors.datafile.io; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Path; - -public class FileWrapper implements IFile { - private File file; - - @Override - public void setPath(Path path) { - file = path.toFile(); - } - - @Override - public boolean createNewFile() throws IOException { - if (file == null) { - throw new IOException("Path to file not set."); - } - return file.createNewFile(); - } - - @Override - public File getFile() { - return file; - } - - @Override - public boolean delete() { - return file.delete(); - } -} diff --git a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/IFile.java b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/IFile.java deleted file mode 100644 index 2b95842f..00000000 --- a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/IFile.java +++ 
/dev/null @@ -1,33 +0,0 @@ -/* - * ============LICENSE_START====================================================================== - * Copyright (C) 2018-2019 Nordix Foundation. All rights reserved. - * =============================================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END======================================================================== - */ - -package org.onap.dcaegen2.collectors.datafile.io; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Path; - -public interface IFile { - public void setPath(Path path); - - public boolean createNewFile() throws IOException; - - public File getFile(); - - public boolean delete(); -} diff --git a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/IOutputStream.java b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/IOutputStream.java deleted file mode 100644 index 8015ea76..00000000 --- a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/IOutputStream.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * ============LICENSE_START====================================================================== - * Copyright (C) 2018-2019 Nordix Foundation. All rights reserved. 
- * =============================================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END======================================================================== - */ - -package org.onap.dcaegen2.collectors.datafile.io; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.OutputStream; - -@FunctionalInterface -public interface IOutputStream { - public OutputStream getOutputStream(File file) throws FileNotFoundException; -} diff --git a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/OutputStreamWrapper.java b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/OutputStreamWrapper.java deleted file mode 100644 index 88787826..00000000 --- a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/io/OutputStreamWrapper.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * ============LICENSE_START====================================================================== - * Copyright (C) 2018-2019 Nordix Foundation. All rights reserved. - * =============================================================================================== - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END======================================================================== - */ - -package org.onap.dcaegen2.collectors.datafile.io; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.OutputStream; - -public class OutputStreamWrapper implements IOutputStream { - - @Override - public OutputStream getOutputStream(File file) throws FileNotFoundException { - return new FileOutputStream(file); - } - -} diff --git a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/service/producer/DmaapProducerReactiveHttpClient.java b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/service/producer/DmaapProducerReactiveHttpClient.java index bced3d85..4869e4c2 100644 --- a/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/service/producer/DmaapProducerReactiveHttpClient.java +++ b/datafile-dmaap-client/src/main/java/org/onap/dcaegen2/collectors/datafile/service/producer/DmaapProducerReactiveHttpClient.java @@ -53,7 +53,7 @@ import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.web.util.DefaultUriBuilderFactory; -import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; /** * @author <a href="mailto:przemyslaw.wasala@nokia.com">Przemysław Wąsala</a> on 7/4/18 @@ -101,7 +101,7 @@ public class DmaapProducerReactiveHttpClient { * @param consumerDmaapModel - object which will be sent to DMaaP DataRouter * @return status code of operation */ 
- public Flux<HttpStatus> getDmaapProducerResponse(ConsumerDmaapModel consumerDmaapModel) { + public Mono<HttpStatus> getDmaapProducerResponse(ConsumerDmaapModel consumerDmaapModel) { logger.trace("Entering getDmaapProducerResponse with {}", consumerDmaapModel); try { logger.trace("Starting to publish to DR {}", consumerDmaapModel.getInternalLocation()); @@ -116,12 +116,12 @@ public class DmaapProducerReactiveHttpClient { Future<HttpResponse> future = webClient.execute(put, null); HttpResponse response = future.get(); - logger.trace(response.toString()); + logger.trace("{}", response); webClient.close(); - return Flux.just(HttpStatus.valueOf(response.getStatusLine().getStatusCode())); + return Mono.just(HttpStatus.valueOf(response.getStatusLine().getStatusCode())); } catch (Exception e) { logger.error("Unable to send file to DataRouter. Data: {}", consumerDmaapModel.getInternalLocation(), e); - return Flux.error(e); + return Mono.error(e); } } diff --git a/datafile-dmaap-client/src/test/java/org/onap/dcaegen2/collectors/datafile/ftp/FtpsClientTest.java b/datafile-dmaap-client/src/test/java/org/onap/dcaegen2/collectors/datafile/ftp/FtpsClientTest.java index c4577262..670b1bdc 100644 --- a/datafile-dmaap-client/src/test/java/org/onap/dcaegen2/collectors/datafile/ftp/FtpsClientTest.java +++ b/datafile-dmaap-client/src/test/java/org/onap/dcaegen2/collectors/datafile/ftp/FtpsClientTest.java @@ -24,7 +24,6 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; -import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -39,12 +38,11 @@ import javax.net.ssl.TrustManager; import org.apache.commons.net.ftp.FTP; import org.apache.commons.net.ftp.FTPReply; +import org.apache.commons.net.ftp.FTPSClient; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import 
org.onap.dcaegen2.collectors.datafile.exceptions.DatafileTaskException; -import org.onap.dcaegen2.collectors.datafile.io.IFile; +import org.mockito.ArgumentMatchers; import org.onap.dcaegen2.collectors.datafile.io.IFileSystemResource; -import org.onap.dcaegen2.collectors.datafile.io.IOutputStream; import org.onap.dcaegen2.collectors.datafile.ssl.IKeyManagerUtils; import org.onap.dcaegen2.collectors.datafile.ssl.IKeyStore; import org.onap.dcaegen2.collectors.datafile.ssl.ITrustManagerFactory; @@ -64,36 +62,31 @@ public class FtpsClientTest { private static final String USERNAME = "bob"; private static final String PASSWORD = "123"; - private IFTPSClient ftpsClientMock = mock(IFTPSClient.class); + private FTPSClient ftpsClientMock = mock(FTPSClient.class); private IKeyManagerUtils keyManagerUtilsMock = mock(IKeyManagerUtils.class); private KeyManager keyManagerMock = mock(KeyManager.class); private IKeyStore keyStoreWrapperMock = mock(IKeyStore.class); private KeyStore keyStoreMock = mock(KeyStore.class); private ITrustManagerFactory trustManagerFactoryMock = mock(ITrustManagerFactory.class); private TrustManager trustManagerMock = mock(TrustManager.class); - private IFile localFileMock = mock(IFile.class); private IFileSystemResource fileResourceMock = mock(IFileSystemResource.class); - private IOutputStream outputStreamMock = mock(IOutputStream.class); private InputStream inputStreamMock = mock(InputStream.class); FtpsClient clientUnderTest = new FtpsClient(createFileServerData()); private ImmutableFileServerData createFileServerData() { - return ImmutableFileServerData.builder().serverAddress(XNF_ADDRESS) - .userId(USERNAME).password(PASSWORD).port(PORT).build(); + return ImmutableFileServerData.builder().serverAddress(XNF_ADDRESS).userId(USERNAME).password(PASSWORD) + .port(PORT).build(); } - @BeforeEach protected void setUp() throws Exception { clientUnderTest.setFtpsClient(ftpsClientMock); clientUnderTest.setKeyManagerUtils(keyManagerUtilsMock); 
clientUnderTest.setKeyStore(keyStoreWrapperMock); clientUnderTest.setTrustManagerFactory(trustManagerFactoryMock); - clientUnderTest.setFile(localFileMock); clientUnderTest.setFileSystemResource(fileResourceMock); - clientUnderTest.setOutputStream(outputStreamMock); clientUnderTest.setKeyCertPath(FTP_KEY_PATH); clientUnderTest.setKeyCertPassword(FTP_KEY_PASSWORD); @@ -109,11 +102,10 @@ public class FtpsClientTest { when(trustManagerFactoryMock.getTrustManagers()).thenReturn(new TrustManager[] {trustManagerMock}); when(ftpsClientMock.login(USERNAME, PASSWORD)).thenReturn(true); when(ftpsClientMock.getReplyCode()).thenReturn(HttpStatus.OK.value()); - File fileMock = mock(File.class); - when(localFileMock.getFile()).thenReturn(fileMock); - OutputStream osMock = mock(OutputStream.class); - when(outputStreamMock.getOutputStream(fileMock)).thenReturn(osMock); + when(ftpsClientMock.isConnected()).thenReturn(false, true); + when(ftpsClientMock.retrieveFile(ArgumentMatchers.eq(REMOTE_FILE_PATH), + ArgumentMatchers.any(OutputStream.class))).thenReturn(true); clientUnderTest.collectFile(REMOTE_FILE_PATH, LOCAL_FILE_PATH); @@ -133,10 +125,8 @@ public class FtpsClientTest { verify(ftpsClientMock).execPROT("P"); verify(ftpsClientMock).setFileType(FTP.BINARY_FILE_TYPE); verify(ftpsClientMock).setBufferSize(1024 * 1024); - verify(localFileMock).setPath(LOCAL_FILE_PATH); - verify(localFileMock, times(1)).createNewFile(); - verify(ftpsClientMock).retrieveFile(REMOTE_FILE_PATH, osMock); - verify(osMock, times(1)).close(); + verify(ftpsClientMock).retrieveFile(ArgumentMatchers.eq(REMOTE_FILE_PATH), + ArgumentMatchers.any(OutputStream.class)); verify(ftpsClientMock, times(1)).logout(); verify(ftpsClientMock, times(1)).disconnect(); verify(ftpsClientMock, times(2)).isConnected(); @@ -149,8 +139,8 @@ public class FtpsClientTest { .setCredentials(FTP_KEY_PATH, FTP_KEY_PASSWORD); when(ftpsClientMock.isConnected()).thenReturn(false); - assertThatThrownBy(() -> 
clientUnderTest.collectFile(REMOTE_FILE_PATH, LOCAL_FILE_PATH)) - .hasMessage("org.onap.dcaegen2.collectors.datafile.ssl.IKeyManagerUtils$KeyManagerException: java.security.GeneralSecurityException"); + assertThatThrownBy(() -> clientUnderTest.collectFile(REMOTE_FILE_PATH, LOCAL_FILE_PATH)).hasMessage( + "org.onap.dcaegen2.collectors.datafile.ssl.IKeyManagerUtils$KeyManagerException: java.security.GeneralSecurityException"); verify(ftpsClientMock).setNeedClientAuth(true); verify(keyManagerUtilsMock).setCredentials(FTP_KEY_PATH, FTP_KEY_PASSWORD); @@ -167,7 +157,7 @@ public class FtpsClientTest { doThrow(new KeyStoreException()).when(trustManagerFactoryMock).init(keyStoreMock); assertThatThrownBy(() -> clientUnderTest.collectFile(REMOTE_FILE_PATH, LOCAL_FILE_PATH)) - .hasMessage("Unable to trust xNF's CA, trustedCAPath java.security.KeyStoreException"); + .hasMessage("Unable to trust xNF's CA, trustedCAPath java.security.KeyStoreException"); } @Test @@ -203,7 +193,7 @@ public class FtpsClientTest { when(ftpsClientMock.getReplyCode()).thenReturn(FTPReply.BAD_COMMAND_SEQUENCE); assertThatThrownBy(() -> clientUnderTest.collectFile(REMOTE_FILE_PATH, LOCAL_FILE_PATH)) - .hasMessage("Unable to connect to xNF. 127.0.0.1 xNF reply code: 503"); + .hasMessage("Unable to connect to xNF. 
127.0.0.1 xNF reply code: 503"); verify(ftpsClientMock).setNeedClientAuth(true); verify(keyManagerUtilsMock).setCredentials(FTP_KEY_PATH, FTP_KEY_PASSWORD); @@ -230,7 +220,7 @@ public class FtpsClientTest { doThrow(new IOException()).when(ftpsClientMock).connect(XNF_ADDRESS, PORT); assertThatThrownBy(() -> clientUnderTest.collectFile(REMOTE_FILE_PATH, LOCAL_FILE_PATH)) - .hasMessage("Could not open connection: java.io.IOException"); + .hasMessage("Could not open connection: java.io.IOException"); verify(ftpsClientMock).setNeedClientAuth(true); verify(keyManagerUtilsMock).setCredentials(FTP_KEY_PATH, FTP_KEY_PASSWORD); @@ -254,10 +244,8 @@ public class FtpsClientTest { when(ftpsClientMock.login(USERNAME, PASSWORD)).thenReturn(true); when(ftpsClientMock.getReplyCode()).thenReturn(HttpStatus.OK.value()); - doThrow(new IOException()).when(localFileMock).createNewFile(); - - assertThatThrownBy(() -> clientUnderTest.collectFile(REMOTE_FILE_PATH, LOCAL_FILE_PATH)) - .hasMessage("Could not open connection: java.io.IOException"); + assertThatThrownBy(() -> clientUnderTest.collectFile(REMOTE_FILE_PATH, Paths.get(""))) + .hasMessage("Could not open connection: java.io.IOException: No such file or directory"); } @@ -269,14 +257,13 @@ public class FtpsClientTest { when(trustManagerFactoryMock.getTrustManagers()).thenReturn(new TrustManager[] {trustManagerMock}); when(ftpsClientMock.login(USERNAME, PASSWORD)).thenReturn(true); when(ftpsClientMock.getReplyCode()).thenReturn(HttpStatus.OK.value()); - File fileMock = mock(File.class); - when(localFileMock.getFile()).thenReturn(fileMock); - OutputStream osMock = mock(OutputStream.class); - when(outputStreamMock.getOutputStream(fileMock)).thenReturn(osMock); - doThrow(new DatafileTaskException("problemas")).when(ftpsClientMock).retrieveFile(REMOTE_FILE_PATH, osMock); + when(ftpsClientMock.isConnected()).thenReturn(false); + + doThrow(new 
IOException("problemas")).when(ftpsClientMock).retrieveFile(ArgumentMatchers.eq(REMOTE_FILE_PATH), + ArgumentMatchers.any(OutputStream.class)); assertThatThrownBy(() -> clientUnderTest.collectFile(REMOTE_FILE_PATH, LOCAL_FILE_PATH)) - .hasMessage("problemas"); + .hasMessage("Could not open connection: java.io.IOException: problemas"); verify(ftpsClientMock).setNeedClientAuth(true); verify(keyManagerUtilsMock).setCredentials(FTP_KEY_PATH, FTP_KEY_PASSWORD); @@ -294,9 +281,8 @@ public class FtpsClientTest { verify(ftpsClientMock).execPROT("P"); verify(ftpsClientMock).setFileType(FTP.BINARY_FILE_TYPE); verify(ftpsClientMock).setBufferSize(1024 * 1024); - verify(localFileMock).setPath(LOCAL_FILE_PATH); - verify(localFileMock, times(1)).createNewFile(); - verify(ftpsClientMock).retrieveFile(REMOTE_FILE_PATH, osMock); + verify(ftpsClientMock).retrieveFile(ArgumentMatchers.eq(REMOTE_FILE_PATH), + ArgumentMatchers.any(OutputStream.class)); verify(ftpsClientMock, times(2)).isConnected(); verifyNoMoreInteractions(ftpsClientMock); } diff --git a/datafile-dmaap-client/src/test/java/org/onap/dcaegen2/collectors/datafile/ftp/SftpClientTest.java b/datafile-dmaap-client/src/test/java/org/onap/dcaegen2/collectors/datafile/ftp/SftpClientTest.java index 7f32e8c3..90fb9336 100644 --- a/datafile-dmaap-client/src/test/java/org/onap/dcaegen2/collectors/datafile/ftp/SftpClientTest.java +++ b/datafile-dmaap-client/src/test/java/org/onap/dcaegen2/collectors/datafile/ftp/SftpClientTest.java @@ -17,9 +17,9 @@ package org.onap.dcaegen2.collectors.datafile.ftp; import static java.nio.charset.StandardCharsets.UTF_8; - import static org.apache.commons.io.IOUtils.toByteArray; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.junit.jupiter.api.Assertions.assertTrue; import com.github.stefanbirkner.fakesftpserver.rule.FakeSftpServerRule; @@ -52,7 +52,8 @@ public class SftpClientTest { public final 
FakeSftpServerRule sftpServer = new FakeSftpServerRule().addUser(USERNAME, PASSWORD); @Test - public void collectFile_withOKresponse() throws DatafileTaskException, IOException, JSchException, SftpException, Exception { + public void collectFile_withOKresponse() + throws DatafileTaskException, IOException, JSchException, SftpException, Exception { FileServerData expectedFileServerData = ImmutableFileServerData.builder().serverAddress("127.0.0.1") .userId(USERNAME).password(PASSWORD).port(sftpServer.getPort()).build(); SftpClient sftpClient = new SftpClient(expectedFileServerData); @@ -67,19 +68,14 @@ public class SftpClientTest { @Test public void collectFile_withWrongUserName_shouldFail() throws IOException, JSchException, SftpException { - FileServerData expectedFileServerData = ImmutableFileServerData.builder().serverAddress("127.0.0.1") - .userId("Wrong").password(PASSWORD).port(sftpServer.getPort()).build(); + FileServerData expectedFileServerData = + ImmutableFileServerData.builder().serverAddress("127.0.0.1").userId("Wrong").password(PASSWORD).build(); SftpClient sftpClient = new SftpClient(expectedFileServerData); sftpServer.putFile(REMOTE_DUMMY_FILE, DUMMY_CONTENT, UTF_8); - String errorMessage = ""; - try { - sftpClient.collectFile(REMOTE_DUMMY_FILE, LOCAL_DUMMY_FILE); - } catch (Exception e) { - errorMessage = e.getMessage(); - } - assertTrue(errorMessage.contains("Auth fail")); + assertThatThrownBy(() -> sftpClient.collectFile(REMOTE_DUMMY_FILE, LOCAL_DUMMY_FILE)) + .hasMessageContaining("Unable to get file from xNF"); } @Test |