Diffstat (limited to 'src/main/docker/logstash')
-rw-r--r-- | src/main/docker/logstash/Dockerfile             |  36 |
-rw-r--r-- | src/main/docker/logstash/certs/aafca.pem        |  26 |
-rw-r--r-- | src/main/docker/logstash/pipeline/logstash.conf | 269 |
3 files changed, 0 insertions, 331 deletions
diff --git a/src/main/docker/logstash/Dockerfile b/src/main/docker/logstash/Dockerfile
deleted file mode 100644
index 73988dc79..000000000
--- a/src/main/docker/logstash/Dockerfile
+++ /dev/null
@@ -1,36 +0,0 @@
-###
-# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2018 AT&T Intellectual Property. All rights
-#                                             reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-#
-###
-FROM docker.elastic.co/logstash/logstash-oss:6.6.2
-
-MAINTAINER "The Onap Team"
-LABEL Description="Logstash image with some plugins needed for the clamp dashboard"
-
-# Default aaf certificates
-COPY certs /certs.d/
-
-# remove default pipeline first
-COPY pipeline/logstash.conf /usr/share/logstash/pipeline/logstash.conf
-
-# add plugins needed by aggregation part of the pipeline
-RUN /usr/share/logstash/bin/logstash-plugin install logstash-filter-elasticsearch
-RUN /usr/share/logstash/bin/logstash-plugin install logstash-filter-prune
diff --git a/src/main/docker/logstash/certs/aafca.pem b/src/main/docker/logstash/certs/aafca.pem
deleted file mode 100644
index cf12ec4c6..000000000
--- a/src/main/docker/logstash/certs/aafca.pem
+++ /dev/null
@@ -1,26 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIEVDCCAjygAwIBAgIBAjANBgkqhkiG9w0BAQsFADAsMQ4wDAYDVQQLDAVPU0FB
-RjENMAsGA1UECgwET05BUDELMAkGA1UEBhMCVVMwHhcNMTgwNjA1MDg1MTQxWhcN
-MjMwNjA1MDg1MTQxWjBHMQswCQYDVQQGEwJVUzENMAsGA1UECgwET05BUDEOMAwG
-A1UECwwFT1NBQUYxGTAXBgNVBAMMEGludGVybWVkaWF0ZUNBXzEwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDOXCdZIoWM0EnEEw3qPiVMhAgNolWCTaLt
-eI2TjlTQdGDIcXdBZukHdNeOKYzOXRsLt6bLRtd5yARpn53EbzS/dgAyHuaz1HjE
-5IPWSFRg9SulfHUmcS+GBt1+KiMJTlOsw6wSA73H/PjjXBbWs/uRJTnaNmV3so7W
-DhNW6fHOrbom4p+3FucbB/QAM9b/3l/1LKnRgdXx9tekDnaKN5u3HVBmyOlRhaRp
-tscLUCT3jijoGAPRcYZybgrpa0z3iCWquibTO/eLwuO/Dn7yHWau9ZZAHGPBSn9f
-TiLKRYV55mNjr3zvs8diTPECFPW8w8sRIH3za1aKHgUC1gd87Yr3AgMBAAGjZjBk
-MB0GA1UdDgQWBBQa1FdycErTZ6nr4dxiMbKH0P7vqjAfBgNVHSMEGDAWgBRTVTPy
-S+vQUbHBeJrBKDF77+rtSTASBgNVHRMBAf8ECDAGAQH/AgEAMA4GA1UdDwEB/wQE
-AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAlA/RTPy5i09fJ4ytSAmAdytMwEwRaU9F
-dshG7LU9q95ODsuM79yJvV9+ISIJZRsBqf5PDv93bUCKKHIYGvR6kNd+n3yx/fce
-txDkC/tMj1T9D8TuDKAclGEO9K5+1roOQQFxr4SE6XKb/wjn8OMrCoJ75S0F3htF
-LKL85T77JeGeNgSk8JEsZvQvj32m0gv9rxi5jM/Zi5E2vxrBR9T1v3kVvlt6+PSF
-BoHXROk5HQmdHxnH+VYQtDHSwj9Xe9aoJMyL0WjYKd//8NUO+VACDOtK4Nia6gy9
-m/n9kMASMw6f9iF4n6t4902RWrRKTYM1CVu5wyVklVbEdE9i6Db4CpL9E8HpBUAP
-t44JiNzuFkDmSE/z5XuQIimDt6nzOaSF8pX2KHY2ICDLwpMNUvxzqXD9ECbdspiy
-JC2RGq8uARGGl6kQQBKDNO8SrO7rSBPANd1+LgqrKbCrHYfvFgkZPgT5MlQi+E1G
-LNT+i6fzZha9ed/L6yjl5Em71flJGFwRZl2pfErZRxp8pLPcznYyIpSjcwnqNCRC
-orhlp8nheiODC3oO3AFHDiFgUqvm8hgpnT2cPk2lpU2VY1TcZ8sW5qUDCxINIPcW
-u1SAsa87IJK3vEzPZfTCs/S6XThoqRfXj0c0Rahj7YFRi/PqIPY0ejwdtmZ9m9pZ
-8Lb0GYmlo44=
------END CERTIFICATE-----
diff --git a/src/main/docker/logstash/pipeline/logstash.conf b/src/main/docker/logstash/pipeline/logstash.conf
deleted file mode 100644
index 5c1d47d18..000000000
--- a/src/main/docker/logstash/pipeline/logstash.conf
+++ /dev/null
@@ -1,269 +0,0 @@
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-input {
-  http_poller {
-    urls => {
-      event_queue => {
-        method => get
-        url => "${dmaap_base_url}/events/${event_topic}/${dmaap_consumer_group}/${dmaap_consumer_id}?timeout=15000"
-        headers => {
-          Accept => "application/json"
-        }
-        add_field => { "topic" => "${event_topic}" }
-        type => "dmaap_event"
-      }
-      notification_queue => {
-        method => get
-        url => "${dmaap_base_url}/events/${notification_topic}/${dmaap_consumer_group}/${dmaap_consumer_id}?timeout=15000"
-        headers => {
-          Accept => "application/json"
-        }
-        add_field => { "topic" => "${notification_topic}" }
-        type => "dmaap_notification"
-      }
-      request_queue => {
-        method => get
-        url => "${dmaap_base_url}/events/${request_topic}/${dmaap_consumer_group}/${dmaap_consumer_id}?timeout=15000"
-        headers => {
-          Accept => "application/json"
-        }
-        add_field => { "topic" => "${request_topic}" }
-        type => "dmaap_request"
-      }
-    }
-    socket_timeout => 30
-    request_timeout => 30
-    codec => "plain"
-    schedule => { "every" => "1m" }
-    cacert => "/certs.d/aafca.pem"
-  }
-}
-
-input {
-  file {
-    path => [
-      "/log-input/*"
-    ]
-    type => "dmaap_log"
-    codec => "json"
-  }
-}
-
-filter {
-  if [type] != "dmaap_log" {
-    #only execute this section for dmaap events from http request
-    #it doesn't apply to dmaap events from log file
-
-    # avoid noise if no entry in the list
-    if [message] == "[]" {
-      drop { }
-    }
-
-    if [http_request_failure] or [@metadata][code] != "200" {
-      mutate {
-        add_tag => [ "error" ]
-      }
-    }
-
-    if "dmaap_source" in [tags] {
-      #
-      # Dmaap provides a json list, whose items are Strings containing the event
-      # provided to Dmaap, which itself is an escaped json.
-      #
-      # We first need to parse the json as we have to use the plaintext as it cannot
-      # work with list of events, then split that list into multiple string events,
-      # that we then transform into json.
-      #
-      json {
-        source => "[message]"
-        target => "message"
-      }
-      ruby {
-        code => "
-          for ev in event.get('message', [])
-            ev.set('@metadata', event.get('@metadata'))
-          end
-        "
-      }
-
-      split {
-        field => "message"
-      }
-      json {
-        source => "message"
-      }
-      mutate {
-        remove_field => [ "message" ]
-      }
-    }
-  }
-  #now start the common, to both http request and log file events, processing
-
-  #
-  # Some timestamps are expressed as milliseconds, some are in microseconds
-  #
-  if [closedLoopAlarmStart] {
-    ruby {
-      code => "
-        if event.get('closedLoopAlarmStart').to_s.to_i(10) > 9999999999999
-          event.set('closedLoopAlarmStart', event.get('closedLoopAlarmStart').to_s.to_i(10) / 1000)
-        else
-          event.set('closedLoopAlarmStart', event.get('closedLoopAlarmStart').to_s.to_i(10))
-        end
-      "
-    }
-    date {
-      match => [ "closedLoopAlarmStart", UNIX_MS ]
-      target => "closedLoopAlarmStart"
-    }
-  }
-
-  if [closedLoopAlarmEnd] {
-    ruby {
-      code => "
-        if event.get('closedLoopAlarmEnd').to_s.to_i(10) > 9999999999999
-          event.set('closedLoopAlarmEnd', event.get('closedLoopAlarmEnd').to_s.to_i(10) / 1000)
-        else
-          event.set('closedLoopAlarmEnd', event.get('closedLoopAlarmEnd').to_s.to_i(10))
-        end
-      "
-    }
-    date {
-      match => [ "closedLoopAlarmEnd", UNIX_MS ]
-      target => "closedLoopAlarmEnd"
-    }
-
-  }
-
-
-  #
-  # Notification time are expressed under the form "yyyy-MM-dd HH:mm:ss", which
-  # is close to ISO8601, but lacks of T as spacer: "yyyy-MM-ddTHH:mm:ss"
-  #
-  if [notificationTime] {
-    mutate {
-      gsub => [ "notificationTime", " ", "T" ]
-    }
-    date {
-      match => [ "notificationTime", ISO8601 ]
-      target => "notificationTime"
-    }
-  }
-
-
-  #
-  # Renaming some fields for readability
-  #
-  if [AAI][generic-vnf.vnf-name] {
-    mutate {
-      add_field => { "vnfName" => "%{[AAI][generic-vnf.vnf-name]}" }
-    }
-  }
-  if [AAI][generic-vnf.vnf-type] {
-    mutate {
-      add_field => { "vnfType" => "%{[AAI][generic-vnf.vnf-type]}" }
-    }
-  }
-  if [AAI][vserver.vserver-name] {
-    mutate {
-      add_field => { "vmName" => "%{[AAI][vserver.vserver-name]}" }
-    }
-  }
-  if [AAI][complex.city] {
-    mutate {
-      add_field => { "locationCity" => "%{[AAI][complex.city]}" }
-    }
-  }
-  if [AAI][complex.state] {
-    mutate {
-      add_field => { "locationState" => "%{[AAI][complex.state]}" }
-    }
-  }
-
-
-  #
-  # Adding some flags to ease aggregation
-  #
-  if [closedLoopEventStatus] =~ /(?i)ABATED/ {
-    mutate {
-      add_field => { "flagAbated" => "1" }
-    }
-  }
-  if [notification] =~ /^.*?(?:\b|_)FINAL(?:\b|_).*?(?:\b|_)FAILURE(?:\b|_).*?$/ {
-    mutate {
-      add_field => { "flagFinalFailure" => "1" }
-    }
-  }
-
-
-  if "error" not in [tags] {
-    #
-    # Creating data for a secondary index
-    #
-    clone {
-      clones => [ "event-cl-aggs" ]
-      add_tag => [ "event-cl-aggs" ]
-    }
-
-    if "event-cl-aggs" in [tags] {
-      #
-      # we only need a few fields for aggregations; remove all fields from clone except :
-      # vmName,vnfName,vnfType,requestID,closedLoopAlarmStart, closedLoopControlName,closedLoopAlarmEnd,abated,nbrDmaapevents,finalFailure
-      #
-      prune {
-        whitelist_names => ["^@.*$","^topic$","^type$","^tags$","^flagFinalFailure$","^flagAbated$","^locationState$","^locationCity$","^vmName$","^vnfName$","^vnfType$","^requestID$","^closedLoopAlarmStart$","^closedLoopControlName$","^closedLoopAlarmEnd$","^target$","^target_type$","^triggerSourceName$","^policyScope$","^policyName$","^policyVersion$"]
-      }
-
-    }
-  }
-}
-
-output {
-  stdout {
-    codec => rubydebug { metadata => true }
-  }
-
-  if "error" in [tags] {
-    elasticsearch {
-      codec => "json"
-      hosts => ["${elasticsearch_base_url}"]
-      user => "${LOGSTASH_USR}"
-      password => "${LOGSTASH_PWD}"
-      index => "errors-%{+YYYY.MM.DD}"
-      doc_as_upsert => true
-    }
-
-  } else if "event-cl-aggs" in [tags] {
-    elasticsearch {
-      codec => "json"
-      hosts => ["${elasticsearch_base_url}"]
-      user => "${LOGSTASH_USR}"
-      password => "${LOGSTASH_PWD}"
-      document_id => "%{requestID}"
-      index => "events-cl-%{+YYYY.MM.DD}" # creates daily indexes for control loop
-      doc_as_upsert => true
-      action => "update"
-    }
-
-  } else {
-    elasticsearch {
-      codec => "json"
-      hosts => ["${elasticsearch_base_url}"]
-      user => "${LOGSTASH_USR}"
-      password => "${LOGSTASH_PWD}"
-      index => "events-raw-%{+YYYY.MM.DD}" # creates daily indexes
-      doc_as_upsert => true
-    }
-  }
-}