diff options
-rw-r--r-- | pom.xml | 2 |
-rw-r--r-- | src/main/docker/logstash/pipeline/logstash.conf | 88 |
2 files changed, 48 insertions, 42 deletions
@@ -397,7 +397,7 @@ <dependency> <groupId>org.onap.policy.engine</groupId> <artifactId>PolicyEngineAPI</artifactId> - <version>1.3.0</version> + <version>1.3.1</version> <exclusions> <exclusion> <groupId>com.google.guava</groupId> diff --git a/src/main/docker/logstash/pipeline/logstash.conf b/src/main/docker/logstash/pipeline/logstash.conf index e6cee9c19..6fe9d9691 100644 --- a/src/main/docker/logstash/pipeline/logstash.conf +++ b/src/main/docker/logstash/pipeline/logstash.conf @@ -61,48 +61,54 @@ input { } filter { - # avoid noise if no entry in the list - if [message] == "[]" { - drop { } - } + if [type] != "dmaap_log" { + #only execute this section for dmaap events from http request + #it doesn't apply to dmaap events from log file - if [http_request_failure] or [@metadata][code] != "200" { - mutate { - add_tag => [ "error" ] - } - } + # avoid noise if no entry in the list + if [message] == "[]" { + drop { } + } - if "dmaap_source" in [tags] { - # - # Dmaap provides a json list, whose items are Strings containing the event - # provided to Dmaap, which itself is an escaped json. - # - # We first need to parse the json as we have to use the plaintext as it cannot - # work with list of events, then split that list into multiple string events, - # that we then transform into json. - # - json { - source => "[message]" - target => "message" - } - ruby { - code => " - for ev in event.get('message', []) - ev.set('@metadata', event.get('@metadata')) - end - " - } - - split { - field => "message" - } - json { - source => "message" - } - mutate { - remove_field => [ "message" ] - } - } + if [http_request_failure] or [@metadata][code] != "200" { + mutate { + add_tag => [ "error" ] + } + } + + if "dmaap_source" in [tags] { + # + # Dmaap provides a json list, whose items are Strings containing the event + # provided to Dmaap, which itself is an escaped json. 
+ # + # We first need to parse the json as we have to use the plaintext as it cannot + # work with list of events, then split that list into multiple string events, + # that we then transform into json. + # + json { + source => "[message]" + target => "message" + } + ruby { + code => " + for ev in event.get('message', []) + ev.set('@metadata', event.get('@metadata')) + end + " + } + + split { + field => "message" + } + json { + source => "message" + } + mutate { + remove_field => [ "message" ] + } + } + } + #now start the common, to both http request and log file events, processing # # Some timestamps are expressed as milliseconds, some are in microseconds @@ -250,7 +256,7 @@ output { elasticsearch { codec => "json" hosts => ["${elasticsearch_base_url}"] - index => "events-%{+YYYY.MM.DD}" # creates daily indexes + index => "events-raw-%{+YYYY.MM.DD}" # creates daily indexes doc_as_upsert => true } } |