Diffstat (limited to 'src/main/java/org/openecomp/dcae/commonFunction')
-rw-r--r--  src/main/java/org/openecomp/dcae/commonFunction/CommonStartup.java          653
-rw-r--r--  src/main/java/org/openecomp/dcae/commonFunction/CustomExceptionLoader.java   240
-rw-r--r--  src/main/java/org/openecomp/dcae/commonFunction/DmaapPropertyReader.java     213
-rw-r--r--  src/main/java/org/openecomp/dcae/commonFunction/EventProcessor.java           79
-rw-r--r--  src/main/java/org/openecomp/dcae/commonFunction/EventPublisher.java          305
-rw-r--r--  src/main/java/org/openecomp/dcae/commonFunction/VESLogger.java               170
6 files changed, 985 insertions, 675 deletions
diff --git a/src/main/java/org/openecomp/dcae/commonFunction/CommonStartup.java b/src/main/java/org/openecomp/dcae/commonFunction/CommonStartup.java
index 7c1ff22..869a5c7 100644
--- a/src/main/java/org/openecomp/dcae/commonFunction/CommonStartup.java
+++ b/src/main/java/org/openecomp/dcae/commonFunction/CommonStartup.java
@@ -1,324 +1,341 @@
-/*-
+/*-
* ============LICENSE_START=======================================================
* PROJECT
* ================================================================================
* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
* ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
* limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.commonFunction;
-
-
-
-import java.io.IOException;
-
-import java.net.URL;
-import java.nio.charset.Charset;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import java.util.Queue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import javax.servlet.ServletException;
-
-import org.apache.catalina.LifecycleException;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.openecomp.dcae.restapi.RestfulCollectorServlet;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-import com.att.nsa.apiServer.ApiServer;
-import com.att.nsa.apiServer.ApiServerConnector;
-import com.att.nsa.apiServer.endpoints.NsaBaseEndpoint;
-import com.att.nsa.cmdLine.NsaCommandLineUtil;
-import com.att.nsa.drumlin.service.framework.DrumlinServlet;
-import com.att.nsa.drumlin.till.nv.rrNvReadable;
-import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile;
-import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
-import com.att.nsa.drumlin.till.nv.impl.nvReadableTable;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException;
-import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.github.fge.jsonschema.exceptions.ProcessingException;
-import com.github.fge.jsonschema.main.JsonSchema;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
-import com.github.fge.jsonschema.report.ProcessingMessage;
-import com.github.fge.jsonschema.report.ProcessingReport;
-import com.github.fge.jsonschema.util.JsonLoader;
-
-
-public class CommonStartup extends NsaBaseEndpoint implements Runnable
-{
- public static final String kConfig = "c";
-
- public static final String kSetting_Port = "collector.service.port";
- public static final int kDefault_Port = 8080;
-
- public static final String kSetting_SecurePort = "collector.service.secure.port";
- public static final int kDefault_SecurePort = -1;
-
- public static final String kSetting_KeystorePassfile = "collector.keystore.passwordfile";
- public static final String kDefault_KeystorePassfile = "../etc/passwordfile";
- public static final String kSetting_KeystoreFile = "collector.keystore.file.location";
- public static final String kDefault_KeystoreFile = "../etc/keystore";
- public static final String kSetting_KeyAlias = "collector.keystore.alias";
- public static final String kDefault_KeyAlias = "tomcat";
-
- public static final String kSetting_ProcessingConfigs = "collector.hpprocessing";
- protected static final String[] kDefault_ProcessingConfigs = new String[] { "etc/HPProcessingConfig.json" };
-
- public static final String kSetting_MaxQueuedEvents = "collector.inputQueue.maxPending";
- public static final int kDefault_MaxQueuedEvents = 1024*4;
-
- public static final String kSetting_schemaValidator = "collector.schema.checkflag";
- public static final int kDefault_schemaValidator = -1;
-
- public static final String kSetting_schemaFile = "collector.schema.file";
- public static final String kSetting_ExceptionConfig = "exceptionConfig";
-
- public static final String kSetting_dmaapStreamid = "collector.dmaap.streamid";
-
- public static final String kSetting_authflag = "header.authflag";
- public static final int kDefault_authflag = 0;
-
- public static final String kSetting_authid = "header.authid";
- public static final String kSetting_authpwd = "header.authpwd";
- public static final String kSetting_authstore = "header.authstore";
- public static final String kSetting_authlist = "header.authlist";
-
-
-
- public static final Logger inlog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.input" );
- public static final Logger oplog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.output");
- public static final Logger eplog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.error");
- public static final Logger metriclog = LoggerFactory.getLogger ("com.att.ecomp.metrics" );
-
- public static int schema_Validatorflag = -1;
- public static int authflag = 1;
- public static String schemaFile = null;
- public static String exceptionConfig = null;
- public static String cambriaConfigFile = null;
- private boolean listnerstatus = false;
- static String streamid = null;
-
- private CommonStartup(rrNvReadable settings) throws loadException, missingReqdSetting, IOException, rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, InterruptedException
- {
- final List<ApiServerConnector> connectors = new LinkedList<ApiServerConnector> ();
-
- if (settings.getInt ( kSetting_Port, kDefault_Port ) > 0)
- {
- // http service
- connectors.add (
- new ApiServerConnector.Builder ( settings.getInt ( kSetting_Port, kDefault_Port ) )
- .secure ( false )
- .build ()
- );
- }
-
- // optional https service
- final int securePort = settings.getInt(kSetting_SecurePort, kDefault_SecurePort);
- final String keystoreFile = settings.getString(kSetting_KeystoreFile, kDefault_KeystoreFile);
- final String keystorePasswordFile = settings.getString(kSetting_KeystorePassfile, kDefault_KeystorePassfile);
- final String keyAlias = settings.getString (kSetting_KeyAlias, kDefault_KeyAlias);
-
-
- if (securePort > 0)
- {
- final String kSetting_KeystorePass = readFile(keystorePasswordFile, Charset.defaultCharset());
- connectors.add(new ApiServerConnector.Builder(securePort)
- .secure(true)
- .keystorePassword(kSetting_KeystorePass)
- .keystoreFile(keystoreFile)
- .keyAlias(keyAlias)
- .build());
-
- }
-
- //Reading other config properties
-
- schema_Validatorflag = settings.getInt(kSetting_schemaValidator, kDefault_schemaValidator );
- if (schema_Validatorflag > 0){
- schemaFile = settings.getString(kSetting_schemaFile,null);
- }
- exceptionConfig = settings.getString(kSetting_ExceptionConfig, null);
- authflag = settings.getInt(CommonStartup.kSetting_authflag, CommonStartup.kDefault_authflag );
- String [] currentconffile = settings.getStrings (CommonStartup.kSetting_ProcessingConfigs, CommonStartup.kDefault_ProcessingConfigs ) ;
- cambriaConfigFile= currentconffile[0] ;
- streamid = settings.getString(kSetting_dmaapStreamid,null);
-
- fTomcatServer = new ApiServer.Builder(connectors, new RestfulCollectorServlet(settings))
- .encodeSlashes(true)
- .name("collector")
- .build();
-
-
- //Load override exception map
- CustomExceptionLoader.LoadMap();
- setListnerstatus(true);
- }
-
- public static void main ( String[] args )
- {
- try
- {
- // process command line arguments
- final Map<String, String> argMap = NsaCommandLineUtil.processCmdLine ( args, true );
- final String config = NsaCommandLineUtil.getSetting ( argMap, kConfig, "collector.properties" );
- final URL settingStream = DrumlinServlet.findStream ( config, CommonStartup.class );
-
- final nvReadableStack settings = new nvReadableStack ();
- settings.push ( new nvPropertiesFile ( settingStream ) );
- settings.push ( new nvReadableTable ( argMap ) );
-
- fProcessingInputQueue = new LinkedBlockingQueue<JSONObject> (CommonStartup.kDefault_MaxQueuedEvents);
- CommonStartup cs= new CommonStartup ( settings );
-
- Thread csmain = new Thread(cs);
- csmain.start();
-
- EventProcessor ep = new EventProcessor ();
- Thread epThread=new Thread(ep);
- epThread.start();
-
- //cs.startAndAwait ();
-
- }
- catch ( loadException | missingReqdSetting | IOException | invalidSettingValue | ServletException | InterruptedException e )
- {
- CommonStartup.eplog.error("FATAL_STARTUP_ERROR" + e.getMessage() );
- throw new RuntimeException ( e );
- }
- }
-
- public void run() {
- try {
- fTomcatServer.start ();
- } catch (LifecycleException | IOException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- fTomcatServer.await ();
- }
-
- public boolean isListnerstatus() {
- return listnerstatus;
- }
-
- public void setListnerstatus(boolean listnerstatus) {
- this.listnerstatus = listnerstatus;
- }
- public static Queue<JSONObject> getProcessingInputQueue ()
- {
- return fProcessingInputQueue;
- }
-
- public static class QueueFullException extends Exception
- {
- private static final long serialVersionUID = 1L;
- }
-
-
- public static void handleEvents ( JSONArray a ) throws QueueFullException, JSONException, IOException
- {
- final Queue<JSONObject> queue = getProcessingInputQueue ();
- try
- {
-
- CommonStartup.metriclog.info("EVENT_PUBLISH_START" );
- for (int i = 0; i < a.length(); i++) {
- if ( !queue.offer ( a.getJSONObject(i) ) ) {
- throw new QueueFullException ();
- }
-
- }
- log.debug("CommonStartup.handleEvents:EVENTS has been published successfully!");
- CommonStartup.metriclog.info("EVENT_PUBLISH_END");
- //ecomplogger.debug(secloggerMessageEnum.SEC_COLLECT_AND_PULIBISH_SUCCESS);
-
- }
- catch ( JSONException e ){
- throw e;
-
- }
- }
-
-
- static String readFile(String path, Charset encoding)
- throws IOException
- {
- byte[] encoded = Files.readAllBytes(Paths.get(path));
- String pwd = new String(encoded);
- return pwd.substring(0,pwd.length()-1);
- }
-
-
- public static String schemavalidate( String jsonData, String jsonSchema) {
- ProcessingReport report = null;
- String result = "false";
-
- try {
- //System.out.println("Applying schema: @<@<"+jsonSchema+">@>@ to data: #<#<"+jsonData+">#>#");
- log.trace("Schema validation for event:" + jsonData);
- JsonNode schemaNode = JsonLoader.fromString(jsonSchema);
- JsonNode data = JsonLoader.fromString(jsonData);
- JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
- JsonSchema schema = factory.getJsonSchema(schemaNode);
- report = schema.validate(data);
- } catch (JsonParseException e) {
- log.error("schemavalidate:JsonParseException for event:" + jsonData );
- System.out.println(e.getMessage());
- return e.getMessage().toString();
- } catch (ProcessingException e) {
- log.error("schemavalidate:Processing exception for event:" + jsonData );
- System.out.println(e.getMessage());
- return e.getMessage().toString();
- } catch (IOException e) {
- log.error("schemavalidate:IO exception; something went wrong trying to read json data for event:" + jsonData);
- System.out.println(e.getMessage());
- return e.getMessage().toString();
- }
- if (report != null) {
- Iterator<ProcessingMessage> iter = report.iterator();
- while (iter.hasNext()) {
- ProcessingMessage pm = iter.next();
- log.trace("Processing Message: "+pm.getMessage());
- }
- result = String.valueOf(report.isSuccess());
- }
- try {
- log.trace("Validation Result:" +result + " Validation report:" + report);
- }
- catch (NullPointerException e){
- log.error("schemavalidate:NullpointerException on report");
- }
- return result;
- }
-
-
-
- static LinkedBlockingQueue<JSONObject> fProcessingInputQueue;
- private static ApiServer fTomcatServer = null;
- private static final Logger log = LoggerFactory.getLogger ( CommonStartup.class );
-}
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.dcae.commonFunction;
+
+
+
+import java.io.IOException;
+
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import java.util.Queue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import javax.servlet.ServletException;
+
+import org.apache.catalina.LifecycleException;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.openecomp.dcae.restapi.RestfulCollectorServlet;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+import com.att.nsa.apiServer.ApiServer;
+import com.att.nsa.apiServer.ApiServerConnector;
+import com.att.nsa.apiServer.endpoints.NsaBaseEndpoint;
+import com.att.nsa.cmdLine.NsaCommandLineUtil;
+import com.att.nsa.drumlin.service.framework.DrumlinServlet;
+import com.att.nsa.drumlin.till.nv.rrNvReadable;
+import com.att.nsa.drumlin.till.nv.impl.nvPropertiesFile;
+import com.att.nsa.drumlin.till.nv.impl.nvReadableStack;
+import com.att.nsa.drumlin.till.nv.impl.nvReadableTable;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.invalidSettingValue;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.loadException;
+import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.github.fge.jsonschema.exceptions.ProcessingException;
+import com.github.fge.jsonschema.main.JsonSchema;
+import com.github.fge.jsonschema.main.JsonSchemaFactory;
+import com.github.fge.jsonschema.report.ProcessingMessage;
+import com.github.fge.jsonschema.report.ProcessingReport;
+import com.github.fge.jsonschema.util.JsonLoader;
+
+
+public class CommonStartup extends NsaBaseEndpoint implements Runnable
+{
+ public static final String kConfig = "c";
+
+ public static final String kSetting_Port = "collector.service.port";
+ public static final int kDefault_Port = 8080;
+
+ public static final String kSetting_SecurePort = "collector.service.secure.port";
+ public static final int kDefault_SecurePort = -1;
+
+ public static final String kSetting_KeystorePassfile = "collector.keystore.passwordfile";
+ public static final String kDefault_KeystorePassfile = "../etc/passwordfile";
+ public static final String kSetting_KeystoreFile = "collector.keystore.file.location";
+ public static final String kDefault_KeystoreFile = "../etc/keystore";
+ public static final String kSetting_KeyAlias = "collector.keystore.alias";
+ public static final String kDefault_KeyAlias = "tomcat";
+
+ public static final String kSetting_DmaapConfigs = "collector.dmaapfile";
+ protected static final String[] kDefault_DmaapConfigs = new String[] { "/etc/DmaapConfig.json" };
+
+ public static final String kSetting_MaxQueuedEvents = "collector.inputQueue.maxPending";
+ public static final int kDefault_MaxQueuedEvents = 1024*4;
+
+ public static final String kSetting_schemaValidator = "collector.schema.checkflag";
+ public static final int kDefault_schemaValidator = -1;
+
+ public static final String kSetting_schemaFile = "collector.schema.file";
+ public static final String kSetting_ExceptionConfig = "exceptionConfig";
+
+ public static final String kSetting_dmaapStreamid = "collector.dmaap.streamid";
+
+ public static final String kSetting_authflag = "header.authflag";
+ public static final int kDefault_authflag = -1;
+
+ public static final String kSetting_authid = "header.authid";
+ public static final String kSetting_authpwd = "header.authpwd";
+ public static final String kSetting_authstore = "header.authstore";
+ public static final String kSetting_authlist = "header.authlist";
+
+
+
+ public static final Logger inlog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.input" );
+ public static final Logger oplog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.output");
+ public static final Logger eplog = LoggerFactory.getLogger ("org.openecomp.dcae.commonFunction.error");
+ public static final Logger metriclog = LoggerFactory.getLogger ("com.att.ecomp.metrics" );
+
+ public static int schema_Validatorflag = -1;
+ public static int authflag = 1;
+ public static String schemaFile = null;
+ public static String exceptionConfig = null;
+ public static String cambriaConfigFile = null;
+ private boolean listnerstatus = false;
+ static String streamid = null;
+
+ private CommonStartup(rrNvReadable settings) throws loadException, missingReqdSetting, IOException, rrNvReadable.missingReqdSetting, rrNvReadable.invalidSettingValue, ServletException, InterruptedException
+ {
+ final List<ApiServerConnector> connectors = new LinkedList<ApiServerConnector> ();
+
+ if (settings.getInt ( kSetting_Port, kDefault_Port ) > 0)
+ {
+ // http service
+ connectors.add (
+ new ApiServerConnector.Builder ( settings.getInt ( kSetting_Port, kDefault_Port ) )
+ .secure ( false )
+ .build ()
+ );
+ }
+
+ // optional https service
+ final int securePort = settings.getInt(kSetting_SecurePort, kDefault_SecurePort);
+ final String keystoreFile = settings.getString(kSetting_KeystoreFile, kDefault_KeystoreFile);
+ final String keystorePasswordFile = settings.getString(kSetting_KeystorePassfile, kDefault_KeystorePassfile);
+ final String keyAlias = settings.getString (kSetting_KeyAlias, kDefault_KeyAlias);
+
+
+ if (securePort > 0)
+ {
+ final String kSetting_KeystorePass = readFile(keystorePasswordFile, Charset.defaultCharset());
+ connectors.add(new ApiServerConnector.Builder(securePort)
+ .secure(true)
+ .keystorePassword(kSetting_KeystorePass)
+ .keystoreFile(keystoreFile)
+ .keyAlias(keyAlias)
+ .build());
+
+ }
+
+ //Reading other config properties
+
+ schema_Validatorflag = settings.getInt(kSetting_schemaValidator, kDefault_schemaValidator );
+ if (schema_Validatorflag > 0){
+ schemaFile = settings.getString(kSetting_schemaFile,null);
+ }
+ exceptionConfig = settings.getString(kSetting_ExceptionConfig, null);
+ authflag = settings.getInt(CommonStartup.kSetting_authflag, CommonStartup.kDefault_authflag );
+ String [] currentconffile = settings.getStrings (CommonStartup.kSetting_DmaapConfigs, CommonStartup.kDefault_DmaapConfigs ) ;
+ cambriaConfigFile= currentconffile[0] ;
+ streamid = settings.getString(kSetting_dmaapStreamid,null);
+
+ fTomcatServer = new ApiServer.Builder(connectors, new RestfulCollectorServlet(settings))
+ .encodeSlashes(true)
+ .name("collector")
+ .build();
+
+
+ //Load override exception map
+ CustomExceptionLoader.LoadMap();
+ setListnerstatus(true);
+ }
+
+ public static void main ( String[] args )
+ {
+ ExecutorService executor = null;
+ try
+ {
+ // process command line arguments
+ final Map<String, String> argMap = NsaCommandLineUtil.processCmdLine ( args, true );
+ final String config = NsaCommandLineUtil.getSetting ( argMap, kConfig, "collector.properties" );
+ final URL settingStream = DrumlinServlet.findStream ( config, CommonStartup.class );
+
+ final nvReadableStack settings = new nvReadableStack ();
+ settings.push ( new nvPropertiesFile ( settingStream ) );
+ settings.push ( new nvReadableTable ( argMap ) );
+
+ fProcessingInputQueue = new LinkedBlockingQueue<JSONObject> (CommonStartup.kDefault_MaxQueuedEvents);
+
+ VESLogger.setUpEcompLogging();
+
+ CommonStartup cs= new CommonStartup ( settings );
+
+ Thread csmain = new Thread(cs);
+ csmain.start();
+
+
+ EventProcessor ep = new EventProcessor ();
+ //Thread epThread=new Thread(ep);
+ //epThread.start();
+ executor = Executors.newFixedThreadPool(20);
+ executor.execute(ep);
+
+ }
+ catch ( loadException | missingReqdSetting | IOException | invalidSettingValue | ServletException | InterruptedException e )
+ {
+ CommonStartup.eplog.error("FATAL_STARTUP_ERROR" + e.getMessage() );
+ throw new RuntimeException ( e );
+ }
+ finally
+ {
+ // This will make the executor accept no new threads
+ // and finish all existing threads in the queue
+ if (executor != null){
+ executor.shutdown();
+ }
+
+ }
+ }
+
+ public void run() {
+ try {
+ fTomcatServer.start ();
+ } catch (LifecycleException | IOException e) {
+
+ e.printStackTrace();
+ }
+ fTomcatServer.await ();
+ }
+
+ public boolean isListnerstatus() {
+ return listnerstatus;
+ }
+
+ public void setListnerstatus(boolean listnerstatus) {
+ this.listnerstatus = listnerstatus;
+ }
+ public static Queue<JSONObject> getProcessingInputQueue ()
+ {
+ return fProcessingInputQueue;
+ }
+
+ public static class QueueFullException extends Exception
+ {
+ private static final long serialVersionUID = 1L;
+ }
+
+
+ public static void handleEvents ( JSONArray a ) throws QueueFullException, JSONException, IOException
+ {
+ final Queue<JSONObject> queue = getProcessingInputQueue ();
+ try
+ {
+
+ CommonStartup.metriclog.info("EVENT_PUBLISH_START" );
+ for (int i = 0; i < a.length(); i++) {
+ if ( !queue.offer ( a.getJSONObject(i) ) ) {
+ throw new QueueFullException ();
+ }
+
+ }
+ log.debug("CommonStartup.handleEvents:EVENTS has been published successfully!");
+ CommonStartup.metriclog.info("EVENT_PUBLISH_END");
+ //ecomplogger.debug(secloggerMessageEnum.SEC_COLLECT_AND_PULIBISH_SUCCESS);
+
+ }
+ catch ( JSONException e ){
+ throw e;
+
+ }
+ }
+
+
+ static String readFile(String path, Charset encoding)
+ throws IOException
+ {
+ byte[] encoded = Files.readAllBytes(Paths.get(path));
+ String pwd = new String(encoded);
+ return pwd.substring(0,pwd.length()-1);
+ }
+
+
+ public static String schemavalidate( String jsonData, String jsonSchema) {
+ ProcessingReport report = null;
+ String result = "false";
+
+ try {
+ //System.out.println("Applying schema: @<@<"+jsonSchema+">@>@ to data: #<#<"+jsonData+">#>#");
+ log.trace("Schema validation for event:" + jsonData);
+ JsonNode schemaNode = JsonLoader.fromString(jsonSchema);
+ JsonNode data = JsonLoader.fromString(jsonData);
+ JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
+ JsonSchema schema = factory.getJsonSchema(schemaNode);
+ report = schema.validate(data);
+ } catch (JsonParseException e) {
+ log.error("schemavalidate:JsonParseException for event:" + jsonData );
+ System.out.println(e.getMessage());
+ return e.getMessage().toString();
+ } catch (ProcessingException e) {
+ log.error("schemavalidate:Processing exception for event:" + jsonData );
+ System.out.println(e.getMessage());
+ return e.getMessage().toString();
+ } catch (IOException e) {
+ log.error("schemavalidate:IO exception; something went wrong trying to read json data for event:" + jsonData);
+ System.out.println(e.getMessage());
+ return e.getMessage().toString();
+ }
+ if (report != null) {
+ Iterator<ProcessingMessage> iter = report.iterator();
+ while (iter.hasNext()) {
+ ProcessingMessage pm = iter.next();
+ log.trace("Processing Message: "+pm.getMessage());
+ }
+ result = String.valueOf(report.isSuccess());
+ }
+ try {
+ log.debug("Validation Result:" +result + " Validation report:" + report);
+ }
+ catch (NullPointerException e){
+ log.error("schemavalidate:NullpointerException on report");
+ }
+ return result;
+ }
+
+
+
+ static LinkedBlockingQueue<JSONObject> fProcessingInputQueue;
+ private static ApiServer fTomcatServer = null;
+ private static final Logger log = LoggerFactory.getLogger ( CommonStartup.class );
+}
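
Note: the settings read by the constructor above come from the properties file named by the "c" command-line setting (default collector.properties), plus command-line overrides. A hypothetical sketch of such a file follows; the keys are the ones defined in CommonStartup, while the values are illustrative assumptions except where they repeat the kDefault_* constants.

# collector.properties sketch -- values are illustrative only
collector.service.port=8080
collector.service.secure.port=-1
collector.keystore.file.location=../etc/keystore
collector.keystore.passwordfile=../etc/passwordfile
collector.keystore.alias=tomcat
collector.dmaapfile=/etc/DmaapConfig.json
collector.inputQueue.maxPending=4096
# a value > 0 enables schema validation (see the schema_Validatorflag check above)
collector.schema.checkflag=1
collector.schema.file=./etc/CommonEventFormat.json
exceptionConfig=./etc/ExceptionConfig.json
# domain=streamId list as parsed by EventProcessor: domain=id[,id]|domain=id...
collector.dmaap.streamid=fault=sec_fault|syslog=sec_syslog
header.authflag=-1
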
diff --git a/src/main/java/org/openecomp/dcae/commonFunction/CustomExceptionLoader.java b/src/main/java/org/openecomp/dcae/commonFunction/CustomExceptionLoader.java
index 0adf7b4..245f9b4 100644
--- a/src/main/java/org/openecomp/dcae/commonFunction/CustomExceptionLoader.java
+++ b/src/main/java/org/openecomp/dcae/commonFunction/CustomExceptionLoader.java
@@ -1,121 +1,131 @@
-/*-
+/*-
* ============LICENSE_START=======================================================
* PROJECT
* ================================================================================
* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
* ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
* limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.commonFunction;
-
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.util.HashMap;
-
-import java.util.Map.Entry;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonIOException;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
-import com.google.gson.JsonSyntaxException;
-
-
-public class CustomExceptionLoader {
-
- public static HashMap<String, JsonArray> map = null;
- private static final Logger log = LoggerFactory.getLogger ( CustomExceptionLoader.class );
-
- //For standalone test
- //LoadMap Invoked from servletSetup
- /*
- public static void main(String[] args) {
-
- System.out.println("CustomExceptionLoader.main --> Arguments -- ExceptionConfig file: " + args[0] + "StatusCode:" + args[1]+ " Error Msg:" + args[2]);
- CommonStartup.exceptionConfig = args[0];
-
- //Read the Custom exception JSON file into map
- LoadMap();
- System.out.println("CustomExceptionLoader.main --> Map info post LoadMap:" + map);
-
- String[] str= LookupMap(args[1],args[2]);
- if (! (str==null)) {
- System.out.println("CustomExceptionLoader.main --> Return from lookup function" + str[0] + "value:" + str[1]);
- }
-
- }
- */
-
- public static void LoadMap () {
-
- map = new HashMap<String, JsonArray>();
-
- try {
- JsonElement root = null;
- root = new JsonParser().parse(new FileReader(CommonStartup.exceptionConfig));
- JsonObject jsonObject = root.getAsJsonObject().get("code").getAsJsonObject();
-
- for (Entry<String, JsonElement> entry : jsonObject.entrySet()) {
- map.put(entry.getKey(), (JsonArray) entry.getValue());
- }
-
- log.debug("CustomExceptionLoader.LoadMap --> Map loaded - " + map);
- } catch (JsonIOException e) {
- e.printStackTrace();
- } catch (JsonSyntaxException e) {
- e.printStackTrace();
- } catch (FileNotFoundException e) {
- e.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- public static String[] LookupMap (String error, String errormsg) {
-
- String[] retarray = null;
-
- log.debug("CustomExceptionLoader.LookupMap -->" + " HTTP StatusCode:" + error + " Msg:" + errormsg);
- try{
-
- JsonArray jarray = map.get(error);
- for (int i = 0; i < jarray.size(); i++) {
-
- JsonElement val = jarray.get(i).getAsJsonObject().get("Reason");
- JsonArray ec = (JsonArray) jarray.get(i).getAsJsonObject().get("ErrorCode");
- log.trace("CustomExceptionLoader.LookupMap Parameter -> Error msg : " + errormsg + " Reason text being matched:" + val);
- if (errormsg.contains(val.toString().replace("\"", ""))){
- log.trace("CustomExceptionLoader.LookupMap Successful! Exception matched to error message StatusCode:" + ec.get(0).toString() + "ErrorMessage:" + ec.get(1).toString());
- retarray = new String[2];
- retarray[0]=ec.get(0).toString();
- retarray[1]=ec.get(1).toString();
- return retarray;
- }
- }
-
- }
- catch (Exception e)
- {
- System.out.println(e.getMessage());
- }
-
- return retarray;
- }
-
-}
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.dcae.commonFunction;
+
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+
+import java.util.Map.Entry;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonIOException;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import com.google.gson.JsonSyntaxException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class CustomExceptionLoader {
+
+ public static HashMap<String, JsonArray> map = null;
+ private static final Logger log = LoggerFactory.getLogger ( CustomExceptionLoader.class );
+ //static private final VESLogger log = VESLogger.getLogger(CustomExceptionLoader.class, VESLogger.VES_AGENT);
+
+ //For standalone test
+ //LoadMap Invoked from servletSetup
+ /*
+ public static void main(String[] args) {
+
+ System.out.println("CustomExceptionLoader.main --> Arguments -- ExceptionConfig file: " + args[0] + "StatusCode:" + args[1]+ " Error Msg:" + args[2]);
+ CommonStartup.exceptionConfig = args[0];
+
+ //Read the Custom exception JSON file into map
+ LoadMap();
+ System.out.println("CustomExceptionLoader.main --> Map info post LoadMap:" + map);
+
+ String[] str= LookupMap(args[1],args[2]);
+ if (! (str==null)) {
+ System.out.println("CustomExceptionLoader.main --> Return from lookup function" + str[0] + "value:" + str[1]);
+ }
+
+ }
+ */
+
+ public static void LoadMap () {
+
+ map = new HashMap<String, JsonArray>();
+ FileReader fr = null;
+ try {
+ JsonElement root = null;
+ fr = new FileReader(CommonStartup.exceptionConfig);
+ root = new JsonParser().parse(fr);
+ JsonObject jsonObject = root.getAsJsonObject().get("code").getAsJsonObject();
+
+ for (Entry<String, JsonElement> entry : jsonObject.entrySet()) {
+ map.put(entry.getKey(), (JsonArray) entry.getValue());
+ }
+
+ log.debug("CustomExceptionLoader.LoadMap --> Map loaded - " + map);
+ } catch (JsonIOException e) {
+ e.printStackTrace();
+ } catch (JsonSyntaxException e) {
+ e.printStackTrace();
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ if (fr != null) {
+ try {
+ fr.close();
+ } catch (IOException e) {
+ log.error("Error closing file reader stream : " +e.toString());
+ }
+ }
+ }
+ }
+
+ public static String[] LookupMap (String error, String errormsg) {
+
+ String[] retarray = null;
+
+ log.debug("CustomExceptionLoader.LookupMap -->" + " HTTP StatusCode:" + error + " Msg:" + errormsg);
+ try{
+
+ JsonArray jarray = map.get(error);
+ for (int i = 0; i < jarray.size(); i++) {
+
+ JsonElement val = jarray.get(i).getAsJsonObject().get("Reason");
+ JsonArray ec = (JsonArray) jarray.get(i).getAsJsonObject().get("ErrorCode");
+ log.trace("CustomExceptionLoader.LookupMap Parameter -> Error msg : " + errormsg + " Reason text being matched:" + val);
+ if (errormsg.contains(val.toString().replace("\"", ""))){
+ log.trace("CustomExceptionLoader.LookupMap Successful! Exception matched to error message StatusCode:" + ec.get(0).toString() + "ErrorMessage:" + ec.get(1).toString());
+ retarray = new String[2];
+ retarray[0]=ec.get(0).toString();
+ retarray[1]=ec.get(1).toString();
+ return retarray;
+ }
+ }
+
+ }
+ catch (Exception e)
+ {
+ System.out.println(e.getMessage());
+ }
+
+ return retarray;
+ }
+
+}
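
Note: LoadMap expects the file referenced by CommonStartup.exceptionConfig to be a JSON object whose "code" member maps an HTTP status code to an array of entries, each with a "Reason" substring and a two-element "ErrorCode" array that LookupMap returns as [code, message]. A hypothetical file consistent with that parsing (all values are made up):

{
  "code": {
    "400": [
      { "Reason": "Incorrect JSON", "ErrorCode": ["SVC0002", "Bad parameter: incorrect JSON payload"] },
      { "Reason": "Schema validation failed", "ErrorCode": ["SVC0002", "Bad parameter: schema validation failed"] }
    ],
    "401": [
      { "Reason": "Unauthorized", "ErrorCode": ["POL2000", "Unauthorized user"] }
    ]
  }
}
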
diff --git a/src/main/java/org/openecomp/dcae/commonFunction/DmaapPropertyReader.java b/src/main/java/org/openecomp/dcae/commonFunction/DmaapPropertyReader.java
index 18e6d59..9cf7fc8 100644
--- a/src/main/java/org/openecomp/dcae/commonFunction/DmaapPropertyReader.java
+++ b/src/main/java/org/openecomp/dcae/commonFunction/DmaapPropertyReader.java
@@ -1,107 +1,118 @@
-/*-
+/*-
* ============LICENSE_START=======================================================
* PROJECT
* ================================================================================
* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
* ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
* limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.commonFunction;
-
-
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-
-import java.util.HashMap;
-
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonIOException;
-
-import com.google.gson.JsonParser;
-import com.google.gson.JsonSyntaxException;
-
-
-
-public class DmaapPropertyReader {
-
- private static DmaapPropertyReader instance = null;
-
-
- private static final Logger log = LoggerFactory.getLogger ( DmaapPropertyReader.class );
- public HashMap<String, String> dmaap_hash = new HashMap<String, String>();
-
- private DmaapPropertyReader(String CambriaConfigFile) {
-
- try {
- JsonElement root = null;
- root = new JsonParser().parse(new FileReader(CambriaConfigFile));
- JsonArray jsonObject = (JsonArray) root.getAsJsonObject().get("channels");
-
- for (int i = 0; i < jsonObject.size(); i++) {
- log.debug("TOPIC:" + jsonObject.get(i).getAsJsonObject().get("cambria.topic") +
- " HOST-URL:" + jsonObject.get(i).getAsJsonObject().get("cambria.url") +
- " HOSTS:" + jsonObject.get(i).getAsJsonObject().get("cambria.hosts") +
- " PWD:" + jsonObject.get(i).getAsJsonObject().get("basicAuthPassword") +
- " USER:" + jsonObject.get(i).getAsJsonObject().get("basicAuthUsername") +
- " NAME:" + jsonObject.get(i).getAsJsonObject().get("name") );
-
- String convertedname = jsonObject.get(i).getAsJsonObject().get("name").toString().replace("\"","");
- dmaap_hash.put(convertedname + ".cambria.topic", jsonObject.get(i).getAsJsonObject().get("cambria.topic").toString().replace("\"","") );
-
- if (jsonObject.get(i).getAsJsonObject().get("cambria.hosts") != null)
- {
- dmaap_hash.put(convertedname + ".cambria.hosts", jsonObject.get(i).getAsJsonObject().get("cambria.hosts").toString().replace("\"","") );
- }
- if (jsonObject.get(i).getAsJsonObject().get("cambria.url") != null)
- {
- dmaap_hash.put(convertedname + ".cambria.url", jsonObject.get(i).getAsJsonObject().get("cambria.url").toString().replace("\"","") );
- }
- if (jsonObject.get(i).getAsJsonObject().get("basicAuthPassword") != null)
- {
- dmaap_hash.put(convertedname + ".basicAuthPassword", jsonObject.get(i).getAsJsonObject().get("basicAuthPassword").toString().replace("\"","") );
- }
- if (jsonObject.get(i).getAsJsonObject().get("basicAuthUsername") != null)
- {
- dmaap_hash.put(convertedname+ ".basicAuthUsername", jsonObject.get(i).getAsJsonObject().get("basicAuthUsername").toString().replace("\"","") );
- }
-
- }
- } catch (JsonIOException | JsonSyntaxException | FileNotFoundException e1) {
- e1.printStackTrace();
- log.error("Problem loading Dmaap Channel configuration file: " +e1.toString());
- }
-
-
- }
-
-
-
- public static synchronized DmaapPropertyReader getInstance(String ChannelConfig){
- if (instance == null) {
- instance = new DmaapPropertyReader(ChannelConfig);
- }
- return instance;
- }
-
-
- public String getKeyValue(String HashKey){
- return this.dmaap_hash.get(HashKey);
- }
-}
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.dcae.commonFunction;
+
+
+import java.io.FileNotFoundException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonIOException;
+
+import com.google.gson.JsonParser;
+import com.google.gson.JsonSyntaxException;
+
+
+
+public class DmaapPropertyReader {
+
+ private static DmaapPropertyReader instance = null;
+
+
+ private static final Logger log = LoggerFactory.getLogger ( DmaapPropertyReader.class );
+ //static private final VESLogger log = VESLogger.getLogger(DmaapPropertyReader.class, VESLogger.VES_AGENT);
+
+ public HashMap<String, String> dmaap_hash = new HashMap<String, String>();
+
+ private DmaapPropertyReader(String CambriaConfigFile) {
+
+ FileReader fr = null;
+ try {
+ JsonElement root = null;
+ fr = new FileReader(CambriaConfigFile);
+ root = new JsonParser().parse(fr);
+ JsonArray jsonObject = (JsonArray) root.getAsJsonObject().get("channels");
+
+ for (int i = 0; i < jsonObject.size(); i++) {
+ log.debug("TOPIC:" + jsonObject.get(i).getAsJsonObject().get("cambria.topic") +
+ " HOST-URL:" + jsonObject.get(i).getAsJsonObject().get("cambria.url") +
+ " HOSTS:" + jsonObject.get(i).getAsJsonObject().get("cambria.hosts") +
+ " PWD:" + jsonObject.get(i).getAsJsonObject().get("basicAuthPassword") +
+ " USER:" + jsonObject.get(i).getAsJsonObject().get("basicAuthUsername") +
+ " NAME:" + jsonObject.get(i).getAsJsonObject().get("name") );
+
+ String convertedname = jsonObject.get(i).getAsJsonObject().get("name").toString().replace("\"","");
+ dmaap_hash.put(convertedname + ".cambria.topic", jsonObject.get(i).getAsJsonObject().get("cambria.topic").toString().replace("\"","") );
+
+ if (jsonObject.get(i).getAsJsonObject().get("cambria.hosts") != null)
+ {
+ dmaap_hash.put(convertedname + ".cambria.hosts", jsonObject.get(i).getAsJsonObject().get("cambria.hosts").toString().replace("\"","") );
+ }
+ if (jsonObject.get(i).getAsJsonObject().get("cambria.url") != null)
+ {
+ dmaap_hash.put(convertedname + ".cambria.url", jsonObject.get(i).getAsJsonObject().get("cambria.url").toString().replace("\"","") );
+ }
+ if (jsonObject.get(i).getAsJsonObject().get("basicAuthPassword") != null)
+ {
+ dmaap_hash.put(convertedname + ".basicAuthPassword", jsonObject.get(i).getAsJsonObject().get("basicAuthPassword").toString().replace("\"","") );
+ }
+ if (jsonObject.get(i).getAsJsonObject().get("basicAuthUsername") != null)
+ {
+ dmaap_hash.put(convertedname+ ".basicAuthUsername", jsonObject.get(i).getAsJsonObject().get("basicAuthUsername").toString().replace("\"","") );
+ }
+
+ }
+ } catch (JsonIOException | JsonSyntaxException | FileNotFoundException e1) {
+ e1.printStackTrace();
+ log.error("Problem loading Dmaap Channel configuration file: " +e1.toString());
+ }
+ finally {
+ if (fr != null) {
+ try {
+ fr.close();
+ } catch (IOException e) {
+ log.error("Error closing file reader stream : " +e.toString());
+ }
+ }
+ }
+
+
+ }
+
+
+
+ public static synchronized DmaapPropertyReader getInstance(String ChannelConfig){
+ if (instance == null) {
+ instance = new DmaapPropertyReader(ChannelConfig);
+ }
+ return instance;
+ }
+
+
+ public String getKeyValue(String HashKey){
+ return this.dmaap_hash.get(HashKey);
+ }
+}
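
Note: the reader above flattens each entry of the "channels" array into dmaap_hash keys of the form <name>.cambria.topic, <name>.cambria.url or <name>.cambria.hosts, <name>.basicAuthUsername and <name>.basicAuthPassword. A hypothetical DmaapConfig.json that satisfies this parsing (channel names, hosts, topics and credentials are placeholders):

{
  "channels": [
    {
      "name": "sec_fault",
      "cambria.topic": "unauthenticated.SEC_FAULT_OUTPUT",
      "cambria.hosts": "ueb-host1.example.com,ueb-host2.example.com"
    },
    {
      "name": "sec_fault_secure",
      "cambria.topic": "SEC_FAULT_OUTPUT",
      "cambria.url": "dmaap.example.com:3904",
      "basicAuthUsername": "collector",
      "basicAuthPassword": "change-me"
    }
  ]
}
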
diff --git a/src/main/java/org/openecomp/dcae/commonFunction/EventProcessor.java b/src/main/java/org/openecomp/dcae/commonFunction/EventProcessor.java
index 0e6f7e7..a5e90b9 100644
--- a/src/main/java/org/openecomp/dcae/commonFunction/EventProcessor.java
+++ b/src/main/java/org/openecomp/dcae/commonFunction/EventProcessor.java
@@ -20,40 +20,81 @@
package org.openecomp.dcae.commonFunction;
-import org.json.JSONObject;
+import java.text.SimpleDateFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.att.nsa.clock.SaClock;
+import com.att.nsa.logging.LoggingContext;
+import com.att.nsa.logging.log4j.EcompFields;
+
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.TimeZone;
+import java.util.UUID;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+
public class EventProcessor implements Runnable {
private static final Logger log = LoggerFactory.getLogger(EventProcessor.class);
+
+ private static HashMap<String, String[]> streamid_hash = new HashMap<String, String[]>();
private JSONObject event = null;
public EventProcessor() {
log.debug("EventProcessor: Default Constructor");
+
+ String list[] = CommonStartup.streamid.split("\\|");
+ for (int i = 0; i < list.length; i++) {
+ String domain = list[i].split("=")[0];
+ //String streamIdList[] = list[i].split("=")[1].split(",");
+ String streamIdList[] = list[i].substring(list[i].indexOf("=") +1).split(",");
+
+ log.debug("Domain: " + domain + " streamIdList:" + Arrays.toString(streamIdList));
+ streamid_hash.put(domain, streamIdList);
+ }
+
}
@Override
public void run() {
try {
+
event = CommonStartup.fProcessingInputQueue.take();
log.info("EventProcessor\tRemoving element: " + event);
-
+
+ //EventPublisher Ep=new EventPublisher();
while (event != null) {
- // As long as the producer is running,
- // we remove elements from the queue.
+ // As long as the producer is running we remove elements from the queue.
- // log.info("EventProcessor\tRemoving element: " +
- // this.queue.remove());
-
- if (CommonStartup.streamid == null) {
+ //UUID uuid = UUID.fromString(event.get("VESuniqueId").toString());
+ String uuid = event.get("VESuniqueId").toString();
+ LoggingContext localLC = VESLogger.getLoggingContextForThread(uuid.toString());
+ localLC .put ( EcompFields.kBeginTimestampMs, SaClock.now () );
+
+ log.debug("event.VESuniqueId" + event.get("VESuniqueId") + "event.commonEventHeader.domain:" + event.getJSONObject("event").getJSONObject("commonEventHeader").getString("domain"));
+ String streamIdList[]=streamid_hash.get(event.getJSONObject("event").getJSONObject("commonEventHeader").getString("domain"));
+ log.debug("streamIdList:" + streamIdList);
+
+ if (streamIdList.length == 0) {
log.error("No StreamID defined for publish - Message dropped" + event.toString());
- } else {
- EventPublisher.getInstance(CommonStartup.cambriaConfigFile, CommonStartup.streamid)
- .sendEvent(event.toString(), CommonStartup.streamid);
+ }
+
+ else {
+ for (int i=0; i < streamIdList.length; i++)
+ {
+ log.info("Invoking publisher for streamId:" + streamIdList[i]);
+ this.overrideEvent();
+ EventPublisher.getInstance(streamIdList[i]).sendEvent(event);
+
+ }
}
log.debug("Message published" + event.toString());
event = CommonStartup.fProcessingInputQueue.take();
+ // log.info("EventProcessor\tRemoving element: " + this.queue.remove());
}
} catch (InterruptedException e) {
log.error("EventProcessor InterruptedException" + e.getMessage());
@@ -61,4 +102,20 @@ public class EventProcessor implements Runnable {
}
+
+ public void overrideEvent()
+ {
+ //Set collector timestamp in event payload before publish
+ final Date currentTime = new Date();
+ final SimpleDateFormat sdf = new SimpleDateFormat("EEE, MM dd yyyy hh:mm:ss z");
+ sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+ JSONArray additionalParametersarray = new JSONArray().put(new JSONObject().put("collectorTimeStamp", sdf.format(currentTime)));
+ JSONObject additionalParameter = new JSONObject().put("additionalParameters",additionalParametersarray );
+ JSONObject commonEventHeaderkey = event.getJSONObject("event").getJSONObject("commonEventHeader");
+ commonEventHeaderkey.put("internalHeaderFields", additionalParameter);
+ event.getJSONObject("event").put("commonEventHeader",commonEventHeaderkey);
+ log.debug("Modified event:" + event);
+
+ }
}
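
Note: the EventProcessor constructor above builds its domain-to-streamId map by splitting the collector.dmaap.streamid value on '|', then on the first '=', then on ','. A self-contained sketch of that parsing, using a hypothetical property value:

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class StreamIdParseSketch {
    public static void main(String[] args) {
        // hypothetical value of collector.dmaap.streamid
        String streamid = "fault=sec_fault|measurementsForVfScaling=sec_measurement,sec_measurement_unsecure";

        Map<String, String[]> streamidHash = new HashMap<>();
        for (String entry : streamid.split("\\|")) {
            String domain = entry.split("=")[0];
            // everything after the first '=' is the comma-separated streamId list;
            // substring is used (as in the constructor above) rather than split("=")[1]
            String[] streamIdList = entry.substring(entry.indexOf("=") + 1).split(",");
            streamidHash.put(domain, streamIdList);
        }

        streamidHash.forEach((domain, ids) ->
                System.out.println(domain + " -> " + Arrays.toString(ids)));
        // fault -> [sec_fault]
        // measurementsForVfScaling -> [sec_measurement, sec_measurement_unsecure]
    }
}
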
diff --git a/src/main/java/org/openecomp/dcae/commonFunction/EventPublisher.java b/src/main/java/org/openecomp/dcae/commonFunction/EventPublisher.java
index 4aa6da4..b40fb24 100644
--- a/src/main/java/org/openecomp/dcae/commonFunction/EventPublisher.java
+++ b/src/main/java/org/openecomp/dcae/commonFunction/EventPublisher.java
@@ -1,136 +1,181 @@
-/*-
+/*-
* ============LICENSE_START=======================================================
* PROJECT
* ================================================================================
* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
* ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
* limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.dcae.commonFunction;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.security.GeneralSecurityException;
-import java.net.MalformedURLException;
-
-import com.att.nsa.cambria.client.CambriaBatchingPublisher;
-import com.att.nsa.cambria.client.CambriaClientBuilders;
-
-
-public class EventPublisher {
-
- private static EventPublisher instance = null;
- private static CambriaBatchingPublisher pub = null;
-
- private String streamid = "";
- private static Logger log = LoggerFactory.getLogger(EventPublisher.class.getName());
-
-
-
- private EventPublisher(String CambriaConfigFile, String newstreamid) {
-
- this.streamid = newstreamid;
- try {
- String basicAuthUsername = DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".basicAuthUsername");
- if (basicAuthUsername != null)
- {
- //log.debug(streamid+".cambria.url" + streamid+".cambria.topic");
- log.debug("URL:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".cambria.url") + "TOPIC:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".cambria.topic") + "AuthUser:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".basicAuthUsername") + "Authpwd:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".basicAuthPassword"));
-
- pub = new CambriaClientBuilders.PublisherBuilder ()
- .usingHosts (DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".cambria.url"))
- .onTopic (DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".cambria.topic"))
- .usingHttps()
- .authenticatedByHttp ( DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".basicAuthUsername"), DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".basicAuthPassword") )
- .build ();
- }
- else
- {
- //log.debug(streamid+".cambria.url" + streamid+".cambria.topic");
- log.debug("URL:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".cambria.url") + "TOPIC:" + DmaapPropertyReader.getInstance(CambriaConfigFile).getKeyValue(streamid+".cambria.topic"));
-
-
- pub = new CambriaClientBuilders.PublisherBuilder ()
- .usingHosts (DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".cambria.hosts"))
- .onTopic (DmaapPropertyReader.getInstance(CambriaConfigFile).dmaap_hash.get(streamid+".cambria.topic"))
- .build ();
-
- }
- }
- catch(GeneralSecurityException | MalformedURLException e ) {
- log.error("CambriaClientBuilders connection exception : " + e.getMessage());
- }
- catch(Exception e) {
- log.error("CambriaClientBuilders connection exception : " + e.getMessage());
- }
-
- }
-
- public static synchronized EventPublisher getInstance( String CambriaConfigFile, String streamid){
- if (instance == null) {
- instance = new EventPublisher(CambriaConfigFile, streamid);
- }
- return instance;
-
- }
-
- public synchronized void sendEvent(String event, String newstreamid ) {
-
- //Check if streamid changed
- if(! newstreamid.equals(this.streamid)) {
- closePublisher();
- instance = new EventPublisher (CommonStartup.cambriaConfigFile, newstreamid);
- }
-
-
- try {
- int pendingMsgs = pub.send("MyPartitionKey", event.toString());
-
- if(pendingMsgs > 100) {
- log.info("Pending Message Count="+pendingMsgs);
- }
-
- CommonStartup.oplog.info ("Event Published:" + event);
- } catch(IOException ioe) {
- log.error("Unable to publish event:" + event + " Exception:" + ioe.toString());
- }
-
-
-
-
- }
-
-
- public synchronized void closePublisher() {
-
- try {
- final List<?> stuck = pub.close(20, TimeUnit.SECONDS);
- if ( stuck.size () > 0 ) {
- log.error(stuck.size() + " messages unsent" );
- }
- }
- catch(InterruptedException ie) {
- log.error("Caught an Interrupted Exception on Close event");
- }catch(IOException ioe) {
- log.error("Caught IO Exception: " + ioe.toString());
- }
-
- }
-}
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.dcae.commonFunction;
+
+import java.io.IOException;
+
+import org.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import java.security.GeneralSecurityException;
+
+import com.att.nsa.cambria.client.CambriaBatchingPublisher;
+import com.att.nsa.cambria.client.CambriaClientBuilders;
+import com.att.nsa.clock.SaClock;
+import com.att.nsa.logging.LoggingContext;
+import com.att.nsa.logging.log4j.EcompFields;
+
+
+public class EventPublisher {
+
+ private static EventPublisher instance = null;
+ private static CambriaBatchingPublisher pub = null;
+
+ private String streamid = "";
+ private String ueburl="";
+ private String topic="";
+ private String authuser="";
+ private String authpwd="";
+
+ private static Logger log = LoggerFactory.getLogger(EventPublisher.class);
+
+
+ private EventPublisher( String newstreamid) {
+
+ this.streamid = newstreamid;
+ try {
+ ueburl=DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).dmaap_hash.get(streamid+".cambria.url");
+
+ if (ueburl==null){
+ ueburl= DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).dmaap_hash.get(streamid+".cambria.hosts");
+ }
+ topic= DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).getKeyValue(streamid+".cambria.topic");
+ authuser = DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).getKeyValue(streamid+".basicAuthUsername");
+
+
+ if (authuser != null) {
+ authpwd= DmaapPropertyReader.getInstance(CommonStartup.cambriaConfigFile).dmaap_hash.get(streamid+".basicAuthPassword");
+ }
+ }
+ catch(Exception e) {
+ log.error("CambriaClientBuilders connection reader exception : " + e.getMessage());
+
+ }
+
+ }
+
+
+ public static synchronized EventPublisher getInstance( String streamid){
+ if (instance == null) {
+ instance = new EventPublisher(streamid);
+ }
+ if (!instance.streamid.equals(streamid)){
+ instance.closePublisher();
+ instance = new EventPublisher(streamid);
+ }
+ return instance;
+
+ }
+
+
+ public synchronized void sendEvent(JSONObject event) {
+
+ log.debug("EventPublisher.sendEvent: instance for publish is ready");
+
+
+ if (event.has("VESuniqueId"))
+ {
+ String uuid = event.get("VESuniqueId").toString();
+ LoggingContext localLC = VESLogger.getLoggingContextForThread(uuid.toString());
+ localLC .put ( EcompFields.kBeginTimestampMs, SaClock.now () );
+ log.debug("Removing VESuniqueid object from event");
+ event.remove("VESuniqueId");
+ }
+
+
+
+
+ try {
+
+ if (authuser != null)
+ {
+ log.debug("URL:" + ueburl + "TOPIC:" + topic + "AuthUser:" + authuser + "Authpwd:" + authpwd);
+ pub = new CambriaClientBuilders.PublisherBuilder ()
+ .usingHosts (ueburl)
+ .onTopic (topic)
+ .usingHttps()
+ .authenticatedByHttp (authuser, authpwd )
+ .logSendFailuresAfter(5)
+ // .logTo(log)
+ // .limitBatch(100, 10)
+ .build ();
+ }
+ else
+ {
+
+ log.debug("URL:" + ueburl + "TOPIC:" + topic );
+ pub = new CambriaClientBuilders.PublisherBuilder ()
+ .usingHosts (ueburl)
+ .onTopic (topic)
+ // .logTo(log)
+ .logSendFailuresAfter(5)
+ // .limitBatch(100, 10)
+ .build ();
+
+ }
+
+ int pendingMsgs = pub.send("MyPartitionKey", event.toString());
+ //this.wait(2000);
+
+ if(pendingMsgs > 100) {
+ log.info("Pending Message Count="+pendingMsgs);
+ }
+
+ //closePublisher();
+ log.info("pub.send invoked - no error");
+ CommonStartup.oplog.info ("URL:" + ueburl + "TOPIC:" + topic + "Event Published:" + event);
+
+ } catch(IOException e) {
+ log.error("IOException:Unable to publish event:" + event + " streamid:" + this.streamid + " Exception:" + e.toString());
+ } catch (GeneralSecurityException e) {
+ // TODO Auto-generated catch block
+ log.error("GeneralSecurityException:Unable to publish event:" + event + " streamid:" + this.streamid + " Exception:" + e.toString());
+ }
+ catch (IllegalArgumentException e)
+ {
+ log.error("IllegalArgumentException:Unable to publish event:" + event + " streamid:" + this.streamid + " Exception:" + e.toString());
+ }
+
+ }
+
+
+ public synchronized void closePublisher() {
+
+ try {
+ if (pub!= null)
+ {
+ final List<?> stuck = pub.close(20, TimeUnit.SECONDS);
+ if ( stuck.size () > 0 ) {
+ log.error(stuck.size() + " messages unsent" );
+ }
+ }
+ }
+ catch(InterruptedException ie) {
+ log.error("Caught an Interrupted Exception on Close event");
+ }catch(IOException ioe) {
+ log.error("Caught IO Exception: " + ioe.toString());
+ }
+
+ }
+}
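
Note: EventPublisher is a singleton keyed by streamid: getInstance rebuilds the instance when a different streamid is requested, and sendEvent builds the Cambria publisher, strips the internal VESuniqueId field, and posts the event. A minimal caller sketch; the stream name "sec_fault", the config path and the event content are assumptions, and the class assumes the collector classpath:

package org.openecomp.dcae.commonFunction;

import java.util.UUID;
import org.json.JSONObject;

// Illustrative only: publishes one hand-built event through the publisher above.
public class PublishSketch {
    public static void main(String[] args) {
        // normally set by CommonStartup; assumed path here
        CommonStartup.cambriaConfigFile = "/etc/DmaapConfig.json";

        JSONObject header = new JSONObject().put("domain", "fault");
        JSONObject event = new JSONObject()
                .put("VESuniqueId", UUID.randomUUID().toString())   // removed again by sendEvent
                .put("event", new JSONObject().put("commonEventHeader", header));

        EventPublisher.getInstance("sec_fault").sendEvent(event);
    }
}
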
diff --git a/src/main/java/org/openecomp/dcae/commonFunction/VESLogger.java b/src/main/java/org/openecomp/dcae/commonFunction/VESLogger.java
new file mode 100644
index 0000000..7a70013
--- /dev/null
+++ b/src/main/java/org/openecomp/dcae/commonFunction/VESLogger.java
@@ -0,0 +1,170 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * PROJECT
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights
+ * reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.dcae.commonFunction;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+
+import java.util.UUID;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.att.nsa.clock.SaClock;
+
+import com.att.nsa.logging.LoggingContext;
+import com.att.nsa.logging.LoggingContextFactory;
+import com.att.nsa.logging.log4j.EcompFields;
+
+import jline.internal.Log;
+
+
+public class VESLogger {
+
+ public static final String VES_AGENT = "VES_AGENT";
+
+ public static Logger auditLog;
+ public static Logger metricsLog;
+ public static Logger errorLog;
+ public static Logger debugLog;
+
+ // Common LoggingContext
+ private static LoggingContext commonLC = null;
+ // Thread-specific LoggingContext
+ private static LoggingContext threadLC = null;
+ public LoggingContext lc ;
+
+
+
+ /**
+ * Returns the common LoggingContext instance that is the base context
+ * for all subsequent instances.
+ *
+ * @return the common LoggingContext
+ */
+ public static LoggingContext getCommonLoggingContext()
+ {
+ if (commonLC == null)
+ {
+ commonLC = new LoggingContextFactory.Builder().build();
+ final UUID uuid = java.util.UUID.randomUUID();
+
+ commonLC.put("requestId", uuid.toString());
+ }
+ return commonLC;
+ }
+
+ /**
+ * Get a logging context for the current thread that's based on the common logging context.
+ * Populate the context with context-specific values.
+ *
+ * @return a LoggingContext for the current thread
+ */
+ public static LoggingContext getLoggingContextForThread (UUID aUuid)
+ {
+ // note that this operation requires everything from the common context
+ // to be (re)copied into the target context. That seems slow, but it actually
+ // helps prevent the thread from overwriting supposedly common data. It also
+ // should be fairly quick compared with the overhead of handling the actual
+ // service call.
+
+ threadLC = new LoggingContextFactory.Builder().
+ withBaseContext ( getCommonLoggingContext () ).
+ build();
+ // Establish the request-specific UUID, as long as we are here...
+ threadLC.put("requestId", aUuid.toString());
+ threadLC.put ( EcompFields.kEndTimestamp, SaClock.now () );
+
+ return threadLC;
+ }
+
+ /**
+ * Get a logging context for the current thread that's based on the common logging context.
+ * Populate the context with context-specific values.
+ *
+ * @return a LoggingContext for the current thread
+ */
+ public static LoggingContext getLoggingContextForThread (String aUuid)
+ {
+ // note that this operation requires everything from the common context
+ // to be (re)copied into the target context. That seems slow, but it actually
+ // helps prevent the thread from overwriting supposedly common data. It also
+ // should be fairly quick compared with the overhead of handling the actual
+ // service call.
+
+ threadLC = new LoggingContextFactory.Builder().
+ withBaseContext ( getCommonLoggingContext () ).
+ build();
+ // Establish the request-specific UUID, as long as we are here...
+ threadLC.put("requestId", aUuid);
+ threadLC.put ( "statusCode", "COMPLETED" );
+ threadLC.put ( EcompFields.kEndTimestamp, SaClock.now () );
+ return threadLC;
+ }
+ public static void setUpEcompLogging()
+ {
+
+
+ // Create ECOMP Logger instances
+ auditLog = LoggerFactory.getLogger("com.att.ecomp.audit");
+ metricsLog = LoggerFactory.getLogger("com.att.ecomp.metrics");
+ debugLog = LoggerFactory.getLogger("com.att.ecomp.debug");
+ errorLog = LoggerFactory.getLogger("com.att.ecomp.error");
+
+
+ final LoggingContext lc = getCommonLoggingContext();
+
+ String ipAddr = "127.0.0.1";
+ String hostname = "localhost";
+ try
+ {
+ final InetAddress ip = InetAddress.getLocalHost ();
+ hostname = ip.getCanonicalHostName ();
+ ipAddr = ip.getHostAddress();
+ }
+ catch ( UnknownHostException x )
+ {
+ Log.debug(x.getMessage());
+ }
+
+ lc.put ( "serverName", hostname );
+ lc.put ( "serviceName", "VESCollecor" );
+ lc.put ( "statusCode", "RUNNING" );
+ lc.put ( "targetEntity", "NULL");
+ lc.put ( "targetServiceName", "NULL");
+ lc.put ( "server", hostname );
+ lc.put ( "serverIpAddress", ipAddr.toString () );
+
+ // instance UUID is meaningless here, so we just create a new one each time the
+ // server starts. One could argue each new instantiation of the service should
+ // have a new instance ID.
+ lc.put ( "instanceUuid", "" );
+ lc.put ( "severity", "" );
+ lc.put ( EcompFields.kEndTimestamp, SaClock.now () );
+ lc.put("EndTimestamp", SaClock.now ());
+ lc.put("partnerName", "NA");
+
+
+ }
+
+
+}
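
Note: both EventProcessor and EventPublisher use this class the same way: fetch a per-request context keyed by the event's VESuniqueId and stamp begin/end timestamps around the work. A minimal sketch of that pattern; the wrapper class and method names below are illustrative, not part of the change:

package org.openecomp.dcae.commonFunction;

import com.att.nsa.clock.SaClock;
import com.att.nsa.logging.LoggingContext;
import com.att.nsa.logging.log4j.EcompFields;

// Sketch only: mirrors the per-event logging done in EventProcessor and EventPublisher.
public class RequestLoggingSketch {
    static void logAround(String vesUniqueId, Runnable work) {
        LoggingContext lc = VESLogger.getLoggingContextForThread(vesUniqueId);
        lc.put(EcompFields.kBeginTimestampMs, SaClock.now());
        work.run();
        lc.put(EcompFields.kEndTimestamp, SaClock.now());
    }
}
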