author    abatos <adrian.batos-parac@amdocs.com>  2017-05-12 14:11:30 -0400
committer abatos <adrian.batos-parac@amdocs.com>  2017-05-12 14:20:46 -0400
commit    60f7f5e11d7d0d7870a4650956921bd1afa309fd (patch)
tree      5ce86d00dbf3a57dd9076060967ae355d4454c03 /src
parent    b0478eb10db68313fcf5d0a989c5eff25716052a (diff)
Initial ONAP Synapse commit
Change-Id: I3a0ed659dbb8f8faeeb54093b5d6f10414cd886e
Signed-off-by: abatos <adrian.batos-parac@amdocs.com>
Diffstat (limited to 'src')
-rw-r--r--  src/main/ajsc/data-router_v1/data-router/v1/conf/echoService.groovy | 12
-rw-r--r--  src/main/ajsc/data-router_v1/data-router/v1/docs/README.txt | 1
-rw-r--r--  src/main/ajsc/data-router_v1/data-router/v1/lib/README.txt | 1
-rw-r--r--  src/main/ajsc/data-router_v1/data-router/v1/props/module.props | 1
-rw-r--r--  src/main/ajsc/data-router_v1/data-router/v1/routes/echoService.route | 5
-rw-r--r--  src/main/assemble/ajsc_module_assembly.xml | 66
-rw-r--r--  src/main/assemble/ajsc_props_assembly.xml | 23
-rw-r--r--  src/main/assemble/ajsc_runtime_assembly.xml | 44
-rw-r--r--  src/main/config/ajsc-chef.jks | bin 0 -> 5256 bytes
-rw-r--r--  src/main/config/ajsc-jetty.xml | 114
-rw-r--r--  src/main/config/ajsc-override-web.xml | 39
-rw-r--r--  src/main/config/ajscJetty.jks | bin 0 -> 3736 bytes
-rw-r--r--  src/main/config/cadi.properties | 36
-rw-r--r--  src/main/config/jul-redirect.properties | 13
-rw-r--r--  src/main/config/keyfile | 27
-rw-r--r--  src/main/config/runner-web.xml | 97
-rw-r--r--  src/main/docker/Dockerfile | 26
-rw-r--r--  src/main/java/org/openecomp/datarouter/entity/AaiEventEntity.java | 315
-rw-r--r--  src/main/java/org/openecomp/datarouter/entity/AggregationEntity.java | 124
-rw-r--r--  src/main/java/org/openecomp/datarouter/entity/DocumentStoreDataEntity.java | 35
-rw-r--r--  src/main/java/org/openecomp/datarouter/entity/OxmEntityDescriptor.java | 126
-rw-r--r--  src/main/java/org/openecomp/datarouter/entity/PolicyResponse.java | 72
-rw-r--r--  src/main/java/org/openecomp/datarouter/entity/SuggestionSearchEntity.java | 281
-rw-r--r--  src/main/java/org/openecomp/datarouter/entity/TopographicalEntity.java | 191
-rw-r--r--  src/main/java/org/openecomp/datarouter/entity/UebEventHeader.java | 169
-rw-r--r--  src/main/java/org/openecomp/datarouter/exception/BaseDataRouterException.java | 89
-rw-r--r--  src/main/java/org/openecomp/datarouter/exception/DataRouterError.java | 106
-rw-r--r--  src/main/java/org/openecomp/datarouter/logging/DataRouterMsgs.java | 161
-rw-r--r--  src/main/java/org/openecomp/datarouter/logging/EntityEventPolicyMsgs.java | 218
-rw-r--r--  src/main/java/org/openecomp/datarouter/policy/EntityEventPolicy.java | 1162
-rw-r--r--  src/main/java/org/openecomp/datarouter/policy/EntityEventPolicyConfig.java | 129
-rw-r--r--  src/main/java/org/openecomp/datarouter/service/EchoService.java | 97
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/CrossEntityReference.java | 97
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/DataRouterConstants.java | 57
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/DataRouterProperties.java | 53
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/EntityOxmReferenceHelper.java | 62
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/ExternalOxmModelProcessor.java | 37
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/FileWatcher.java | 49
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/NodeUtils.java | 46
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/OxmModelLoader.java | 166
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/RouterServiceUtil.java | 226
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/SearchSuggestionPermutation.java | 89
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/Version.java | 29
-rw-r--r--  src/main/java/org/openecomp/datarouter/util/VersionedOxmEntities.java | 352
-rw-r--r--  src/main/resources/entitysearch_schema.json | 35
-rw-r--r--  src/main/resources/logging/DataRouterMsgs.properties | 135
-rw-r--r--  src/main/resources/logging/EntityEventPolicyMsgs.properties | 122
-rw-r--r--  src/main/resources/topographysearch_schema.json | 8
-rw-r--r--  src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context | 1
-rw-r--r--  src/main/runtime/context/default#0.context | 1
-rw-r--r--  src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json | 1
-rw-r--r--  src/main/runtime/shiroRole/ajscadmin.json | 1
-rw-r--r--  src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json | 1
-rw-r--r--  src/main/runtime/shiroRole/contextadmin#default.json | 1
-rw-r--r--  src/main/runtime/shiroUser/ajsc.json | 1
-rw-r--r--  src/main/runtime/shiroUserRole/ajsc#ajscadmin.json | 1
-rw-r--r--  src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json | 1
-rw-r--r--  src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json | 1
58 files changed, 5353 insertions, 0 deletions
diff --git a/src/main/ajsc/data-router_v1/data-router/v1/conf/echoService.groovy b/src/main/ajsc/data-router_v1/data-router/v1/conf/echoService.groovy
new file mode 100644
index 0000000..ad7670e
--- /dev/null
+++ b/src/main/ajsc/data-router_v1/data-router/v1/conf/echoService.groovy
@@ -0,0 +1,12 @@
+beans{
+ xmlns cxf: "http://camel.apache.org/schema/cxf"
+ xmlns jaxrs: "http://cxf.apache.org/jaxrs"
+ xmlns util: "http://www.springframework.org/schema/util"
+
+ echoService(org.openecomp.datarouter.service.EchoService)
+
+
+ util.list(id: 'echoServices') {
+ ref(bean:'echoService')
+ }
+}
diff --git a/src/main/ajsc/data-router_v1/data-router/v1/docs/README.txt b/src/main/ajsc/data-router_v1/data-router/v1/docs/README.txt
new file mode 100644
index 0000000..3707179
--- /dev/null
+++ b/src/main/ajsc/data-router_v1/data-router/v1/docs/README.txt
@@ -0,0 +1 @@
+Place any docs here that you want to access within the ajsc upon deployment of your service.
diff --git a/src/main/ajsc/data-router_v1/data-router/v1/lib/README.txt b/src/main/ajsc/data-router_v1/data-router/v1/lib/README.txt
new file mode 100644
index 0000000..639e21b
--- /dev/null
+++ b/src/main/ajsc/data-router_v1/data-router/v1/lib/README.txt
@@ -0,0 +1 @@
+3rd-party JARs needed by your JARs (if any) for an AJSC deployment package go here...
\ No newline at end of file
diff --git a/src/main/ajsc/data-router_v1/data-router/v1/props/module.props b/src/main/ajsc/data-router_v1/data-router/v1/props/module.props
new file mode 100644
index 0000000..17ebc08
--- /dev/null
+++ b/src/main/ajsc/data-router_v1/data-router/v1/props/module.props
@@ -0,0 +1 @@
+EXAMPLE.PROPERTY=EXAMPLE_VALUE
\ No newline at end of file
diff --git a/src/main/ajsc/data-router_v1/data-router/v1/routes/echoService.route b/src/main/ajsc/data-router_v1/data-router/v1/routes/echoService.route
new file mode 100644
index 0000000..b81cbbd
--- /dev/null
+++ b/src/main/ajsc/data-router_v1/data-router/v1/routes/echoService.route
@@ -0,0 +1,5 @@
+<route xmlns="http://camel.apache.org/schema/spring" trace="true">
+ <from uri="att-dme2-servlet:///__module_ajsc_namespace_name__/__module_ajsc_namespace_version__/echo-service/?matchOnUriPrefix=true" />
+ <to uri="cxfbean:echoServices" />
+</route>
+
diff --git a/src/main/assemble/ajsc_module_assembly.xml b/src/main/assemble/ajsc_module_assembly.xml
new file mode 100644
index 0000000..4ec4e28
--- /dev/null
+++ b/src/main/assemble/ajsc_module_assembly.xml
@@ -0,0 +1,66 @@
+<assembly
+ xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+ <id>${version}</id>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <formats>
+ <format>zip</format>
+ </formats>
+ <fileSets>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/routes/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/routes/</outputDirectory>
+ <includes>
+ <include>*.route</include>
+ </includes>
+
+ </fileSet>
+
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/docs/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/docs/</outputDirectory>
+ <includes>
+ <include>*.*</include>
+ <!-- <include>*.vm</include> -->
+ </includes>
+
+ </fileSet>
+
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/lib/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/lib/</outputDirectory>
+ <includes>
+ <include>*.jar</include>
+ </includes>
+
+ </fileSet>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/extJars/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/extJars/</outputDirectory>
+ <includes>
+ <include>*.jar</include>
+ </includes>
+ </fileSet>
+
+ <!-- also try to grab outputs from the "jar" plugin's package phase -->
+ <fileSet>
+ <directory>${project.basedir}/target/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/lib/</outputDirectory>
+ <includes>
+ <include>*.jar</include>
+ </includes>
+ </fileSet>
+
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/conf/</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/conf/</outputDirectory>
+ <includes>
+ <include>*.*</include>
+ </includes>
+
+ </fileSet>
+ </fileSets>
+
+</assembly>
+
diff --git a/src/main/assemble/ajsc_props_assembly.xml b/src/main/assemble/ajsc_props_assembly.xml
new file mode 100644
index 0000000..5b8a6fa
--- /dev/null
+++ b/src/main/assemble/ajsc_props_assembly.xml
@@ -0,0 +1,23 @@
+<assembly
+ xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+ <id>${version}_properties</id>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <formats>
+ <format>zip</format>
+ </formats>
+ <fileSets>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-ajsc/props</directory>
+ <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/props/</outputDirectory>
+ <includes>
+ <include>*.props</include>
+ </includes>
+
+ </fileSet>
+
+ </fileSets>
+
+</assembly>
+
diff --git a/src/main/assemble/ajsc_runtime_assembly.xml b/src/main/assemble/ajsc_runtime_assembly.xml
new file mode 100644
index 0000000..e37d366
--- /dev/null
+++ b/src/main/assemble/ajsc_runtime_assembly.xml
@@ -0,0 +1,44 @@
+<assembly
+ xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+ <id>runtimeEnvironment</id>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <formats>
+ <format>zip</format>
+ </formats>
+ <fileSets>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/context/</directory>
+ <outputDirectory>runtime/context/</outputDirectory>
+ <includes>
+ <include>*.context</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/serviceProperties/</directory>
+ <outputDirectory>runtime/serviceProperties/</outputDirectory>
+ <includes>
+ <include>*.props</include>
+ </includes>
+ </fileSet><fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/shiroRole</directory>
+ <outputDirectory>runtime/shiroRole/</outputDirectory>
+ <includes>
+ <include>*.json</include>
+ </includes>
+ </fileSet><fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/shiroUser</directory>
+ <outputDirectory>runtime/shiroUser/</outputDirectory>
+ <includes>
+ <include>*.json</include>
+ </includes>
+ </fileSet><fileSet>
+ <directory>${project.basedir}/target/versioned-runtime/shiroUserRole</directory>
+ <outputDirectory>runtime/shiroUserRole</outputDirectory>
+ <includes>
+ <include>*.json</include>
+ </includes>
+ </fileSet>
+ </fileSets>
+</assembly>
\ No newline at end of file
diff --git a/src/main/config/ajsc-chef.jks b/src/main/config/ajsc-chef.jks
new file mode 100644
index 0000000..aeca770
--- /dev/null
+++ b/src/main/config/ajsc-chef.jks
Binary files differ
diff --git a/src/main/config/ajsc-jetty.xml b/src/main/config/ajsc-jetty.xml
new file mode 100644
index 0000000..9d597f0
--- /dev/null
+++ b/src/main/config/ajsc-jetty.xml
@@ -0,0 +1,114 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
+<!-- Copyright (c) 2016 AT&T Intellectual Property. All rights reserved. -->
+<Configure id="ajsc-server" class="org.eclipse.jetty.server.Server">
+ <!-- DO NOT REMOVE!!!! This is setting up the AJSC Context -->
+ <New id="ajscContext" class="org.eclipse.jetty.webapp.WebAppContext">
+ <Set name="contextPath"><SystemProperty name="AJSC_CONTEXT_PATH" /></Set>
+ <Set name="extractWAR">true</Set>
+ <Set name="tempDirectory"><SystemProperty name="AJSC_TEMP_DIR" /></Set>
+ <Set name="war"><SystemProperty name="AJSC_WAR_PATH" /></Set>
+ <Set name="descriptor"><SystemProperty name="AJSC_HOME" />/etc/runner-web.xml</Set>
+ <Set name="overrideDescriptor"><SystemProperty name="AJSC_HOME" />/etc/ajsc-override-web.xml</Set>
+ <Set name="throwUnavailableOnStartupException">true</Set>
+ <Set name="servletHandler">
+ <New class="org.eclipse.jetty.servlet.ServletHandler">
+ <Set name="startWithUnavailable">false</Set>
+ </New>
+ </Set>
+ <Set name="extraClasspath"><SystemProperty name="AJSC_HOME" />/extJars/json-20131018.jar</Set>
+ </New>
+
+ <Set name="handler">
+ <New id="Contexts"
+ class="org.eclipse.jetty.server.handler.ContextHandlerCollection">
+ <Set name="Handlers">
+ <Array type="org.eclipse.jetty.webapp.WebAppContext">
+ <Item>
+ <Ref refid="ajscContext" />
+ </Item>
+ </Array>
+ </Set>
+ </New>
+ </Set>
+
+ <Call name="addBean">
+ <Arg>
+ <New id="DeploymentManager" class="org.eclipse.jetty.deploy.DeploymentManager">
+ <Set name="contexts">
+ <Ref refid="Contexts" />
+ </Set>
+ <Call id="extAppHotDeployProvider" name="addAppProvider">
+ <Arg>
+ <New class="org.eclipse.jetty.deploy.providers.WebAppProvider">
+ <Set name="monitoredDirName"><SystemProperty name="AJSC_HOME" />/extApps</Set>
+ <Set name="scanInterval">10</Set>
+ <Set name="extractWars">true</Set>
+ </New>
+ </Arg>
+ </Call>
+ </New>
+ </Arg>
+ </Call>
+
+ <New id="sslContextFactory" class="org.eclipse.jetty.util.ssl.SslContextFactory">
+ <Set name="keyStorePath">file:<SystemProperty name="CONFIG_HOME" />/auth/tomcat_keystore</Set>
+ <Set name="KeyStorePassword">
+ <Call class="org.eclipse.jetty.util.security.Password" name="deobfuscate">
+ <Arg><SystemProperty name="KEY_STORE_PASSWORD" /></Arg>
+ </Call>
+ </Set>
+ <Set name="KeyManagerPassword">
+ <Call class="org.eclipse.jetty.util.security.Password" name="deobfuscate">
+ <Arg><SystemProperty name="KEY_MANAGER_PASSWORD" /></Arg>
+ </Call>
+ </Set>
+ <Set name="needClientAuth">true</Set>
+ <Set name="wantClientAuth">true</Set>
+ </New>
+
+ <Call id="sslConnector" name="addConnector">
+ <Arg>
+ <New class="org.eclipse.jetty.server.ServerConnector">
+ <Arg name="server">
+ <Ref refid="ajsc-server" />
+ </Arg>
+ <Arg name="factories">
+ <Array type="org.eclipse.jetty.server.ConnectionFactory">
+ <Item>
+ <New class="org.eclipse.jetty.server.SslConnectionFactory">
+ <Arg name="next">http/1.1</Arg>
+ <Arg name="sslContextFactory">
+ <Ref refid="sslContextFactory" />
+ </Arg>
+ </New>
+ </Item>
+ <Item>
+ <New class="org.eclipse.jetty.server.HttpConnectionFactory">
+ <Arg name="config">
+ <New class="org.eclipse.jetty.server.HttpConfiguration">
+ <Call name="addCustomizer">
+ <Arg>
+ <New class="org.eclipse.jetty.server.SecureRequestCustomizer" />
+ </Arg>
+ </Call>
+ </New>
+ </Arg>
+ </New>
+ </Item>
+ </Array>
+ </Arg>
+ <Set name="port"><SystemProperty name="AJSC_HTTPS_PORT" default="9502" /></Set>
+ <Set name="idleTimeout">30000</Set>
+ </New>
+ </Arg>
+ </Call>
+
+ <Get name="ThreadPool">
+ <Set name="minThreads"><SystemProperty name="AJSC_JETTY_ThreadCount_MIN" /></Set>
+ <Set name="maxThreads"><SystemProperty name="AJSC_JETTY_ThreadCount_MAX" /></Set>
+ <Set name="idleTimeout"><SystemProperty name="AJSC_JETTY_IDLETIME_MAX" /></Set>
+ <Set name="detailedDump">false</Set>
+ </Get>
+
+</Configure>
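
Note on the SSL connector above: KEY_STORE_PASSWORD and KEY_MANAGER_PASSWORD are expected as Jetty-obfuscated values and are decoded at startup by org.eclipse.jetty.util.security.Password.deobfuscate. A minimal sketch of producing such a value (the "changeit" password is only an illustrative placeholder, not a value shipped with this commit):

import org.eclipse.jetty.util.security.Password;

public class ObfuscateKeystorePassword {
  public static void main(String[] args) {
    // Produces a string of the form "OBF:...", suitable for -DKEY_STORE_PASSWORD=OBF:...
    String obfuscated = Password.obfuscate("changeit");
    System.out.println(obfuscated);
    // ajsc-jetty.xml reverses this at startup via Password.deobfuscate(...)
    System.out.println(Password.deobfuscate(obfuscated));
  }
}
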
diff --git a/src/main/config/ajsc-override-web.xml b/src/main/config/ajsc-override-web.xml
new file mode 100644
index 0000000..f6d69ea
--- /dev/null
+++ b/src/main/config/ajsc-override-web.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+-->
+<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
+ metadata-complete="false" version="3.0">
+
+ <filter-mapping>
+ <filter-name>InterceptorFilter</filter-name>
+ <url-pattern>/services/*</url-pattern>
+ </filter-mapping>
+ <filter-mapping>
+ <filter-name>InterceptorFilter</filter-name>
+ <url-pattern>/rest/*</url-pattern>
+ </filter-mapping>
+
+ <filter-mapping>
+ <filter-name>springSecurityFilterChain</filter-name>
+ <url-pattern>/*</url-pattern>
+ </filter-mapping>
+
+ <servlet-mapping>
+ <servlet-name>ManagementServlet</servlet-name>
+ <url-pattern>/mgmt</url-pattern>
+ </servlet-mapping>
+
+ <servlet-mapping>
+ <servlet-name>RestletServlet</servlet-name>
+ <url-pattern>/rest/*</url-pattern>
+ </servlet-mapping>
+
+ <servlet-mapping>
+ <servlet-name>CamelServlet</servlet-name>
+ <url-pattern>/*</url-pattern>
+ </servlet-mapping>
+
+
+</web-app>
\ No newline at end of file
diff --git a/src/main/config/ajscJetty.jks b/src/main/config/ajscJetty.jks
new file mode 100644
index 0000000..48cdbff
--- /dev/null
+++ b/src/main/config/ajscJetty.jks
Binary files differ
diff --git a/src/main/config/cadi.properties b/src/main/config/cadi.properties
new file mode 100644
index 0000000..a1d56d6
--- /dev/null
+++ b/src/main/config/cadi.properties
@@ -0,0 +1,36 @@
+#This properties file is used for defining AAF properties related to the CADI framework. This file is used when running the AAF framework.
+
+#In order to test functionality of cadi-ajsc-plugin locally, a cross-domain cookie is needed. Cadi "should" find your hostname for you.
+#However, we have seen some situations where this fails. A local testing
+#modification can include modifying your hosts file so that you can use "mywebserver.att.com" for your localhost in order
+#to test/verify GLO functionality locally. If you are on a Windows machine, you will already have a machine name associated with
+#it that utilizes an AT&T domain such as "sbc.com". You may need to add your domain to this as a comma-separated list depending
+#upon your particular machine domain. This property is commented out as cadi SHOULD find your machine name. With version 1.2.1 of cadi,
+#it appears to resolve Mac machine names as well now. But this can be somewhat inconsistent depending on your specific working environment.
+hostname=mywebserver.att.com
+
+#Setting csp_domain to PROD will allow for testing using your attuid and password through GLO.
+csp_domain=PROD
+csp_devl_localhost=true
+
+basic_realm=csp.att.com
+#basic_realm=aaf.att.com
+basic_warn=TRUE
+
+cadi_loglevel=WARN
+cadi_keyfile=target/swm/package/nix/dist_files/appl/data-router/etc/keyfile
+
+# Configure AAF
+#These are dummy values add appropriate values required
+aaf_url=url
+
+#AJSC - MECHID
+#These are dummy values add appropriate values required
+aaf_id=dummyid@ajsc.att.com
+aaf_password=enc:277edqJCjT0RlUI3BtbDQa-3Ha-CQGd
+aaf_timeout=5000
+aaf_clean_interval=30000
+aaf_user_expires=5000
+aaf_high_count=1000
+
+
diff --git a/src/main/config/jul-redirect.properties b/src/main/config/jul-redirect.properties
new file mode 100644
index 0000000..8b6624d
--- /dev/null
+++ b/src/main/config/jul-redirect.properties
@@ -0,0 +1,13 @@
+
+# Bridge JUL->slf4j Logging Configuration File
+#
+# This file bridges the JUL logging infrastructure into
+# SLF4J so JUL logs go to the logback implementation provided
+# in this project. SLF4J also captures log4j and has
+# other framework options as well, providing a common
+# logging infrastructure for capturing all logs from different
+# libraries using different frameworks in one place.
+
+# Global properties
+handlers=org.slf4j.bridge.SLF4JBridgeHandler
+.level= ALL
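
The handlers/.level settings above take effect when the JVM loads this file as its java.util.logging configuration. The same bridge can also be installed programmatically; a minimal sketch, assuming the org.slf4j:jul-to-slf4j bridge is on the classpath (this commit does not itself declare that dependency here):

import org.slf4j.bridge.SLF4JBridgeHandler;

public class JulBridgeBootstrap {
  public static void main(String[] args) {
    // Drop any handlers attached directly to the JUL root logger, then route
    // every java.util.logging record through SLF4J (and on to logback here).
    SLF4JBridgeHandler.removeHandlersForRootLogger();
    SLF4JBridgeHandler.install();

    java.util.logging.Logger.getLogger("demo").info("now captured by the SLF4J bridge");
  }
}
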
diff --git a/src/main/config/keyfile b/src/main/config/keyfile
new file mode 100644
index 0000000..882e86a
--- /dev/null
+++ b/src/main/config/keyfile
@@ -0,0 +1,27 @@
+ZuIwp0TkyVPDeX1Up-8JtkMWvjsCpoiu1_VKeWrtrvxunvAke8_tiFyHPPyb2nkhepFYj6tXzpfS
+rGz5XF_TH9NbsKaP8u0HV5clz2WriYQRvHS85vjY7hXxkpFuLb7zkLAPqTyIDpj7FiW61NzsRUAq
+TM8jH16jr7mBNnb56w24mNGOwznMPcIZKcjgZU1ekaPDFpWyhQElU7Y0q_94P_Gkk45r66Hj22sU
+OiOaaftmudZlswLw8-8Zaakqf2yW9HjMVfuYCwSodBHCW5rdB3Ctb5W36rnD_AQco3Ky2PgPmqvk
+QkJYuUHpbuDqVHqLOajlKSIGMTIqAIBg51fRaaONtD-Q5xzY8E5wO1YWTLKcP5tsNvUpzM8Wu3NS
+ynpGpUcvlTqWWsGzTbzOyamyKkdNdx97sSqjM25Zh1-ps48h6cddGYWpab7SUvqRCS11QBUyLTry
+2iwTEHMhHRIbo7PO99ALQfuq9gI1zKGfurJdvLBeBaFs5SCF0AiCZ3WcDO8Rv3HpxVZ2_ShbDxb0
+eMoO6SotXu51fj8Y3-WqsfZziQyEsHyqpg5uQ6yUtz01h5YHLEoVuotF1U4agmQR6kEkYk-wNOiZ
+v-8gaA9gtbLoAdKhuKFxQgQLNMf6GzVzZNujbmDzLoZAP_mXAv29aBPaf64Ugzv-Oa5GZdBgD-Xd
+_pahML-ionw99r0TnkpShYmDqMKhMdjaP3m87WIAZkIB-L-VTyKcEsJ4340VSzCOsv3waiM0S89u
+4cMcG5y-PLY8IoipIlLUPTWD3SjcQ9DV1Dt3T5KjdWLsj48D3W4K4e9PB8yxs0gtUjgVUR2_xEir
+G5eDO9Ac1eHFWGDFFP0SgG-TbHJUKlvy9mwLzmU0fC3xPjhqmIr-v0HxF7HN-tmb1LHDorno8tSN
+u7kUGcKSchIiFfvkd066crUb2mH7PnXTaWmAjyVj9VsBExFUYEdpHMAV4sAP9-RxZGDRt46UhrDK
+QZvvNhBVyOEjHPHWI4vl1r1v8HNH1_2jZu5DVJWyHWR56aCo1lhFH9_X6UAHUHbnXViDONZOVXlT
+9-WD0tk2zJGuwrhdZDAnPnAmjfwbwbpnr5Hmex1i1JiD7WVyP1kbfoej2TmdiYbxr9oBYaGQ29JI
+aHod7MQCLtvL1z5XgnDPLZ4y3_9SbqHKYbNa8UgZkTLF5EacGThYVFDLA9cbafHDtR1kMGE3vv4D
+EJ-0pAYTOGmKlVI7DwNyKsY9JTyudrxTqhOxi9jgcJNWiUaNe9yhL8Pyc2YBqUTTYhh_a2d1rvkZ
+0Gh1crviVxqBrIkRKaMRXZ4f1vDLz-3NvG_vwPOo8WRFo5nGmSdTw7CjBaigJ_cYCfDhoP11pEnw
+cndsZNcHs-v05LlxeIIMDD_f5Bvz-il_DLA4eK2HqgLdxh8ziSDl2azk14MJY4amzz6reEXUuKLV
+RsZGf_jbDGKhE2HuDQ5ovoLOi4OqE1oRuqh-dGxitrYouP2SN1l_1tCEMRth86FMV-6AQtZsvdUo
+y9MtQ7e35atjA8nHtgADlDTmJBKQiUHUsOZ77p1qp17HAFMovUkc739opfEYnKUn6Itpw5Ipm_Is
+ra6chJUfMpOFof5rb5OjqFAN27c_-mPo1lQU3ndYlKGh_n5V8ufX6v2Yri8WzOPf6hjVYotkmoMP
+NPAICDCB8W5ddBjsopzLVVEtaXDu9Qj6-zf77hT4iQ7rBd2Ner8iLqN3Kis0dvkNM3_uH8onau1G
+Y_YYw7PPSZyd2S_7Dd6G-IG4ayO6e5DD6oUwwekyiQI_3rTXNa_wldGxqW9u818010ekE4Qdlfcj
+beIn7fAeaOjReZ87hRgWyMs-EgTVHw8RL3yI_O6VvRTVRONRF1Y4C_-IYa8z-bfrwXx3BBd9TTgb
+EnS9wVOyC2OgUN6BhPLGLhxzkJ05nEjizXEc9t5EPYoSRwesajGGrrG_0-qWbuU5hKLPLkyeJLHb
+5HXOTVsrUR59Vov2M3_EswkxcImblox3k3VS2yihZMGyfqLzZIUXgd8ufkevKKU6DxwacGTb
\ No newline at end of file
diff --git a/src/main/config/runner-web.xml b/src/main/config/runner-web.xml
new file mode 100644
index 0000000..b51aff4
--- /dev/null
+++ b/src/main/config/runner-web.xml
@@ -0,0 +1,97 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
+-->
+<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
+ metadata-complete="false" version="3.0">
+
+ <context-param>
+ <param-name>contextConfigLocation</param-name>
+ <param-value>/WEB-INF/spring-servlet.xml,
+ classpath:applicationContext.xml
+ </param-value>
+ </context-param>
+
+ <context-param>
+ <param-name>spring.profiles.default</param-name>
+ <param-value>nooauth</param-value>
+ </context-param>
+
+ <listener>
+ <listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
+ </listener>
+
+ <servlet>
+ <servlet-name>ManagementServlet</servlet-name>
+ <servlet-class>ajsc.ManagementServlet</servlet-class>
+ </servlet>
+
+
+ <filter>
+ <filter-name>InterceptorFilter</filter-name>
+ <filter-class>ajsc.filters.InterceptorFilter</filter-class>
+ <init-param>
+ <param-name>preProcessor_interceptor_config_file</param-name>
+ <param-value>/etc/PreProcessorInterceptors.properties</param-value>
+ </init-param>
+ <init-param>
+ <param-name>postProcessor_interceptor_config_file</param-name>
+ <param-value>/etc/PostProcessorInterceptors.properties</param-value>
+ </init-param>
+
+ </filter>
+
+ <servlet>
+ <servlet-name>RestletServlet</servlet-name>
+ <servlet-class>ajsc.restlet.RestletSpringServlet</servlet-class>
+ <init-param>
+ <param-name>org.restlet.component</param-name>
+ <param-value>restletComponent</param-value>
+ </init-param>
+ </servlet>
+
+ <servlet>
+ <servlet-name>CamelServlet</servlet-name>
+ <servlet-class>ajsc.servlet.AjscCamelServlet</servlet-class>
+ </servlet>
+
+
+ <filter>
+ <filter-name>springSecurityFilterChain</filter-name>
+ <filter-class>org.springframework.web.filter.DelegatingFilterProxy</filter-class>
+ </filter>
+
+ <servlet>
+ <servlet-name>spring</servlet-name>
+ <servlet-class>org.springframework.web.servlet.DispatcherServlet</servlet-class>
+ <load-on-startup>1</load-on-startup>
+ </servlet>
+
+<!-- <servlet-mapping>
+ <servlet-name>spring</servlet-name>
+ <url-pattern>/</url-pattern>
+ </servlet-mapping>-->
+
+<!-- BEGIN jsp -->
+
+ <servlet id="jsp">
+ <servlet-name>jsp</servlet-name>
+ <servlet-class>org.apache.jasper.servlet.JspServlet</servlet-class>
+ </servlet>
+
+
+
+
+
+ <!-- BEGIN static content -->
+ <servlet>
+ <servlet-name>default</servlet-name>
+ <servlet-class>org.eclipse.jetty.servlet.DefaultServlet</servlet-class>
+ <init-param>
+ <param-name>dirAllowed</param-name>
+ <param-value>true</param-value>
+ </init-param>
+ </servlet>
+ <!-- END static content -->
+</web-app>
diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile
new file mode 100644
index 0000000..f53850d
--- /dev/null
+++ b/src/main/docker/Dockerfile
@@ -0,0 +1,26 @@
+FROM ubuntu:14.04
+
+ARG MICRO_HOME=/opt/app/data-router
+ARG BIN_HOME=$MICRO_HOME/bin
+
+RUN apt-get update
+
+# Install and setup java8
+RUN apt-get update && apt-get install -y software-properties-common
+## sudo -E is required to preserve the environment. If you remove that line, it will most likely freeze at this step
+RUN sudo -E add-apt-repository ppa:openjdk-r/ppa && apt-get update && apt-get install -y openjdk-8-jdk
+## Set up JAVA_HOME; this is useful for the docker command line
+ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
+RUN export JAVA_HOME
+
+# Build up the deployment folder structure
+RUN mkdir -p $MICRO_HOME
+ADD swm/package/nix/dist_files/appl/data-router/* $MICRO_HOME/
+RUN mkdir -p $BIN_HOME
+COPY *.sh $BIN_HOME
+RUN chmod 755 $BIN_HOME/*
+RUN ln -s /logs $MICRO_HOME/logs
+
+EXPOSE 9502
+
+CMD /opt/app/data-router/bin/start.sh
diff --git a/src/main/java/org/openecomp/datarouter/entity/AaiEventEntity.java b/src/main/java/org/openecomp/datarouter/entity/AaiEventEntity.java
new file mode 100644
index 0000000..418c0d3
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/entity/AaiEventEntity.java
@@ -0,0 +1,315 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+/*
+* ============LICENSE_START=======================================================
+* DataRouter
+* ================================================================================
+* Copyright © 2017 AT&T Intellectual Property.
+* Copyright © 2017 Amdocs
+* All rights reserved.
+* ================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+*
+* ECOMP and OpenECOMP are trademarks
+* and service marks of AT&T Intellectual Property.
+*/
+
+package org.openecomp.datarouter.entity;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.json.Json;
+import javax.json.JsonObject;
+
+/**
+ * Note: AaiEventEntity is a port forward of IndexDocument. It has been renamed here to move
+ * forward with abstraction of the document store technology.
+ */
+public class AaiEventEntity implements DocumentStoreDataEntity, Serializable {
+
+ private static final long serialVersionUID = -5188479658230319058L;
+
+ protected String entityType;
+ protected String entityPrimaryKeyName;
+ protected String entityPrimaryKeyValue;
+ protected ArrayList<String> searchTagCollection = new ArrayList<String>();
+ protected ArrayList<String> searchTagIdCollection = new ArrayList<String>();
+ protected ArrayList<String> crossEntityReferenceCollection = new ArrayList<String>();
+ protected String lastmodTimestamp;
+ protected String link;
+
+ private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
+ /*
+ * Generated fields, leave the settings for junit overrides
+ */
+
+ // generated, SHA-256 digest
+ protected String id;
+
+ /*
+ * generated based on searchTagCollection values
+ */
+ protected String searchTags;
+ protected String searchTagIds;
+ protected String crossReferenceEntityValues;
+
+
+ private static String convertBytesToHexString(byte[] bytesToConvert) {
+ StringBuffer hexString = new StringBuffer();
+ for (int i = 0; i < bytesToConvert.length; i++) {
+ hexString.append(Integer.toHexString(0xFF & bytesToConvert[i]));
+ }
+ return hexString.toString();
+ }
+
+ private static String concatArray(List<String> list, char delimiter) {
+
+ if (list == null || list.size() == 0) {
+ return "";
+ }
+
+ StringBuilder result = new StringBuilder(64);
+
+ int listSize = list.size();
+ boolean firstValue = true;
+
+ for (String item : list) {
+
+ if (firstValue) {
+ result.append(item);
+ firstValue = false;
+ } else {
+ result.append(delimiter).append(item);
+ }
+
+ }
+
+ return result.toString();
+
+ }
+
+ /*
+ * We'll try and create a unique identity key that we can use for differencing the previously
+ * imported record sets as we won't have granular control of what is created/removed and when. The
+ * best we can hope for is identification of resources by generated Id until the Identity-Service
+ * UUID is tagged against all resources, then we can use that instead.
+ */
+
+ private static String generateUniqueShaDigest(String entityType, String fieldName,
+ String fieldValue) throws NoSuchAlgorithmException {
+
+ /*
+ * Basically SHA-256 will result in an identity with a guaranteed uniqueness compared to just a
+ * java hashcode value.
+ */
+ MessageDigest digest = MessageDigest.getInstance("SHA-256");
+ digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes());
+ return convertBytesToHexString(digest.digest());
+ }
+
+
+ public AaiEventEntity() {
+ SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT);
+ Timestamp timestamp = new Timestamp(System.currentTimeMillis());
+ String currentFormattedTimeStamp = dateFormat.format(timestamp);
+ this.lastmodTimestamp = currentFormattedTimeStamp;
+ }
+
+ public void deriveFields() throws NoSuchAlgorithmException {
+ this.id = generateUniqueShaDigest(entityType, entityPrimaryKeyName, entityPrimaryKeyValue);
+ this.searchTags = concatArray(searchTagCollection, ';');
+ this.searchTagIds = concatArray(searchTagIdCollection, ';');
+ this.crossReferenceEntityValues = concatArray(crossEntityReferenceCollection, ';');
+ }
+
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.datarouter.entity.AAIEventEntity#getAsJson()
+ */
+ @Override
+ public String getAsJson() throws IOException {
+
+ JsonObject obj = Json.createObjectBuilder().add("entityType", entityType)
+ .add("entityPrimaryKeyValue", entityPrimaryKeyValue).add("searchTagIDs", searchTagIds)
+ .add("searchTags", searchTags).add("crossEntityReferenceValues", crossReferenceEntityValues)
+ .add("lastmodTimestamp", lastmodTimestamp).add("link", link).build();
+
+ return obj.toString();
+ }
+
+
+ public void addSearchTagWithKey(String searchTag, String key) {
+ searchTagIdCollection.add(key);
+ searchTagCollection.add(searchTag);
+ }
+
+ public void addCrossEntityReferenceValue(String crossEntityReferenceValue) {
+ if (!crossEntityReferenceCollection.contains(crossEntityReferenceValue)) {
+ crossEntityReferenceCollection.add(crossEntityReferenceValue);
+ }
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public String getEntityPrimaryKeyName() {
+ return entityPrimaryKeyName;
+ }
+
+ public String getEntityPrimaryKeyValue() {
+ return entityPrimaryKeyValue;
+ }
+
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.datarouter.entity.AAIEventEntity#getId()
+ */
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ public ArrayList<String> getSearchTagCollection() {
+ return searchTagCollection;
+ }
+
+ public String getSearchTags() {
+ return searchTags;
+ }
+
+ public String getSearchTagIDs() {
+ return searchTagIds;
+ }
+
+ public void setSearchTagIDs(String searchTagIDs) {
+ this.searchTagIds = searchTagIDs;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public void setSearchTagCollection(ArrayList<String> searchTagCollection) {
+ this.searchTagCollection = searchTagCollection;
+ }
+
+ public void setSearchTags(String searchTags) {
+ this.searchTags = searchTags;
+ }
+
+ public ArrayList<String> getSearchTagIdCollection() {
+ return searchTagIdCollection;
+ }
+
+ public void setSearchTagIdCollection(ArrayList<String> searchTagIdCollection) {
+ this.searchTagIdCollection = searchTagIdCollection;
+ }
+
+ public String getLastmodTimestamp() {
+ return lastmodTimestamp;
+ }
+
+ public void setLastmodTimestamp(String lastmodTimestamp) {
+ this.lastmodTimestamp = lastmodTimestamp;
+ }
+
+ public void setEntityPrimaryKeyName(String entityPrimaryKeyName) {
+ this.entityPrimaryKeyName = entityPrimaryKeyName;
+ }
+
+ public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) {
+ this.entityPrimaryKeyValue = entityPrimaryKeyValue;
+ }
+
+ public String getLink() {
+ return link;
+ }
+
+ public void setLink(String link) {
+ this.link = link;
+ }
+
+ /*
+ * public void mergeEntity(AAIEventEntity entityToMergeIn) {
+ *
+ * if ( entityToMergeIn == null ) { return; }
+ *
+ * if ( !entityToMergeIn.getEntityType().equals( entityType )) { entityType =
+ * entityToMergeIn.getEntityType(); }
+ *
+ * if ( !entityToMergeIn.getEntityType().equals( entityType )) { entityType =
+ * entityToMergeIn.getEntityType(); }
+ *
+ * }
+ */
+
+ @Override
+ public String toString() {
+ return "AAIEventEntity [" + (entityType != null ? "entityType=" + entityType + ", " : "")
+ + (entityPrimaryKeyName != null ? "entityPrimaryKeyName=" + entityPrimaryKeyName + ", "
+ : "")
+ + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", "
+ : "")
+ + (searchTagCollection != null ? "searchTagCollection=" + searchTagCollection + ", " : "")
+ + (searchTagIdCollection != null ? "searchTagIDCollection=" + searchTagIdCollection + ", "
+ : "")
+ + (crossEntityReferenceCollection != null
+ ? "crossEntityReferenceCollection=" + crossEntityReferenceCollection + ", " : "")
+ + "lastmodTimestamp=" + lastmodTimestamp + ", " + (id != null ? "id=" + id + ", " : "")
+ + (searchTags != null ? "searchTags=" + searchTags + ", " : "")
+ + (searchTagIds != null ? "searchTagIDs=" + searchTagIds + ", " : "")
+ + (crossReferenceEntityValues != null
+ ? "crossReferenceEntityValues=" + crossReferenceEntityValues : "")
+ + "]";
+ }
+
+}
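
A minimal usage sketch of the class above, showing how deriveFields() produces the SHA-256 id and the ';'-delimited searchTags/searchTagIDs values. The entity type, key and link values are hypothetical, and a javax.json provider is assumed on the classpath for getAsJson():

import org.openecomp.datarouter.entity.AaiEventEntity;

public class AaiEventEntityDemo {
  public static void main(String[] args) throws Exception {
    AaiEventEntity entity = new AaiEventEntity();            // lastmodTimestamp is set in the constructor
    entity.setEntityType("generic-vnf");                     // hypothetical values, for illustration only
    entity.setEntityPrimaryKeyName("vnf-id");
    entity.setEntityPrimaryKeyValue("example-vnf-123");
    entity.setLink("/aai/v8/network/generic-vnfs/generic-vnf/example-vnf-123");
    entity.addSearchTagWithKey("example-vnf-123", "vnf-id");

    entity.deriveFields();                                   // computes id, searchTags, searchTagIDs
    System.out.println(entity.getId());
    System.out.println(entity.getAsJson());
  }
}
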
diff --git a/src/main/java/org/openecomp/datarouter/entity/AggregationEntity.java b/src/main/java/org/openecomp/datarouter/entity/AggregationEntity.java
new file mode 100644
index 0000000..4830d67
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/entity/AggregationEntity.java
@@ -0,0 +1,124 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.entity;
+
+import java.io.Serializable;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.openecomp.datarouter.util.NodeUtils;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * The Class AggregationEntity. Mimics functionality of AAIUI's AggregationEntity
+ */
+public class AggregationEntity implements DocumentStoreDataEntity, Serializable {
+ private String id;
+ private String link;
+ private String lastmodTimestamp;
+
+ public String getLink() {
+ return link;
+ }
+ public void setLink(String link) {
+ this.link = link;
+ }
+ public String getId() {
+ // make sure that deriveFields() is called before getting the id
+ return id;
+ }
+ public void setId(String id) {
+ this.id = id;
+ }
+
+
+ public String getLastmodTimestamp() {
+ return lastmodTimestamp;
+ }
+ public void setLastmodTimestamp(String lastmodTimestamp) {
+ this.lastmodTimestamp = lastmodTimestamp;
+ }
+
+
+ Map<String, String> attributes = new HashMap<String, String>();
+ ObjectMapper mapper = new ObjectMapper();
+
+ /**
+ * Instantiates a new aggregation entity.
+ */
+ public AggregationEntity() { }
+
+ public void deriveFields(JsonNode uebPayload) {
+
+ this.setId(NodeUtils.generateUniqueShaDigest(link));
+
+ this.setLastmodTimestamp(Long.toString(System.currentTimeMillis()));
+
+ JsonNode entityNode = uebPayload.get("entity");
+
+ Iterator<Entry<String, JsonNode>> nodes = entityNode.fields();
+
+ while (nodes.hasNext()) {
+ Map.Entry<String, JsonNode> entry = (Map.Entry<String, JsonNode>) nodes.next();
+ if (!entry.getKey().equalsIgnoreCase("relationship-list")){
+ attributes.put(entry.getKey(), entry.getValue().asText());
+ }
+ }
+ }
+
+ public void copyAttributeKeyValuePair(Map<String, Object> map){
+ for(String key: map.keySet()){
+ if (!key.equalsIgnoreCase("relationship-list")){ // ignore relationship data which is not required in aggregation
+ this.attributes.put(key, map.get(key).toString()); // not sure if entity attribute can contain an object as value
+ }
+ }
+ }
+
+ public void addAttributeKeyValuePair(String key, String value){
+ this.attributes.put(key, value);
+ }
+
+ public String getAsJson() {
+ ObjectNode rootNode = mapper.createObjectNode();
+ rootNode.put("link", this.getLink());
+ rootNode.put("lastmodTimestamp", lastmodTimestamp);
+ for (String key: this.attributes.keySet()){
+ rootNode.put(key, this.attributes.get(key));
+ }
+ return rootNode.toString();
+ }
+
+ @Override
+ public String toString() {
+ return "AggregationEntity [id=" + id + ", link=" + link + ", attributes=" + attributes
+ + ", mapper=" + mapper + "]";
+ }
+}
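
An illustrative sketch of how deriveFields(JsonNode) consumes a UEB event payload: only the "entity" object is read, "relationship-list" is skipped, and the document id is the SHA digest of the link. The payload fragment and link below are hypothetical:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.openecomp.datarouter.entity.AggregationEntity;

public class AggregationEntityDemo {
  public static void main(String[] args) throws Exception {
    // Hypothetical slice of a UEB event payload
    String payload = "{\"entity\":{\"vnf-id\":\"example-vnf-123\",\"prov-status\":\"ACTIVE\","
        + "\"relationship-list\":{}}}";
    JsonNode uebPayload = new ObjectMapper().readTree(payload);

    AggregationEntity ae = new AggregationEntity();
    ae.setLink("/aai/v8/network/generic-vnfs/generic-vnf/example-vnf-123");  // hypothetical link
    ae.deriveFields(uebPayload);   // id from the link; attributes copied minus relationship-list

    System.out.println(ae.getId());
    System.out.println(ae.getAsJson());
  }
}
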
diff --git a/src/main/java/org/openecomp/datarouter/entity/DocumentStoreDataEntity.java b/src/main/java/org/openecomp/datarouter/entity/DocumentStoreDataEntity.java
new file mode 100644
index 0000000..61df316
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/entity/DocumentStoreDataEntity.java
@@ -0,0 +1,35 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.entity;
+
+import java.io.IOException;
+
+public interface DocumentStoreDataEntity {
+
+ public String getId();
+
+ public String getAsJson() throws IOException;
+
+}
diff --git a/src/main/java/org/openecomp/datarouter/entity/OxmEntityDescriptor.java b/src/main/java/org/openecomp/datarouter/entity/OxmEntityDescriptor.java
new file mode 100644
index 0000000..9f486e4
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/entity/OxmEntityDescriptor.java
@@ -0,0 +1,126 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.entity;
+
+import java.util.List;
+
+import org.openecomp.datarouter.util.CrossEntityReference;
+
+
+public class OxmEntityDescriptor {
+
+ private String entityName;
+
+ private List<String> primaryKeyAttributeName;
+
+ private List<String> searchableAttributes;
+
+ private CrossEntityReference crossEntityReference;
+
+ private List<String> alias;
+
+ private List<String> suggestableAttributes;
+
+ boolean isSuggestableEntity;
+
+ public String getEntityName() {
+ return entityName;
+ }
+
+ public void setEntityName(String entityName) {
+ this.entityName = entityName;
+ }
+
+ public List<String> getPrimaryKeyAttributeName() {
+ return primaryKeyAttributeName;
+ }
+
+ public void setPrimaryKeyAttributeName(List<String> primaryKeyAttributeName) {
+ this.primaryKeyAttributeName = primaryKeyAttributeName;
+ }
+
+ public List<String> getSearchableAttributes() {
+ return searchableAttributes;
+ }
+
+ public void setSearchableAttributes(List<String> searchableAttributes) {
+ this.searchableAttributes = searchableAttributes;
+ }
+
+ public boolean hasSearchableAttributes() {
+
+ if ( this.searchableAttributes == null) {
+ return false;
+ }
+
+ if ( this.searchableAttributes.size() > 0 ) {
+ return true;
+ }
+
+ return false;
+
+ }
+
+ public CrossEntityReference getCrossEntityReference() {
+ return crossEntityReference;
+ }
+
+ public void setCrossEntityReference(CrossEntityReference crossEntityReference) {
+ this.crossEntityReference = crossEntityReference;
+ }
+
+ public List<String> getAlias() {
+ return alias;
+ }
+
+ public void setAlias(List<String> alias) {
+ this.alias = alias;
+ }
+
+ public List<String> getSuggestableAttributes() {
+ return suggestableAttributes;
+ }
+
+ public void setSuggestableAttributes(List<String> suggestableAttributes) {
+ this.suggestableAttributes = suggestableAttributes;
+ }
+
+ public boolean isSuggestableEntity() {
+ return isSuggestableEntity;
+ }
+
+ public void setSuggestableEntity(boolean isSuggestableEntity) {
+ this.isSuggestableEntity = isSuggestableEntity;
+ }
+
+ @Override
+ public String toString() {
+ return "OxmEntityDescriptor [entityName=" + entityName + ", primaryKeyAttributeName="
+ + primaryKeyAttributeName + ", searchableAttributes=" + searchableAttributes
+ + ", crossEntityReference=" + crossEntityReference + ", alias=" + alias
+ + ", suggestableAttributes=" + suggestableAttributes + ", isSuggestableEntity="
+ + isSuggestableEntity + "]";
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/org/openecomp/datarouter/entity/PolicyResponse.java b/src/main/java/org/openecomp/datarouter/entity/PolicyResponse.java
new file mode 100644
index 0000000..fd577fa
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/entity/PolicyResponse.java
@@ -0,0 +1,72 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.entity;
+
+/**
+ * Provides information about the level of success of a policy execution against a routed query.
+ */
+public class PolicyResponse {
+
+ private ResponseType responseType;
+
+ private String responseData;
+
+ private int httpResponseCode;
+
+ public PolicyResponse(ResponseType responseType, String responseData) {
+ super();
+ this.responseType = responseType;
+ this.responseData = responseData;
+ }
+
+ public ResponseType getResponseType() {
+ return responseType;
+ }
+
+ public String getResponseData() {
+ return responseData;
+ }
+
+
+ public int getHttpResponseCode() {
+ return httpResponseCode;
+ }
+
+ public void setHttpResponseCode(int httpResponseCode) {
+ this.httpResponseCode = httpResponseCode;
+ }
+
+ @Override
+ public String toString() {
+ return "PolicyResponse [responseType=" + responseType + ", responseData=" + responseData
+ + ", httpResponseCode=" + httpResponseCode + "]";
+ }
+
+
+
+ public enum ResponseType {
+ SUCCESS, PARTIAL_SUCCESS, FAILURE;
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/entity/SuggestionSearchEntity.java b/src/main/java/org/openecomp/datarouter/entity/SuggestionSearchEntity.java
new file mode 100644
index 0000000..ae2711b
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/entity/SuggestionSearchEntity.java
@@ -0,0 +1,281 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.entity;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.openecomp.datarouter.util.NodeUtils;
+
+import com.fasterxml.jackson.databind.JsonNode;
+
+public class SuggestionSearchEntity implements DocumentStoreDataEntity, Serializable {
+ private static final long serialVersionUID = -3636393943669310760L;
+
+ protected String id; // generated SHA-256 digest
+ private String entityType;
+ private List<String> entityTypeAliases;
+ private List<String> suggestionInputPermutations = new ArrayList<>();
+ private List<String> statusPermutations = new ArrayList<>();
+ private List<String> suggestableAttr = new ArrayList<>();
+ private Map<String, String> payload = new HashMap<>();
+ private JSONObject payloadJsonNode = new JSONObject();
+ private StringBuffer outputString = new StringBuffer();
+
+ public void deriveFields() throws NoSuchAlgorithmException {
+ int payloadEntryCounter = 1;
+
+ for (Map.Entry<String, String> payload : getPayload().entrySet()) {
+ if (payload.getValue() != null && payload.getValue().length() > 0) {
+ this.getPayloadJsonNode().put(payload.getKey(), payload.getValue());
+ this.outputString.append(payload.getValue());
+
+ if (payloadEntryCounter < getPayload().entrySet().size()) {
+ this.outputString.append(" and ");
+ } else {
+ this.outputString.append(" ");
+ }
+ }
+
+ payloadEntryCounter++;
+ }
+
+ this.outputString.append(getEntityTypeAliases().get(0));
+ this.id = NodeUtils.generateUniqueShaDigest(outputString.toString());
+ }
+
+ /**
+ * Launch pad for performing permutations of the entity type, aliases, prov status and orchestration status.
+ * SHA-256 will result in an ID with a guaranteed uniqueness compared to just a java hashcode value.
+ * @return the list of generated search suggestion input permutations
+ */
+ public List<String> generateSuggestionInputPermutations() {
+ List<String> entityNames = new ArrayList<>();
+ entityNames.add(entityType);
+
+ if ((entityTypeAliases != null) && !(entityTypeAliases.isEmpty())) {
+ for (String alias : entityTypeAliases) {
+ entityNames.add(alias);
+ }
+ }
+
+ ArrayList<String> listToPermutate = new ArrayList<>(statusPermutations);
+ ArrayList<String> listOfSearchSuggestionPermutations = new ArrayList<>();
+
+ for (String entityName : entityNames) {
+ listToPermutate.add(entityName);
+ permutateList(listToPermutate, new ArrayList<String>(), listToPermutate.size(), listOfSearchSuggestionPermutations);
+ listToPermutate.remove(entityName);
+ }
+
+ return listOfSearchSuggestionPermutations;
+ }
+
+ public boolean isSuggestableDoc() {
+ return this.getPayload().size() != 0;
+ }
+
+ /**
+ * Generate all permutations of Entity Type and (Prov Status and/or Orchestration Status)
+ * @param list The list of unique elements to create permutations of
+ * @param permutation A list to hold the current permutation used during recursion
+ * @param size To keep track of the original size of the number of unique elements
+ * @param listOfSearchSuggestionPermutationList The list to hold all of the different permutations
+ */
+ private void permutateList(List<String> list, List<String> permutation, int size,
+ List<String> listOfSearchSuggestionPermutationList) {
+ if (permutation.size() == size) {
+ StringBuilder newPermutation = new StringBuilder();
+
+ for (int i = 0; i < permutation.size(); i++) {
+ newPermutation.append(permutation.get(i)).append(" ");
+ }
+
+ listOfSearchSuggestionPermutationList.add(newPermutation.toString().trim());
+
+ return;
+ }
+
+ String[] availableItems = list.toArray(new String[0]);
+
+ for (String i : availableItems) {
+ permutation.add(i);
+ list.remove(i);
+ permutateList(list, permutation, size, listOfSearchSuggestionPermutationList);
+ list.add(i);
+ permutation.remove(i);
+ }
+ }
+
+ /**
+ * return Custom-built JSON representation of this class
+ */
+ @Override
+ public String getAsJson() throws IOException {
+ if (entityType == null || suggestionInputPermutations == null) {
+ return null;
+ }
+
+ JSONObject rootNode = new JSONObject();
+ JSONArray inputArray = new JSONArray();
+ JSONObject payloadNode = new JSONObject();
+ StringBuffer outputString = new StringBuffer();
+
+ int payloadEntryCounter = 1;
+
+ // Add prov and orchestration status to search suggestion string
+ for (Map.Entry<String, String> payload : getPayload().entrySet()) {
+ payloadNode.put(payload.getKey(), payload.getValue());
+ outputString.append(payload.getValue());
+
+ if (payloadEntryCounter < getPayload().entrySet().size()) {
+ // Add the word "and" between prov and orchestration statuses, if both are present
+ outputString.append(" and ");
+ payloadEntryCounter++;
+ }
+ }
+
+ // Add entity type to search suggestion string. We've decided to use the first entity type alias from the OXM
+ outputString.append(" ").append(getEntityTypeAliases().get(0));
+
+ for (String permutation : suggestionInputPermutations) {
+ inputArray.put(permutation);
+ }
+
+ // Build up the search suggestion as JSON
+ JSONObject entitySuggest = new JSONObject();
+ entitySuggest.put("input", inputArray);
+ entitySuggest.put("output", outputString);
+ entitySuggest.put("payload", payloadNode);
+ rootNode.put("entity_suggest", entitySuggest);
+
+ return rootNode.toString();
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public List<String> getEntityTypeAliases() {
+ return entityTypeAliases;
+ }
+
+ public void setEntityTypeAliases(List<String> entityTypeAliases) {
+ this.entityTypeAliases = entityTypeAliases;
+ }
+
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public StringBuffer getOutputString() {
+ return outputString;
+ }
+
+ public void setOutputString(StringBuffer outputString) {
+ this.outputString = outputString;
+ }
+
+ public Map<String, String> getPayload() {
+ return payload;
+ }
+
+ public void setPayloadFromResponse(JsonNode node) {
+ Map<String, String> nodePayload = new HashMap<>();
+ JsonNode entityNode = node.get("entity");
+ if (suggestableAttr != null && entityNode != null) {
+ for (String attribute : suggestableAttr) {
+ if (entityNode.get(attribute) != null && !entityNode.get(attribute).asText().trim().isEmpty()) {
+ nodePayload.put(attribute, entityNode.get(attribute).asText());
+ this.statusPermutations.add(entityNode.get(attribute).asText());
+ }
+ }
+ this.setPayload(nodePayload);
+ }
+ }
+
+ public void setPayload(Map<String, String> payload) {
+ this.payload = payload;
+ }
+
+ public JSONObject getPayloadJsonNode() {
+ return payloadJsonNode;
+ }
+
+ public void setPayloadJsonNode(JSONObject payloadJsonNode) {
+ this.payloadJsonNode = payloadJsonNode;
+ }
+
+ public List<String> getStatusPermutations() {
+ return statusPermutations;
+ }
+
+ public List<String> getSuggestableAttr() {
+ return suggestableAttr;
+ }
+
+ public List<String> getSuggestionInputPermutations() {
+ return this.suggestionInputPermutations;
+ }
+
+ public void setStatusPermutations(List<String> statusPermutations) {
+ this.statusPermutations = statusPermutations;
+ }
+
+ public void setSuggestableAttr(ArrayList<String> attributes) {
+ for (String attribute : attributes) {
+ this.suggestableAttr.add(attribute);
+ }
+ }
+
+ public void setSuggestionInputPermutations(List<String> permutations) {
+ this.suggestionInputPermutations = permutations;
+ }
+
+ @Override
+ public String toString() {
+ return "SuggestionSearchEntity [id=" + id + ", entityType=" + entityType
+ + ", entityTypeAliases=" + entityTypeAliases + ", suggestionInputPermutations="
+ + suggestionInputPermutations + ", statusPermutations=" + statusPermutations
+ + ", suggestableAttr=" + suggestableAttr + ", payload=" + payload + ", payloadJsonNode="
+ + payloadJsonNode + ", outputString=" + outputString + "]";
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/entity/TopographicalEntity.java b/src/main/java/org/openecomp/datarouter/entity/TopographicalEntity.java
new file mode 100644
index 0000000..79cdfcd
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/entity/TopographicalEntity.java
@@ -0,0 +1,191 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.entity;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+
+import javax.json.Json;
+import javax.json.JsonObject;
+
+public class TopographicalEntity implements DocumentStoreDataEntity, Serializable {
+
+ private static final long serialVersionUID = -5188479658230319058L;
+
+ protected String entityType;
+ protected String entityPrimaryKeyValue;
+ protected String entityPrimaryKeyName;
+ protected String latitude;
+ protected String longitude;
+ protected String selfLink;
+
+ // generated, SHA-256 digest
+ protected String id;
+
+ private static String convertBytesToHexString(byte[] bytesToConvert) {
+ StringBuilder hexString = new StringBuilder();
+ for (byte b : bytesToConvert) {
+ // Zero-pad each byte so the digest is always a fixed-length, canonical hex string
+ hexString.append(String.format("%02x", b));
+ }
+ return hexString.toString();
+ }
+
+ private static String concatArray(List<String> list, char delimiter) {
+
+ if (list == null || list.isEmpty()) {
+ return "";
+ }
+
+ StringBuilder result = new StringBuilder(64);
+
+ boolean firstValue = true;
+
+ for (String item : list) {
+
+ if (firstValue) {
+ result.append(item);
+ firstValue = false;
+ } else {
+ result.append(delimiter).append(item);
+ }
+ }
+
+ return result.toString();
+ }
+
+ /*
+ * We'll try to create a unique identity key that we can use for
+ * differencing the previously imported record sets, since we won't have granular
+ * control over what is created/removed and when. The best we can hope for is
+ * identification of resources by generated Id until the Identity-Service
+ * UUID is tagged against all resources; then we can use that instead.
+ */
+ public static String generateUniqueShaDigest(String entityType, String fieldName,
+ String fieldValue) throws NoSuchAlgorithmException {
+
+ /*
+ * SHA-256 gives an identity with a far lower collision probability
+ * than a plain Java hashCode value.
+ */
+ MessageDigest digest = MessageDigest.getInstance("SHA-256");
+ digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes());
+ return convertBytesToHexString(digest.digest());
+ }
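+
+ /*
+ * Usage sketch (hypothetical arguments): the id for a pserver keyed by hostname could be derived as
+ *
+ *   String id = TopographicalEntity.generateUniqueShaDigest("pserver", "hostname", "example-host");
+ *
+ * which hashes "pserver.hostname.example-host" and returns the digest as a hex string.
+ */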
+
+ public TopographicalEntity() {}
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.openecomp.datarouter.entity.DocumentStoreDataEntity#getAsJson()
+ */
+ @Override
+ public String getAsJson() throws IOException {
+
+ JsonObject obj =
+ Json.createObjectBuilder().add("entityType", entityType)
+ .add("pkey", entityPrimaryKeyValue)
+ .add("location", Json.createObjectBuilder()
+ .add("lat", latitude)
+ .add("lon", longitude))
+ .add("selfLink", selfLink).build();
+
+ return obj.toString();
+ }
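+
+ /*
+ * For illustration only (values are hypothetical), the JSON document built above looks like:
+ *
+ *   { "entityType": "pserver", "pkey": "example-host",
+ *     "location": { "lat": "45.50", "lon": "-73.56" },
+ *     "selfLink": "https://aai.example.com/..." }
+ */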
+
+
+ @Override
+ public String toString() {
+ return "TopographicalEntity [" + ("entityType=" + entityType + ", ")
+ + ("entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", ")
+ + ("latitude=" + latitude + ", ") + ("longitude=" + longitude + ", ")
+ + ("ID=" + id + ", ")
+ + ("selfLink=" + selfLink) + "]";
+ }
+
+ @Override
+ public String getId() {
+ return this.id;
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public String getEntityPrimaryKeyValue() {
+ return entityPrimaryKeyValue;
+ }
+
+ public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) {
+ this.entityPrimaryKeyValue = entityPrimaryKeyValue;
+ }
+
+ public String getEntityPrimaryKeyName() {
+ return entityPrimaryKeyName;
+ }
+
+ public void setEntityPrimaryKeyName(String entityPrimaryKeyName) {
+ this.entityPrimaryKeyName = entityPrimaryKeyName;
+ }
+
+ public String getLatitude() {
+ return latitude;
+ }
+
+ public void setLatitude(String latitude) {
+ this.latitude = latitude;
+ }
+
+ public String getLongitude() {
+ return longitude;
+ }
+
+ public void setLongitude(String longitude) {
+ this.longitude = longitude;
+ }
+
+ public String getSelfLink() {
+ return selfLink;
+ }
+
+ public void setSelfLink(String selfLink) {
+ this.selfLink = selfLink;
+ }
+
+ public static long getSerialversionuid() {
+ return serialVersionUID;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/entity/UebEventHeader.java b/src/main/java/org/openecomp/datarouter/entity/UebEventHeader.java
new file mode 100644
index 0000000..5eab97f
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/entity/UebEventHeader.java
@@ -0,0 +1,169 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.entity;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * A convenience POJO for mapping the UebEventHeader from a UEB Event.
+ *
+ * @author davea
+ */
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class UebEventHeader {
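+
+ /*
+ * Illustrative event-header fragment this POJO can be bound to. All values are hypothetical;
+ * the hyphenated names follow the @JsonProperty annotations on the setters below:
+ *
+ *   { "id": "abc-123", "timestamp": "20170512-14:11:30:000", "action": "CREATE",
+ *     "domain": "example-domain", "source-name": "aai", "entity-type": "pserver",
+ *     "top-entity-type": "pserver", "entity-link": "/aai/v10/.../pservers/pserver/example",
+ *     "sequence-number": "1", "event-type": "AAI-EVENT", "version": "v10" }
+ */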
+
+ private String timestamp;
+
+ private String id;
+
+ private String action;
+
+ private String domain;
+
+ private String sourceName;
+
+ private String entityLink;
+
+ private String entityType;
+
+ private String topEntityType;
+
+ private String sequenceNumber;
+
+ private String eventType;
+
+ private String version;
+
+ public String getTimestamp() {
+ return timestamp;
+ }
+
+ public void setTimestamp(String timestamp) {
+ this.timestamp = timestamp;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getAction() {
+ return action;
+ }
+
+ public void setAction(String action) {
+ this.action = action;
+ }
+
+ public String getDomain() {
+ return domain;
+ }
+
+ public void setDomain(String domain) {
+ this.domain = domain;
+ }
+
+ public String getSourceName() {
+ return sourceName;
+ }
+
+ @JsonProperty("source-name")
+ public void setSourceName(String sourceName) {
+ this.sourceName = sourceName;
+ }
+
+ public String getEntityLink() {
+ return entityLink;
+ }
+
+ @JsonProperty("entity-link")
+ public void setEntityLink(String entityLink) {
+ this.entityLink = entityLink;
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ @JsonProperty("entity-type")
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public String getTopEntityType() {
+ return topEntityType;
+ }
+
+ @JsonProperty("top-entity-type")
+ public void setTopEntityType(String topEntityType) {
+ this.topEntityType = topEntityType;
+ }
+
+ public String getSequenceNumber() {
+ return sequenceNumber;
+ }
+
+ @JsonProperty("sequence-number")
+ public void setSequenceNumber(String sequenceNumber) {
+ this.sequenceNumber = sequenceNumber;
+ }
+
+ public String getEventType() {
+ return eventType;
+ }
+
+ @JsonProperty("event-type")
+ public void setEventType(String eventType) {
+ this.eventType = eventType;
+ }
+
+ public String getVersion() {
+ return version;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+ @Override
+ public String toString() {
+ return "UebEventHeader [" + (timestamp != null ? "timestamp=" + timestamp + ", " : "")
+ + (id != null ? "id=" + id + ", " : "") + (action != null ? "action=" + action + ", " : "")
+ + (domain != null ? "domain=" + domain + ", " : "")
+ + (sourceName != null ? "sourceName=" + sourceName + ", " : "")
+ + (entityLink != null ? "entityLink=" + entityLink + ", " : "")
+ + (entityType != null ? "entityType=" + entityType + ", " : "")
+ + (topEntityType != null ? "topEntityType=" + topEntityType + ", " : "")
+ + (sequenceNumber != null ? "sequenceNumber=" + sequenceNumber + ", " : "")
+ + (eventType != null ? "eventType=" + eventType + ", " : "")
+ + (version != null ? "version=" + version : "") + "]";
+ }
+
+}
diff --git a/src/main/java/org/openecomp/datarouter/exception/BaseDataRouterException.java b/src/main/java/org/openecomp/datarouter/exception/BaseDataRouterException.java
new file mode 100644
index 0000000..0cd0381
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/exception/BaseDataRouterException.java
@@ -0,0 +1,89 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.exception;
+
+/*
+ * COPYRIGHT NOTICE: Copyright (c) 2016 Team Pacifica (Amdocs & AT&T) The contents and intellectual
+ * property contained herein, remain the property of Team Pacifica (Amdocs & AT&T).
+ */
+
+import java.util.Locale;
+
+/**
+ * Base DataRouter exception class.
+ */
+public class BaseDataRouterException extends Exception {
+
+ /** Force serialVersionUID. */
+ private static final long serialVersionUID = -6663403070792969748L;
+
+ /** Default locale. */
+ public static final Locale LOCALE = Locale.US;
+
+ /** Exception id. */
+ private final String id;
+
+ /**
+ * Constructor.
+ *
+ * @param id the incoming id.
+ */
+ public BaseDataRouterException(final String id) {
+ super();
+ this.id = id;
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param id the incoming id
+ * @param message the incoming message
+ */
+ public BaseDataRouterException(final String id, final String message) {
+ super(message);
+ this.id = id;
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param id the incoming id
+ * @param message the incoming message
+ * @param cause the incoming throwable
+ */
+ public BaseDataRouterException(final String id, final String message, final Throwable cause) {
+ super(message, cause);
+ this.id = id;
+ }
+
+ /**
+ * Get the exception id.
+ *
+ * @return the exception id
+ */
+ public String getId() {
+ return this.id;
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/exception/DataRouterError.java b/src/main/java/org/openecomp/datarouter/exception/DataRouterError.java
new file mode 100644
index 0000000..4920591
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/exception/DataRouterError.java
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.exception;
+
+/*
+ * COPYRIGHT NOTICE: Copyright (c) 2016 Team Pacifica (Amdocs & AT&T) The contents and intellectual
+ * property contained herein, remain the property of Team Pacifica (Amdocs & AT&T).
+ */
+
+import java.text.MessageFormat;
+
+import javax.ws.rs.core.Response.Status;
+
+/**
+ * Data Layer (DL) enum for error conditions.
+ */
+public enum DataRouterError {
+
+ /** Parsing exceptions - Range 100..199. */
+ DL_PARSE_100("DL-100", "Unable to find resource {0} in the model", Status.BAD_REQUEST),
+ DL_PARSE_101("DL-101", "Unable to parse ", Status.BAD_REQUEST),
+ DL_PARSE_102("DL-102", "Sot Filter error: {0} ", Status.INTERNAL_SERVER_ERROR),
+ DL_PARSE_103("DL-103", "URL Parsing error: {0} ", Status.BAD_REQUEST),
+ DL_PARSE_104("DL-104", "Missing Ids filter: {0} ", Status.BAD_REQUEST),
+ DL_PARSE_105("DL-105", "Invalid Ids filter: {0} ", Status.BAD_REQUEST),
+
+ /** Validation exceptions - Range 200..299. */
+ DL_VALIDATION_200("DL-200", "Missing X-TransactionId in header ", Status.BAD_REQUEST),
+
+ /** Other components integration errors - Range 300..399. */
+ DL_INTEGRATION_300("DL-300", "Unable to decorate Graph ", Status.INTERNAL_SERVER_ERROR),
+
+ /** Environment related exceptions - Range 400..499. */
+ DL_ENV_400("DL-400", "Unable to find file {0} ", Status.INTERNAL_SERVER_ERROR),
+ DL_ENV_401("DL-401", "Unable to Load OXM Models", Status.INTERNAL_SERVER_ERROR),
+
+ /** Other components integration errors - Range 500..599. */
+ DL_AUTH_500("DL-500", "Unable to authorize User ", Status.FORBIDDEN);
+
+ /** The error id. */
+ private String id;
+ /** The error message. */
+ private String message;
+ /** The error http return code. */
+ private Status status;
+
+ /**
+ * Constructor.
+ *
+ * @param id the error id
+ * @param message the error message
+ */
+ DataRouterError(final String id, final String message, final Status status) {
+ this.id = id;
+ this.message = message;
+ this.status = status;
+ }
+
+ /**
+ * Get the id.
+ *
+ * @return the error id
+ */
+ public String getId() {
+ return this.id;
+ }
+
+ /**
+ * Get the message.
+ *
+ * @param args the error arguments
+ * @return the error message
+ */
+ public String getMessage(final Object... args) {
+ final MessageFormat formatter = new MessageFormat("");
+ formatter.applyPattern(this.message);
+ return formatter.format(args);
+ }
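+
+ /*
+ * Usage sketch: arguments are substituted into the pattern via MessageFormat, e.g.
+ *
+ *   DataRouterError.DL_PARSE_100.getMessage("pserver")
+ *   // -> "Unable to find resource pserver in the model"
+ */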
+
+ public Status getHttpStatus() {
+ return this.status;
+ }
+
+}
diff --git a/src/main/java/org/openecomp/datarouter/logging/DataRouterMsgs.java b/src/main/java/org/openecomp/datarouter/logging/DataRouterMsgs.java
new file mode 100644
index 0000000..8304c96
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/logging/DataRouterMsgs.java
@@ -0,0 +1,161 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.logging;
+
+import com.att.eelf.i18n.EELFResourceManager;
+
+import org.openecomp.cl.eelf.LogMessageEnum;
+
+public enum DataRouterMsgs implements LogMessageEnum {
+
+ /** Data Layer Service started. */
+ SERVICE_STARTED,
+
+ /**
+ * Data Layer Service failed to start.
+ * Arguments: {0} = Exception message.
+ */
+ STARTUP_FAILURE,
+
+ /**
+ * File has been changed.
+ * Arguments: {0} = File name.
+ */
+ FILE_CHANGED,
+
+ /**
+ * File has been reloaded.
+ * Arguments: {0} = File name.
+ */
+ FILE_RELOADED,
+
+ /**
+ * Reports the configuration watcher interval.
+ * Arguments: {0} = Interval
+ */
+ REPORT_CONFIG_WATCHER_INT,
+
+ /**
+ * Loading properties file.
+ * Arguments: {0} = File name.
+ */
+ LOADING_PROPERTIES,
+
+ /**
+ * Properties file has been loaded.
+ * Arguments: {0} = File name.
+ */
+ PROPERTIES_LOADED,
+
+ /**
+ * UEB no events received.
+ * Arguments: {0} = Topic name
+ */
+ UEB_NO_EVENTS_RECEIVED,
+
+ /**
+ * Routing policies are being configured.
+ */
+ CONFIGURING_ROUTING_POLICIES,
+
+ /**
+ * A properties file could not be successfully loaded.
+ * Arguments: {0} = File name.
+ */
+ LOAD_PROPERTIES_FAILURE,
+
+ /**
+ * Failed to register for an event topic with UEB.
+ * Arguments: {0} = Topic {1} = Error reason
+ */
+ UEB_CONNECT_ERR,
+
+ /**
+ * An error occurred while trying to route a query.
+ * Arguments: {0} = Query {1} = Error reason
+ */
+ QUERY_ROUTING_ERR,
+
+ /**
+ * Error in file monitor block.
+ */
+ FILE_MON_BLOCK_ERR,
+
+ /**
+ * Failure to create a property map.
+ */
+ CREATE_PROPERTY_MAP_ERR,
+
+ /**
+ * An error occurred reading from a file stream.
+ */
+ FILE_STREAM_ERR,
+
+ /**
+ * An error occurred while trying to configure a routing policy.
+ * Arguments: {0} = policy name {1} = source of the routing policy {2} = action of the routing
+ * policy
+ */
+ ROUTING_POLICY_CONFIGURATION_ERROR,
+
+ /**
+ * Received request {0} {1} from {2}. Sending response: {3}
+ * Arguments: {0} = operation {1} = target URL {2} = source {3} = response code
+ */
+ PROCESS_REST_REQUEST,
+
+ /**
+ * Processed event {0}. Result: {1}
+ * Arguments: {0} = event topic {1} = result
+ */
+ PROCESS_EVENT,
+
+ /**
+ * Arguments: {0} = Error
+ */
+ BAD_REST_REQUEST,
+
+ /**
+ * Arguments: {0} = Search index URL {1} = Reason
+ */
+ FAIL_TO_CREATE_SEARCH_INDEX,
+
+ /**
+ * Arguments: {0} = Successfully created index at endpoint
+ */
+ SEARCH_INDEX_CREATE_SUCCESS,
+
+ /** The OXM file could not be processed. */
+ INVALID_OXM_FILE,
+
+ /** The OXM directory could not be processed. */
+ INVALID_OXM_DIR;
+
+ /**
+ * Static initializer to ensure the resource bundles for this class are loaded...
+ */
+ static {
+ EELFResourceManager.loadMessageBundle("logging/DataRouterMsgs");
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/logging/EntityEventPolicyMsgs.java b/src/main/java/org/openecomp/datarouter/logging/EntityEventPolicyMsgs.java
new file mode 100644
index 0000000..09be3a0
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/logging/EntityEventPolicyMsgs.java
@@ -0,0 +1,218 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.logging;
+
+import com.att.eelf.i18n.EELFResourceManager;
+
+import org.openecomp.cl.eelf.LogMessageEnum;
+
+public enum EntityEventPolicyMsgs implements LogMessageEnum {
+
+ // Error Messages
+ /**
+ * Discarding event.
+ * Arguments:
+ * {0} = reason
+ * {1} = Payload:
+ */
+ DISCARD_AAI_EVENT_VERBOSE,
+
+ /**
+ * Discarding event.
+ * Arguments:
+ * {0} = Reason
+ */
+ DISCARD_AAI_EVENT_NONVERBOSE,
+
+ /**
+ * OXM version: {0} is not supported.
+ * Arguments:
+ * {0} = OXM Version
+ */
+ OXM_VERSION_NOT_SUPPORTED,
+
+ /**
+ * Failed to parse UEB payload.
+ * Arguments:
+ * {0} = Parse failure description
+ * {1} = Payload
+ */
+ FAILED_TO_PARSE_UEB_PAYLOAD,
+
+ /**
+ * Unable to retrieve etag at {0} for entity with id {1}
+ * Arguments:
+ * {0} = Resource endpoint.
+ * {1} = Entity id.
+ */
+ NO_ETAG_AVAILABLE_FAILURE,
+
+ /**
+ * Failed to update entity {0} with operation {1}.
+ * Arguments:
+ * {0} = Entity
+ * {1} = Operation
+ */
+ FAILED_TO_UPDATE_ENTITY_IN_DOCSTORE,
+
+
+ /**
+ * Action: {0} is not supported.
+ * Argument:
+ * {0} = Operation
+ */
+ ENTITY_OPERATION_NOT_SUPPORTED,
+
+ /**
+ * Arguments:
+ * {0} = reason
+ */
+ DISCARD_UPDATING_SEARCH_SUGGESTION_DATA,
+
+ /**
+ * Discarding topographical data. Reason: {0}. Payload: {1}
+ * Arguments:
+ * {0} = Reason for discarding data.
+ * {1} = Payload
+ */
+ DISCARD_UPDATING_TOPOGRAPHY_DATA_VERBOSE,
+
+ /**
+ * Discarding topographical data. Reason: {0}
+ * Arguments:
+ * {0} = Reason for discarding data.
+ */
+ DISCARD_UPDATING_TOPOGRAPHY_DATA_NONVERBOSE,
+
+ /**
+ * Failed to load OXM Model.
+ */
+ PROCESS_OXM_MODEL_MISSING,
+
+ /**
+ * Failed to create Search index {0} due to: {1}
+ *
+ * Arguments:
+ * {0} = Search index
+ * {1} = Error cause
+ */
+ FAIL_TO_CREATE_SEARCH_INDEX,
+
+ /**
+ * Failed to find OXM version in UEB payload. {0}
+ * Arguments:
+ * {0} = OXM version.
+ */
+ FAILED_TO_FIND_OXM_VERSION,
+
+
+ // Info Messages
+
+ /**
+ * Processing AAI Entity Event Policy:
+ * Arguments:
+ * {0} = Action
+ * {1} = Entity Type
+ * {2} = Payload
+ */
+ PROCESS_AAI_ENTITY_EVENT_POLICY_VERBOSE,
+
+ /**
+ * Processing AAI Entity Event Policy:
+ * Arguments:
+ * {0} = Action
+ * {1} = Entity Type
+ */
+ PROCESS_AAI_ENTITY_EVENT_POLICY_NONVERBOSE,
+
+ /**
+ * Cross Entity Reference synchronization {0}
+ * Arguments:
+ * {0} = Error string
+ *
+ */
+ CROSS_ENTITY_REFERENCE_SYNC,
+
+ /**
+ * Operation {0} completed in {1} ms with no errors
+ * Arguments:
+ * {0} = Operation type
+ * {1} = Time in ms.
+ */
+ OPERATION_RESULT_NO_ERRORS,
+
+ /**
+ * Found OXM model: {0}
+ * Arguments:
+ * {0} = Key pair.
+ */
+ PROCESS_OXM_MODEL_FOUND,
+
+ /**
+ * Successfully created index at {0}
+ *
+ * Arguments:
+ * {0} = Index resource endpoint
+ */
+ SEARCH_INDEX_CREATE_SUCCESS,
+
+ /**
+ * Entity Event Policy component started.
+ */
+ ENTITY_EVENT_POLICY_REGISTERED,
+
+ /**
+ * Arguments:
+ * {0} = Entity name
+ */
+ PRIMARY_KEY_NULL_FOR_ENTITY_TYPE,
+
+ /**
+ * Arguments: {0} = UEB payload
+ */
+ UEB_INVALID_PAYLOAD_JSON_FORMAT,
+
+ /**
+ * Arguments: {0} = Event header
+ */
+ UEB_FAILED_TO_PARSE_PAYLOAD,
+
+ /**
+ * Arguments: {0} = Exception
+ */
+ UEB_FAILED_UEBEVENTHEADER_CONVERSION,
+
+ /**
+ * Arguments: {0} = UEB event header
+ */
+ UEB_EVENT_HEADER_PARSED;
+
+ /**
+ * Static initializer to ensure the resource bundles for this class are loaded...
+ */
+ static {
+ EELFResourceManager.loadMessageBundle("logging/EntityEventPolicyMsgs");
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/policy/EntityEventPolicy.java b/src/main/java/org/openecomp/datarouter/policy/EntityEventPolicy.java
new file mode 100644
index 0000000..170c646
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/policy/EntityEventPolicy.java
@@ -0,0 +1,1162 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.policy;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.Processor;
+import org.eclipse.jetty.util.security.Password;
+import org.eclipse.persistence.dynamic.DynamicType;
+import org.eclipse.persistence.internal.helper.DatabaseField;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.cl.mdc.MdcContext;
+import org.openecomp.datarouter.entity.AaiEventEntity;
+import org.openecomp.datarouter.entity.AggregationEntity;
+import org.openecomp.datarouter.entity.DocumentStoreDataEntity;
+import org.openecomp.datarouter.entity.OxmEntityDescriptor;
+import org.openecomp.datarouter.entity.SuggestionSearchEntity;
+import org.openecomp.datarouter.entity.TopographicalEntity;
+import org.openecomp.datarouter.entity.UebEventHeader;
+import org.openecomp.datarouter.logging.DataRouterMsgs;
+import org.openecomp.datarouter.logging.EntityEventPolicyMsgs;
+import org.openecomp.datarouter.util.CrossEntityReference;
+import org.openecomp.datarouter.util.DataRouterConstants;
+import org.openecomp.datarouter.util.EntityOxmReferenceHelper;
+import org.openecomp.datarouter.util.ExternalOxmModelProcessor;
+import org.openecomp.datarouter.util.OxmModelLoader;
+import org.openecomp.datarouter.util.RouterServiceUtil;
+import org.openecomp.datarouter.util.SearchSuggestionPermutation;
+import org.openecomp.datarouter.util.Version;
+import org.openecomp.datarouter.util.VersionedOxmEntities;
+import org.openecomp.restclient.client.Headers;
+import org.openecomp.restclient.client.OperationResult;
+import org.openecomp.restclient.client.RestClient;
+import org.openecomp.restclient.rest.HttpUtil;
+import org.slf4j.MDC;
+
+public class EntityEventPolicy implements Processor {
+
+ public static final String additionalInfo = "Response of AAIEntityEventPolicy";
+ private static final String entitySearchSchema = "entitysearch_schema.json";
+ private static final String topographicalSearchSchema = "topographysearch_schema.json";
+ private Collection<ExternalOxmModelProcessor> externalOxmModelProcessors;
+ RestClient searchClient = null;
+
+ private final String EVENT_HEADER = "event-header";
+ private final String ENTITY_HEADER = "entity";
+ private final String ACTION_CREATE = "create";
+ private final String ACTION_DELETE = "delete";
+ private final String ACTION_UPDATE = "update";
+ private final String PROCESS_AAI_EVENT = "Process AAI Event";
+ private final String TOPO_LAT = "latitude";
+ private final String TOPO_LONG = "longitude";
+
+ private final List<String> SUPPORTED_ACTIONS =
+ Arrays.asList(ACTION_CREATE, ACTION_UPDATE, ACTION_DELETE);
+
+ Map<String, DynamicJAXBContext> oxmVersionContextMap = new HashMap<>();
+ private String oxmVersion = null;
+
+ private String entityIndexTarget = null;
+ private String entitySearchTarget = null;
+ private String topographicalIndexTarget = null;
+ private String topographicalSearchTarget = null;
+ private String autoSuggestSearchTarget = null;
+ private String aggregationSearchVnfTarget = null;
+
+ private String srcDomain;
+
+ private Logger logger;
+ private Logger metricsLogger;
+ private Logger auditLogger;
+
+ public enum ResponseType {
+ SUCCESS, PARTIAL_SUCCESS, FAILURE;
+ };
+
+ public EntityEventPolicy(EntityEventPolicyConfig config) {
+ LoggerFactory loggerFactoryInstance = LoggerFactory.getInstance();
+ logger = loggerFactoryInstance.getLogger(EntityEventPolicy.class.getName());
+ metricsLogger = loggerFactoryInstance.getMetricsLogger(EntityEventPolicy.class.getName());
+ auditLogger = loggerFactoryInstance.getAuditLogger(EntityEventPolicy.class.getName());
+
+ srcDomain = config.getSourceDomain();
+
+ entityIndexTarget =
+ EntityEventPolicy.concatSubUri(config.getSearchBaseUrl(), config.getSearchEndpoint(),
+ config.getSearchEntitySearchIndex());
+
+ entitySearchTarget =
+ EntityEventPolicy.concatSubUri(config.getSearchBaseUrl(), config.getSearchEndpoint(),
+ config.getSearchEntitySearchIndex(), config.getSearchEndpointDocuments());
+
+ topographicalIndexTarget =
+ EntityEventPolicy.concatSubUri(config.getSearchBaseUrl(), config.getSearchEndpoint(),
+ config.getSearchTopographySearchIndex());
+
+ topographicalSearchTarget = EntityEventPolicy.concatSubUri(config.getSearchBaseUrl(),
+ config.getSearchEndpoint(), config.getSearchTopographySearchIndex());
+
+ // Create REST client for search service
+ searchClient = new RestClient().validateServerHostname(false).validateServerCertChain(true)
+ .clientCertFile(DataRouterConstants.DR_HOME_AUTH + config.getSearchCertName())
+ .clientCertPassword(Password.deobfuscate(config.getSearchKeystorePwd()))
+ .trustStore(DataRouterConstants.DR_HOME_AUTH + config.getSearchKeystore());
+
+ autoSuggestSearchTarget =
+ EntityEventPolicy.concatSubUri(config.getSearchBaseUrl(), config.getSearchEndpoint(),
+ config.getSearchEntityAutoSuggestIndex(), config.getSearchEndpointDocuments());
+
+ aggregationSearchVnfTarget =
+ EntityEventPolicy.concatSubUri(config.getSearchBaseUrl(), config.getSearchEndpoint(),
+ config.getSearchAggregationVnfIndex(), config.getSearchEndpointDocuments());
+
+ this.externalOxmModelProcessors = new ArrayList<ExternalOxmModelProcessor>();
+ this.externalOxmModelProcessors.add(EntityOxmReferenceHelper.getInstance());
+ OxmModelLoader.registerExternalOxmModelProcessors(externalOxmModelProcessors);
+ OxmModelLoader.loadModels();
+ oxmVersionContextMap = OxmModelLoader.getVersionContextMap();
+ parseLatestOxmVersion();
+ }
+
+ private void parseLatestOxmVersion() {
+ int latestVersion = -1;
+ if (oxmVersionContextMap != null) {
+ Iterator it = oxmVersionContextMap.entrySet().iterator();
+ while (it.hasNext()) {
+ Map.Entry pair = (Map.Entry) it.next();
+
+ String version = pair.getKey().toString();
+ // parse the full numeric portion after the leading 'v' (so two-digit versions such as v10 are ordered correctly)
+ int versionNum = Integer.parseInt(version.substring(1));
+
+ if (versionNum > latestVersion) {
+ latestVersion = versionNum;
+ oxmVersion = pair.getKey().toString();
+ }
+
+ logger.info(EntityEventPolicyMsgs.PROCESS_OXM_MODEL_FOUND, pair.getKey().toString());
+ }
+ } else {
+ logger.error(EntityEventPolicyMsgs.PROCESS_OXM_MODEL_MISSING, "");
+ }
+ }
+
+ public void startup() {
+
+ // Create the indexes in the search service if they do not already exist.
+ createSearchIndex(entityIndexTarget, entitySearchSchema);
+ createSearchIndex(topographicalIndexTarget, topographicalSearchSchema);
+
+ logger.info(EntityEventPolicyMsgs.ENTITY_EVENT_POLICY_REGISTERED);
+ }
+
+ /**
+ * Creates an index through the search db abstraction.
+ *
+ * @param searchTarget
+ * the URL at which to attempt to create the search index
+ * @param schemaLocation
+ * the location of the mappings file for the index
+ */
+ private void createSearchIndex(String searchTarget, String schemaLocation) {
+
+ logger.debug("Creating search index, searchTarget = " + searchTarget + ", schemaLocation = " + schemaLocation);
+
+ MultivaluedMap<String, String> headers = new MultivaluedMapImpl();
+ headers.put("Accept", Arrays.asList("application/json"));
+ headers.put(Headers.FROM_APP_ID, Arrays.asList("DL"));
+ headers.put(Headers.TRANSACTION_ID, Arrays.asList(UUID.randomUUID().toString()));
+
+ try {
+
+ OperationResult result = searchClient.put(searchTarget, loadFileData(schemaLocation), headers,
+ MediaType.APPLICATION_JSON_TYPE, null);
+
+ if (!HttpUtil.isHttpResponseClassSuccess(result.getResultCode())) {
+ logger.error(EntityEventPolicyMsgs.FAIL_TO_CREATE_SEARCH_INDEX, searchTarget, result.getFailureCause());
+ } else {
+ logger.info(EntityEventPolicyMsgs.SEARCH_INDEX_CREATE_SUCCESS, searchTarget);
+ }
+
+ } catch (Exception e) {
+ logger.error(EntityEventPolicyMsgs.FAIL_TO_CREATE_SEARCH_INDEX, searchTarget, e.getLocalizedMessage());
+ }
+ }
+
+ /**
+ * Convenience method to load up all the data from a file into a string
+ *
+ * @param filename the filename to read from disk
+ * @return the data contained within the file
+ * @throws Exception if the file could not be read
+ */
+ protected String loadFileData(String filename) throws Exception {
+ StringBuilder data = new StringBuilder();
+ // try-with-resources ensures the reader is always closed
+ try (BufferedReader in = new BufferedReader(new InputStreamReader(
+ EntityEventPolicy.class.getClassLoader().getResourceAsStream("/" + filename),
+ StandardCharsets.UTF_8))) {
+ String line;
+
+ while ((line = in.readLine()) != null) {
+ data.append(line);
+ }
+ } catch (Exception e) {
+ throw new Exception("Failed to read from file = " + filename + ".", e);
+ }
+
+ return data.toString();
+ }
+
+
+ /**
+ * Convert object to json.
+ *
+ * @param object the object
+ * @param pretty the pretty
+ * @return the string
+ * @throws JsonProcessingException the json processing exception
+ */
+ public static String convertObjectToJson(Object object, boolean pretty)
+ throws JsonProcessingException {
+ ObjectWriter ow = null;
+
+ if (pretty) {
+ ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
+
+ } else {
+ ow = new ObjectMapper().writer();
+ }
+
+ return ow.writeValueAsString(object);
+ }
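+
+ /*
+ * Usage sketch: for any serializable POJO (e.g. an AaiEventEntity instance),
+ * convertObjectToJson(entity, false) yields the compact form, while passing true
+ * pretty-prints it with Jackson's default pretty printer.
+ */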
+
+ public void returnWithError(Exchange exchange, String payload, String errorMsg){
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE, errorMsg);
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE, errorMsg, payload);
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ }
+
+ @Override
+ public void process(Exchange exchange) throws Exception {
+
+ long startTime = System.currentTimeMillis();
+
+ String uebPayload = exchange.getIn().getBody().toString();
+
+ JsonNode uebAsJson = null;
+ ObjectMapper mapper = new ObjectMapper();
+ try {
+ uebAsJson = mapper.readTree(uebPayload);
+ } catch (IOException e) {
+ returnWithError(exchange, uebPayload, "Invalid Payload");
+ return;
+ }
+
+ // Load the UEB payload data; any errors will result in a failure and the event being discarded
+ JSONObject uebObjHeader = getUebHeaderAsJson(uebPayload);
+ if (uebObjHeader == null) {
+ returnWithError(exchange, uebPayload, "Payload is missing event-header");
+ return;
+ }
+
+ UebEventHeader eventHeader = initializeUebEventHeader(uebObjHeader.toString());
+ if (eventHeader == null) {
+ returnWithError(exchange, uebPayload, "Unable to parse event-header");
+ return;
+ }
+
+ // Get src domain from header; discard event if not originated from same domain
+ String payloadSrcDomain = eventHeader.getDomain();
+ if (payloadSrcDomain == null || !payloadSrcDomain.equalsIgnoreCase(this.srcDomain)) {
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Unrecognized source domain '" + payloadSrcDomain + "'", uebPayload);
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Unrecognized source domain '" + payloadSrcDomain + "'");
+
+ setResponse(exchange, ResponseType.SUCCESS, additionalInfo);
+ return;
+ }
+
+ DynamicJAXBContext oxmJaxbContext = loadOxmContext(oxmVersion.toLowerCase());
+ if (oxmJaxbContext == null) {
+ logger.error(EntityEventPolicyMsgs.OXM_VERSION_NOT_SUPPORTED, oxmVersion);
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE, "OXM version mismatch",
+ uebPayload);
+
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ return;
+ }
+
+ String action = eventHeader.getAction();
+ if (action == null || !SUPPORTED_ACTIONS.contains(action.toLowerCase())) {
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Unrecognized action '" + action + "'", uebPayload);
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Unrecognized action '" + action + "'");
+
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ return;
+ }
+
+ String entityType = eventHeader.getEntityType();
+ if (entityType == null) {
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Payload header missing entity type", uebPayload);
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Payload header missing entity type");
+
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ return;
+ }
+
+ String topEntityType = eventHeader.getTopEntityType();
+ if (topEntityType == null) {
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Payload header missing top entity type", uebPayload);
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Payload header missing top entity type");
+
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ return;
+ }
+
+ String entityLink = eventHeader.getEntityLink();
+ if (entityLink == null) {
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Payload header missing entity link", uebPayload);
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Payload header missing entity link");
+
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ return;
+ }
+
+ // log the fact that all data are in good shape
+ logger.info(EntityEventPolicyMsgs.PROCESS_AAI_ENTITY_EVENT_POLICY_NONVERBOSE, action,
+ entityType);
+ logger.debug(EntityEventPolicyMsgs.PROCESS_AAI_ENTITY_EVENT_POLICY_VERBOSE, action, entityType,
+ uebPayload);
+
+
+ // Process for building AaiEventEntity object
+ String[] entityTypeArr = entityType.split("-");
+ String oxmEntityType = "";
+ for (String entityWord : entityTypeArr) {
+ oxmEntityType += entityWord.substring(0, 1).toUpperCase() + entityWord.substring(1);
+ }
+
+ List<String> searchableAttr =
+ getOxmAttributes(uebPayload, oxmJaxbContext, oxmEntityType, entityType, "searchable");
+ if (searchableAttr == null) {
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Searchable attribute not found for payload entity type '" + entityType + "'");
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Searchable attribute not found for payload entity type '" + entityType + "'",
+ uebPayload);
+
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ return;
+ }
+
+ String entityPrimaryKeyFieldName =
+ getEntityPrimaryKeyFieldName(oxmJaxbContext, uebPayload, oxmEntityType, entityType);
+ String entityPrimaryKeyFieldValue = lookupValueUsingKey(uebPayload, entityPrimaryKeyFieldName);
+ if (entityPrimaryKeyFieldValue == null || entityPrimaryKeyFieldValue.isEmpty()) {
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Payload missing primary key attribute");
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Payload missing primary key attribute", uebPayload);
+
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ return;
+ }
+
+ AaiEventEntity aaiEventEntity = new AaiEventEntity();
+
+ /*
+ * Use the OXM Model to determine the primary key field name based on the entity-type
+ */
+
+ aaiEventEntity.setEntityPrimaryKeyName(entityPrimaryKeyFieldName);
+ aaiEventEntity.setEntityPrimaryKeyValue(entityPrimaryKeyFieldValue);
+ aaiEventEntity.setEntityType(entityType);
+ aaiEventEntity.setLink(entityLink);
+
+ if (!getSearchTags(aaiEventEntity, searchableAttr, uebPayload, action)) {
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Payload missing searchable attribute for entity type '" + entityType + "'");
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Payload missing searchable attribute for entity type '" + entityType + "'", uebPayload);
+
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ return;
+
+ }
+
+ try {
+ aaiEventEntity.deriveFields();
+
+ } catch (NoSuchAlgorithmException e) {
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Cannot create unique SHA digest");
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Cannot create unique SHA digest", uebPayload);
+
+ setResponse(exchange, ResponseType.FAILURE, additionalInfo);
+ return;
+ }
+
+ handleSearchServiceOperation(aaiEventEntity, action, this.entitySearchTarget);
+
+ handleTopographicalData(uebPayload, action, entityType, oxmEntityType, oxmJaxbContext,
+ entityPrimaryKeyFieldName, entityPrimaryKeyFieldValue);
+
+ /*
+ * Use the versioned OXM Entity class to get access to cross-entity reference helper collections
+ */
+ VersionedOxmEntities oxmEntities =
+ EntityOxmReferenceHelper.getInstance().getVersionedOxmEntities(Version.valueOf(oxmVersion));
+
+ /**
+ * If the entity type is "customer", the below check will return true if any nested entityType
+ * in that model could contain a CER based on the OXM model version that has been loaded.
+ */
+
+ if (oxmEntities != null && oxmEntities.entityModelContainsCrossEntityReference(topEntityType)) {
+
+ // We know the model "can" contain a CER reference definition, let's process a bit more
+
+ HashMap<String, CrossEntityReference> crossEntityRefMap =
+ oxmEntities.getCrossEntityReferences();
+
+ JSONObject entityJsonObject = getUebEntity(uebPayload);
+
+ JsonNode entityJsonNode = convertToJsonNode(entityJsonObject.toString());
+
+ for (String key : crossEntityRefMap.keySet()) {
+
+ /*
+ * if we know service-subscription is in the tree, then we can pull out all instances and
+ * process from there.
+ */
+
+ CrossEntityReference cerDescriptor = crossEntityRefMap.get(key);
+
+ ArrayList<JsonNode> foundNodes = new ArrayList<JsonNode>();
+
+ RouterServiceUtil.extractObjectsByKey(entityJsonNode, key, foundNodes);
+
+ if (foundNodes.size() > 0) {
+
+ for (JsonNode n : foundNodes) {
+
+ List<String> extractedParentEntityAttributeValues = new ArrayList<String>();
+
+ RouterServiceUtil.extractFieldValuesFromObject(n, cerDescriptor.getAttributeNames(),
+ extractedParentEntityAttributeValues);
+
+ List<JsonNode> nestedTargetEntityInstances = new ArrayList<JsonNode>();
+ RouterServiceUtil.extractObjectsByKey(n, cerDescriptor.getTargetEntityType(),
+ nestedTargetEntityInstances);
+
+ for (JsonNode targetEntityInstance : nestedTargetEntityInstances) {
+ /*
+ * Now:
+ * 1. build the AAIEntityType (IndexDocument) based on the extract entity
+ * 2. Get data from ES
+ * 3. Extract ETAG
+ * 4. Merge ES Doc + AAIEntityType + Extracted Parent Cross-Entity-Reference Values
+ * 5. Put data into ES with ETAG + updated doc
+ */
+
+ OxmEntityDescriptor searchableDescriptor =
+ oxmEntities.getSearchableEntityDescriptor(cerDescriptor.getTargetEntityType());
+
+ if (searchableDescriptor != null) {
+
+ if (!searchableDescriptor.getSearchableAttributes().isEmpty()) {
+
+ AaiEventEntity entityToSync = null;
+
+ try {
+
+ entityToSync = getPopulatedEntity(targetEntityInstance, searchableDescriptor);
+
+ /*
+ * Ready to do some ElasticSearch ops
+ */
+
+ for (String parentCrossEntityReferenceAttributeValue : extractedParentEntityAttributeValues) {
+ entityToSync
+ .addCrossEntityReferenceValue(parentCrossEntityReferenceAttributeValue);
+ }
+
+ entityToSync.setEntityPrimaryKeyName(entityPrimaryKeyFieldName);
+ entityToSync.setLink(entityLink);
+ entityToSync.deriveFields();
+
+ syncEntity(entityToSync);
+
+ } catch (NoSuchAlgorithmException e) {
+ logger.error(EntityEventPolicyMsgs.CROSS_ENTITY_REFERENCE_SYNC,
+ "Cannot create unique SHA digest: " + e.getLocalizedMessage());
+ }
+ }
+ } else {
+ logger.debug(EntityEventPolicyMsgs.CROSS_ENTITY_REFERENCE_SYNC,
+ "failure to find searchable descriptor for type "
+ + cerDescriptor.getTargetEntityType());
+ }
+ }
+
+ }
+
+ } else {
+ logger.debug(EntityEventPolicyMsgs.CROSS_ENTITY_REFERENCE_SYNC,
+ "found no instances of cross-entity-reference with entity " + key);
+ }
+
+ }
+
+ } else {
+ logger.info(EntityEventPolicyMsgs.CROSS_ENTITY_REFERENCE_SYNC, "skipped because the OXM model for "
+ + topEntityType + " does not contain a cross-entity-reference entity");
+ }
+
+ /*
+ * Process for autosuggestable entities
+ */
+ if (oxmEntities != null) {
+ Map<String, OxmEntityDescriptor> rootDescriptor =
+ oxmEntities.getSuggestableEntityDescriptors();
+ if (!rootDescriptor.isEmpty()) {
+ List<String> suggestibleAttributes = extractSuggestableAttr(oxmEntities, entityType);
+
+ if (suggestibleAttributes == null) {
+ return;
+ }
+
+ List<String> suggestionAliases = extractAliasForSuggestableEntity(oxmEntities, entityType);
+ AggregationEntity ae = new AggregationEntity();
+ ae.setLink(entityLink);
+ ae.deriveFields(uebAsJson);
+
+ handleSuggestiveSearchData(ae, action, this.aggregationSearchVnfTarget);
+
+ /*
+ * It was decided to silently ignore DELETE requests for resources we don't allow to be
+ * deleted. e.g. auto-suggestion deletion is not allowed while aggregation deletion is.
+ */
+ if (!ACTION_DELETE.equalsIgnoreCase(action)) {
+ SearchSuggestionPermutation searchSuggestionPermutation =
+ new SearchSuggestionPermutation();
+ List<ArrayList<String>> permutationsOfStatuses =
+ searchSuggestionPermutation.getSuggestionsPermutation(suggestibleAttributes);
+
+ // Now we have a list of all possible permutations of the statuses that are
+ // defined for this entity type. Try inserting a document for every combination.
+ for (ArrayList<String> permutation : permutationsOfStatuses) {
+ SuggestionSearchEntity suggestionSearchEntity = new SuggestionSearchEntity();
+ suggestionSearchEntity.setEntityType(entityType);
+ suggestionSearchEntity.setSuggestableAttr(permutation);
+ suggestionSearchEntity.setPayloadFromResponse(uebAsJson);
+ suggestionSearchEntity.setEntityTypeAliases(suggestionAliases);
+ suggestionSearchEntity.setSuggestionInputPermutations(
+ suggestionSearchEntity.generateSuggestionInputPermutations());
+
+ if (suggestionSearchEntity.isSuggestableDoc()) {
+ try {
+ suggestionSearchEntity.deriveFields();
+ } catch (NoSuchAlgorithmException e) {
+ logger.error(EntityEventPolicyMsgs.DISCARD_UPDATING_SEARCH_SUGGESTION_DATA,
+ "Cannot create unique SHA digest for search suggestion data. Exception: "
+ + e.getLocalizedMessage());
+ }
+
+ handleSuggestiveSearchData(suggestionSearchEntity, action,
+ this.autoSuggestSearchTarget);
+ }
+ }
+ }
+ }
+ }
+
+ long stopTime = System.currentTimeMillis();
+
+ metricsLogger.info(EntityEventPolicyMsgs.OPERATION_RESULT_NO_ERRORS, PROCESS_AAI_EVENT,
+ String.valueOf(stopTime - startTime));
+
+ setResponse(exchange, ResponseType.SUCCESS, additionalInfo);
+ return;
+ }
+
+ public List<String> extractSuggestableAttr(VersionedOxmEntities oxmEntities, String entityType) {
+ // Extract suggestable attributes
+ Map<String, OxmEntityDescriptor> rootDescriptor = oxmEntities.getSuggestableEntityDescriptors();
+
+ if (rootDescriptor == null) {
+ return null;
+ }
+
+ OxmEntityDescriptor desc = rootDescriptor.get(entityType);
+
+ if (desc == null) {
+ return null;
+ }
+
+ return desc.getSuggestableAttributes();
+ }
+
+ public List<String> extractAliasForSuggestableEntity(VersionedOxmEntities oxmEntities,
+ String entityType) {
+
+ // Extract alias
+ Map<String, OxmEntityDescriptor> rootDescriptor = oxmEntities.getEntityAliasDescriptors();
+
+ if (rootDescriptor == null) {
+ return null;
+ }
+
+ OxmEntityDescriptor desc = rootDescriptor.get(entityType);
+ return desc.getAlias();
+ }
+
+ private void setResponse(Exchange exchange, ResponseType responseType, String additionalInfo) {
+
+ exchange.getOut().setHeader("ResponseType", responseType.toString());
+ exchange.getOut().setBody(additionalInfo);
+ }
+
+ public void extractDetailsForAutosuggestion(VersionedOxmEntities oxmEntities, String entityType,
+ List<String> suggestableAttr, List<String> alias) {
+
+ // Populate the caller-supplied lists; reassigning the parameters themselves
+ // would not be visible to the caller.
+ Map<String, OxmEntityDescriptor> rootDescriptor = oxmEntities.getSuggestableEntityDescriptors();
+
+ OxmEntityDescriptor desc = rootDescriptor.get(entityType);
+ suggestableAttr.addAll(desc.getSuggestableAttributes());
+
+ // Extract alias
+ rootDescriptor = oxmEntities.getEntityAliasDescriptors();
+ desc = rootDescriptor.get(entityType);
+ alias.addAll(desc.getAlias());
+ }
+
+ /*
+ * Load the UEB JSON payload; any errors will result in a failure-case response.
+ */
+ private JSONObject getUebHeaderAsJson(String payload) {
+
+ JSONObject uebJsonObj;
+ JSONObject uebObjHeader;
+
+ try {
+ uebJsonObj = new JSONObject(payload);
+ } catch (JSONException e) {
+ logger.debug(EntityEventPolicyMsgs.UEB_INVALID_PAYLOAD_JSON_FORMAT, payload);
+ logger.error(EntityEventPolicyMsgs.UEB_INVALID_PAYLOAD_JSON_FORMAT, payload);
+ return null;
+ }
+
+ if (uebJsonObj.has(EVENT_HEADER)) {
+ uebObjHeader = uebJsonObj.getJSONObject(EVENT_HEADER);
+ } else {
+ logger.debug(EntityEventPolicyMsgs.UEB_FAILED_TO_PARSE_PAYLOAD, EVENT_HEADER);
+ logger.error(EntityEventPolicyMsgs.UEB_FAILED_TO_PARSE_PAYLOAD, EVENT_HEADER);
+ return null;
+ }
+
+ return uebObjHeader;
+ }
+
+
+ private UebEventHeader initializeUebEventHeader(String payload) {
+
+ UebEventHeader eventHeader = null;
+ ObjectMapper mapper = new ObjectMapper();
+
+ // Make sure that we were actually passed a valid string.
+ if (payload == null || payload.isEmpty()) {
+ logger.debug(EntityEventPolicyMsgs.UEB_FAILED_TO_PARSE_PAYLOAD, EVENT_HEADER);
+ logger.error(EntityEventPolicyMsgs.UEB_FAILED_TO_PARSE_PAYLOAD, EVENT_HEADER);
+
+ return eventHeader;
+ }
+
+ // Marshal the supplied string into a UebEventHeader object.
+ try {
+ eventHeader = mapper.readValue(payload, UebEventHeader.class);
+ } catch (JsonProcessingException e) {
+ logger.error(EntityEventPolicyMsgs.UEB_FAILED_UEBEVENTHEADER_CONVERSION, e.toString());
+ } catch (Exception e) {
+ logger.error(EntityEventPolicyMsgs.UEB_FAILED_UEBEVENTHEADER_CONVERSION, e.toString());
+ }
+
+ if (eventHeader != null) {
+ logger.debug(EntityEventPolicyMsgs.UEB_EVENT_HEADER_PARSED, eventHeader.toString());
+ }
+
+ return eventHeader;
+
+ }
+
+
+ private String getEntityPrimaryKeyFieldName(DynamicJAXBContext oxmJaxbContext, String payload,
+ String oxmEntityType, String entityType) {
+
+ DynamicType entity = oxmJaxbContext.getDynamicType(oxmEntityType);
+ if (entity == null) {
+ return null;
+ }
+
+ List<DatabaseField> list = entity.getDescriptor().getPrimaryKeyFields();
+ if (list != null && !list.isEmpty()) {
+ String keyName = list.get(0).getName();
+ int slashIndex = keyName.indexOf('/');
+ // Strip any XPath-style suffix from the field name, if present
+ return slashIndex >= 0 ? keyName.substring(0, slashIndex) : keyName;
+ }
+
+ return "";
+ }
+
+ private String lookupValueUsingKey(String payload, String key) throws JSONException {
+ JsonNode jsonNode = convertToJsonNode(payload);
+ return RouterServiceUtil.recursivelyLookupJsonPayload(jsonNode, key);
+ }
+
+ private JsonNode convertToJsonNode(String payload) {
+
+ ObjectMapper mapper = new ObjectMapper();
+ JsonNode jsonNode = null;
+ try {
+ jsonNode = mapper.readTree(payload);
+ } catch (IOException e) {
+ logger.debug(EntityEventPolicyMsgs.FAILED_TO_PARSE_UEB_PAYLOAD, ENTITY_HEADER + " missing",
+ payload.toString());
+ logger.error(EntityEventPolicyMsgs.FAILED_TO_PARSE_UEB_PAYLOAD, ENTITY_HEADER + " missing",
+ "");
+ }
+
+ return jsonNode;
+ }
+
+ private boolean getSearchTags(AaiEventEntity aaiEventEntity, List<String> searchableAttr,
+ String payload, String action) {
+
+ boolean hasSearchableAttr = false;
+ for (String searchTagField : searchableAttr) {
+ String searchTagValue = null;
+ if (searchTagField.equalsIgnoreCase(aaiEventEntity.getEntityPrimaryKeyName())) {
+ searchTagValue = aaiEventEntity.getEntityPrimaryKeyValue();
+ } else {
+ searchTagValue = this.lookupValueUsingKey(payload, searchTagField);
+ }
+
+ if (searchTagValue != null && !searchTagValue.isEmpty()) {
+ hasSearchableAttr = true;
+ aaiEventEntity.addSearchTagWithKey(searchTagValue, searchTagField);
+ }
+ }
+ return hasSearchableAttr;
+ }
+
+ /*
+   * Return the cached OXM context for the supplied version, or null if that version has not been loaded.
+ */
+ private DynamicJAXBContext loadOxmContext(String version) {
+ if (version == null) {
+ logger.error(EntityEventPolicyMsgs.FAILED_TO_FIND_OXM_VERSION, version);
+ return null;
+ }
+
+ return oxmVersionContextMap.get(version);
+ }
+
+ private List<String> getOxmAttributes(String payload, DynamicJAXBContext oxmJaxbContext,
+ String oxmEntityType, String entityType, String fieldName) {
+
+ DynamicType entity = (DynamicType) oxmJaxbContext.getDynamicType(oxmEntityType);
+ if (entity == null) {
+ return null;
+ }
+
+ /*
+ * Check for searchable XML tag
+ */
+ List<String> fieldValues = null;
+ Map<String, String> properties = entity.getDescriptor().getProperties();
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+ if (entry.getKey().equalsIgnoreCase(fieldName)) {
+ fieldValues = Arrays.asList(entry.getValue().split(","));
+ break;
+ }
+ }
+
+ return fieldValues;
+ }
+
+ private JSONObject getUebEntity(String payload) {
+ JSONObject uebJsonObj;
+
+ try {
+ uebJsonObj = new JSONObject(payload);
+ } catch (JSONException e) {
+ logger.debug(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_VERBOSE,
+ "Payload has invalid JSON Format", payload.toString());
+ logger.error(EntityEventPolicyMsgs.DISCARD_AAI_EVENT_NONVERBOSE,
+ "Payload has invalid JSON Format");
+ return null;
+ }
+
+ if (uebJsonObj.has(ENTITY_HEADER)) {
+ return uebJsonObj.getJSONObject(ENTITY_HEADER);
+ } else {
+ logger.debug(EntityEventPolicyMsgs.FAILED_TO_PARSE_UEB_PAYLOAD, ENTITY_HEADER + " missing",
+ payload.toString());
+ logger.error(EntityEventPolicyMsgs.FAILED_TO_PARSE_UEB_PAYLOAD, ENTITY_HEADER + " missing");
+ return null;
+ }
+ }
+
+ protected AaiEventEntity getPopulatedEntity(JsonNode entityNode,
+ OxmEntityDescriptor resultDescriptor) {
+ AaiEventEntity d = new AaiEventEntity();
+
+ d.setEntityType(resultDescriptor.getEntityName());
+
+ List<String> primaryKeyValues = new ArrayList<String>();
+ String pkeyValue = null;
+
+ for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) {
+ pkeyValue = RouterServiceUtil.getNodeFieldAsText(entityNode, keyName);
+ if (pkeyValue != null) {
+ primaryKeyValues.add(pkeyValue);
+ } else {
+        logger.error(EntityEventPolicyMsgs.PRIMARY_KEY_NULL_FOR_ENTITY_TYPE,
+            resultDescriptor.getEntityName());
+ }
+ }
+
+ final String primaryCompositeKeyValue = RouterServiceUtil.concatArray(primaryKeyValues, "/");
+ d.setEntityPrimaryKeyValue(primaryCompositeKeyValue);
+
+ final List<String> searchTagFields = resultDescriptor.getSearchableAttributes();
+
+ /*
+ * Based on configuration, use the configured field names for this entity-Type to build a
+ * multi-value collection of search tags for elastic search entity search criteria.
+ */
+
+
+ for (String searchTagField : searchTagFields) {
+ String searchTagValue = RouterServiceUtil.getNodeFieldAsText(entityNode, searchTagField);
+ if (searchTagValue != null && !searchTagValue.isEmpty()) {
+ d.addSearchTagWithKey(searchTagValue, searchTagField);
+ }
+ }
+
+ return d;
+ }
+
+ private void syncEntity(AaiEventEntity aaiEventEntity) {
+ try {
+ Map<String, List<String>> headers = new HashMap<>();
+ headers.put(Headers.FROM_APP_ID, Arrays.asList("DataLayer"));
+ headers.put(Headers.TRANSACTION_ID, Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID)));
+
+ String entityId = aaiEventEntity.getId();
+
+ // Run the GET to retrieve the ETAG from the search service
+ OperationResult storedEntity =
+ searchClient.get(entitySearchTarget + entityId, headers, MediaType.APPLICATION_JSON_TYPE);
+
+ if (HttpUtil.isHttpResponseClassSuccess(storedEntity.getResultCode())) {
+ List<String> etag = storedEntity.getHeaders().get(Headers.ETAG);
+
+ if (etag != null && etag.size() > 0) {
+ headers.put(Headers.IF_MATCH, etag);
+ } else {
+ logger.error(EntityEventPolicyMsgs.NO_ETAG_AVAILABLE_FAILURE,
+ entitySearchTarget + entityId, entityId);
+ }
+
+ searchClient.put(entitySearchTarget + entityId, aaiEventEntity.getAsJson(), headers,
+ MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE);
+ } else {
+
+ if (storedEntity.getResultCode() == 404) {
+ // entity not found, so attempt to do a PUT
+ searchClient.put(entitySearchTarget + entityId, aaiEventEntity.getAsJson(), headers,
+ MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE);
+ } else {
+ logger.error(EntityEventPolicyMsgs.FAILED_TO_UPDATE_ENTITY_IN_DOCSTORE,
+ aaiEventEntity.getId(), "SYNC_ENTITY");
+ }
+ }
+ } catch (IOException e) {
+ logger.error(EntityEventPolicyMsgs.FAILED_TO_UPDATE_ENTITY_IN_DOCSTORE,
+ aaiEventEntity.getId(), "SYNC_ENTITY");
+ }
+ }
+
+ /**
+   * Perform a create, update, or delete operation on the search engine's suggestive search
+   * index.
+   *
+   * @param eventEntity Entity/data to use in the operation
+   * @param action The operation to perform
+   * @param target Resource to perform the operation on
+ */
+ private void handleSuggestiveSearchData(DocumentStoreDataEntity eventEntity, String action,
+ String target) {
+ try {
+ Map<String, List<String>> headers = new HashMap<>();
+ headers.put(Headers.FROM_APP_ID, Arrays.asList("DataLayer"));
+ headers.put(Headers.TRANSACTION_ID, Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID)));
+
+ String entityId = eventEntity.getId();
+
+ if ((action.equalsIgnoreCase(ACTION_CREATE) && entityId != null)
+ || action.equalsIgnoreCase(ACTION_UPDATE)) {
+ // Run the GET to retrieve the ETAG from the search service
+ OperationResult storedEntity =
+ searchClient.get(target + entityId, headers, MediaType.APPLICATION_JSON_TYPE);
+
+ if (HttpUtil.isHttpResponseClassSuccess(storedEntity.getResultCode())) {
+ List<String> etag = storedEntity.getHeaders().get(Headers.ETAG);
+
+ if (etag != null && etag.size() > 0) {
+ headers.put(Headers.IF_MATCH, etag);
+ } else {
+ logger.error(EntityEventPolicyMsgs.NO_ETAG_AVAILABLE_FAILURE, target + entityId,
+ entityId);
+ }
+ }
+
+ String eventEntityStr = eventEntity.getAsJson();
+
+ if (eventEntityStr != null) {
+ searchClient.put(target + entityId, eventEntity.getAsJson(), headers,
+ MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE);
+ }
+ } else if (action.equalsIgnoreCase(ACTION_CREATE)) {
+ String eventEntityStr = eventEntity.getAsJson();
+
+ if (eventEntityStr != null) {
+ searchClient.post(target, eventEntityStr, headers, MediaType.APPLICATION_JSON_TYPE,
+ MediaType.APPLICATION_JSON_TYPE);
+ }
+ } else if (action.equalsIgnoreCase(ACTION_DELETE)) {
+ // Run the GET to retrieve the ETAG from the search service
+ OperationResult storedEntity =
+ searchClient.get(target + entityId, headers, MediaType.APPLICATION_JSON_TYPE);
+
+ if (HttpUtil.isHttpResponseClassSuccess(storedEntity.getResultCode())) {
+ List<String> etag = storedEntity.getHeaders().get(Headers.ETAG);
+
+ if (etag != null && etag.size() > 0) {
+ headers.put(Headers.IF_MATCH, etag);
+ } else {
+ logger.error(EntityEventPolicyMsgs.NO_ETAG_AVAILABLE_FAILURE, target + entityId,
+ entityId);
+ }
+
+ searchClient.delete(target + eventEntity.getId(), headers, null);
+ } else {
+ logger.error(EntityEventPolicyMsgs.NO_ETAG_AVAILABLE_FAILURE, target + entityId,
+ entityId);
+ }
+ } else {
+ logger.error(EntityEventPolicyMsgs.ENTITY_OPERATION_NOT_SUPPORTED, action);
+ }
+ } catch (IOException e) {
+ logger.error(EntityEventPolicyMsgs.FAILED_TO_UPDATE_ENTITY_IN_DOCSTORE, eventEntity.getId(),
+ action);
+ }
+ }
+
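+  // Generic document-store operation: for create/update the current document is fetched first so
+  // its ETag can be echoed back as an If-Match header (presumably for optimistic concurrency);
+  // deletes follow the same ETag handshake before issuing the DELETE.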
+ private void handleSearchServiceOperation(DocumentStoreDataEntity eventEntity, String action,
+ String target) {
+ try {
+
+ Map<String, List<String>> headers = new HashMap<>();
+ headers.put(Headers.FROM_APP_ID, Arrays.asList("DataLayer"));
+ headers.put(Headers.TRANSACTION_ID, Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID)));
+
+ String entityId = eventEntity.getId();
+
+ if ((action.equalsIgnoreCase(ACTION_CREATE) && entityId != null)
+ || action.equalsIgnoreCase(ACTION_UPDATE)) {
+
+ // Run the GET to retrieve the ETAG from the search service
+ OperationResult storedEntity =
+ searchClient.get(target + entityId, headers, MediaType.APPLICATION_JSON_TYPE);
+
+ if (HttpUtil.isHttpResponseClassSuccess(storedEntity.getResultCode())) {
+ List<String> etag = storedEntity.getHeaders().get(Headers.ETAG);
+
+ if (etag != null && etag.size() > 0) {
+ headers.put(Headers.IF_MATCH, etag);
+ } else {
+ logger.error(EntityEventPolicyMsgs.NO_ETAG_AVAILABLE_FAILURE, target + entityId,
+ entityId);
+ }
+ }
+
+ searchClient.put(target + entityId, eventEntity.getAsJson(), headers,
+ MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE);
+ } else if (action.equalsIgnoreCase(ACTION_CREATE)) {
+ searchClient.post(target, eventEntity.getAsJson(), headers, MediaType.APPLICATION_JSON_TYPE,
+ MediaType.APPLICATION_JSON_TYPE);
+ } else if (action.equalsIgnoreCase(ACTION_DELETE)) {
+ // Run the GET to retrieve the ETAG from the search service
+ OperationResult storedEntity =
+ searchClient.get(target + entityId, headers, MediaType.APPLICATION_JSON_TYPE);
+
+ if (HttpUtil.isHttpResponseClassSuccess(storedEntity.getResultCode())) {
+ List<String> etag = storedEntity.getHeaders().get(Headers.ETAG);
+
+ if (etag != null && etag.size() > 0) {
+ headers.put(Headers.IF_MATCH, etag);
+ } else {
+ logger.error(EntityEventPolicyMsgs.NO_ETAG_AVAILABLE_FAILURE, target + entityId,
+ entityId);
+ }
+
+ searchClient.delete(target + eventEntity.getId(), headers, null);
+ } else {
+ logger.error(EntityEventPolicyMsgs.NO_ETAG_AVAILABLE_FAILURE, target + entityId,
+ entityId);
+ }
+ } else {
+ logger.error(EntityEventPolicyMsgs.ENTITY_OPERATION_NOT_SUPPORTED, action);
+ }
+ } catch (IOException e) {
+ logger.error(EntityEventPolicyMsgs.FAILED_TO_UPDATE_ENTITY_IN_DOCSTORE, eventEntity.getId(),
+ action);
+ }
+ }
+
+ private void handleTopographicalData(String payload, String action, String entityType,
+ String oxmEntityType, DynamicJAXBContext oxmJaxbContext, String entityPrimaryKeyFieldName,
+ String entityPrimaryKeyFieldValue) {
+
+ Map<String, String> topoData = new HashMap<>();
+ String entityLink = "";
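+    // "geoProps" is the OXM property expected to list this entity type's geo attribute names
+    // (typically latitude and longitude); their values are then extracted from the event payload.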
+ List<String> topographicalAttr =
+ getOxmAttributes(payload, oxmJaxbContext, oxmEntityType, entityType, "geoProps");
+ if (topographicalAttr == null) {
+      logger.error(EntityEventPolicyMsgs.DISCARD_UPDATING_TOPOGRAPHY_DATA_NONVERBOSE,
+          "Topographical attribute not found for payload entity type '" + entityType + "'");
+      logger.debug(EntityEventPolicyMsgs.DISCARD_UPDATING_TOPOGRAPHY_DATA_VERBOSE,
+          "Topographical attribute not found for payload entity type '" + entityType + "'",
+          payload);
+ } else {
+ entityLink = lookupValueUsingKey(payload, "entity-link");
+ for (String topoAttr : topographicalAttr) {
+ topoData.put(topoAttr, lookupValueUsingKey(payload, topoAttr));
+ }
+ updateTopographicalSearchDb(topoData, entityType, action, entityPrimaryKeyFieldName,
+ entityPrimaryKeyFieldValue, entityLink);
+ }
+
+ }
+
+ private void updateTopographicalSearchDb(Map<String, String> topoData, String entityType,
+ String action, String entityPrimaryKeyName, String entityPrimaryKeyValue, String entityLink) {
+
+ TopographicalEntity topoEntity = new TopographicalEntity();
+ topoEntity.setEntityPrimaryKeyName(entityPrimaryKeyName);
+ topoEntity.setEntityPrimaryKeyValue(entityPrimaryKeyValue);
+ topoEntity.setEntityType(entityType);
+ topoEntity.setLatitude(topoData.get(TOPO_LAT));
+ topoEntity.setLongitude(topoData.get(TOPO_LONG));
+ topoEntity.setSelfLink(entityLink);
+ try {
+ topoEntity.setId(TopographicalEntity.generateUniqueShaDigest(entityType, entityPrimaryKeyName,
+ entityPrimaryKeyValue));
+ } catch (NoSuchAlgorithmException e) {
+ logger.error(EntityEventPolicyMsgs.DISCARD_UPDATING_TOPOGRAPHY_DATA_VERBOSE,
+ "Cannot create unique SHA digest for topographical data.");
+ }
+
+ this.handleSearchServiceOperation(topoEntity, action, this.topographicalSearchTarget);
+ }
+
+
+ // put this here until we find a better spot
+ /**
+ * Helper utility to concatenate substrings of a URI together to form a proper URI.
+ *
+ * @param suburis the list of substrings to concatenate together
+   * @return the concatenated URI, with a single "/" between segments and a trailing "/"
+ */
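+  // e.g. concatSubUri("services/", "/search", "indexes") yields "services/search/indexes/"
+  // (illustrative values; leading slashes are stripped and a trailing slash is ensured per segment).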
+ public static String concatSubUri(String... suburis) {
+ String finalUri = "";
+
+ for (String suburi : suburis) {
+
+ if (suburi != null) {
+ // Remove any leading / since we only want to append /
+ suburi = suburi.replaceFirst("^/*", "");
+
+ // Add a trailing / if one isn't already there
+ finalUri += suburi.endsWith("/") ? suburi : suburi + "/";
+ }
+ }
+
+ return finalUri;
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/policy/EntityEventPolicyConfig.java b/src/main/java/org/openecomp/datarouter/policy/EntityEventPolicyConfig.java
new file mode 100644
index 0000000..8e14be1
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/policy/EntityEventPolicyConfig.java
@@ -0,0 +1,129 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.policy;
+
+public class EntityEventPolicyConfig {
+
+ private String sourceDomain;
+ private String searchBaseUrl;
+ private String searchEndpoint;
+ private String searchEndpointDocuments;
+ private String searchEntitySearchIndex;
+ private String searchTopographySearchIndex;
+ private String searchEntityAutoSuggestIndex;
+ private String searchAggregationVnfIndex;
+ private String searchCertName;
+ private String searchKeystorePwd;
+ private String searchKeystore;
+
+
+ public String getSourceDomain() {
+ return sourceDomain;
+ }
+
+ public void setSourceDomain(String sourceDomain) {
+ this.sourceDomain = sourceDomain;
+ }
+
+ public String getSearchBaseUrl() {
+ return searchBaseUrl;
+ }
+
+ public void setSearchBaseUrl(String searchBaseUrl) {
+ this.searchBaseUrl = searchBaseUrl;
+ }
+
+ public String getSearchEndpoint() {
+ return searchEndpoint;
+ }
+
+ public void setSearchEndpoint(String searchEndpoint) {
+ this.searchEndpoint = searchEndpoint;
+ }
+
+ public String getSearchEndpointDocuments() {
+ return searchEndpointDocuments;
+ }
+
+ public void setSearchEndpointDocuments(String searchEndpointDocuments) {
+ this.searchEndpointDocuments = searchEndpointDocuments;
+ }
+
+ public String getSearchEntitySearchIndex() {
+ return searchEntitySearchIndex;
+ }
+
+ public void setSearchEntitySearchIndex(String searchEntitySearchIndex) {
+ this.searchEntitySearchIndex = searchEntitySearchIndex;
+ }
+
+ public String getSearchTopographySearchIndex() {
+ return searchTopographySearchIndex;
+ }
+
+ public void setSearchTopographySearchIndex(String searchTopographySearchIndex) {
+ this.searchTopographySearchIndex = searchTopographySearchIndex;
+ }
+
+ public String getSearchEntityAutoSuggestIndex() {
+ return searchEntityAutoSuggestIndex;
+ }
+
+ public void setSearchEntityAutoSuggestIndex(String autoSuggestibleSearchEntitySearchIndex) {
+ this.searchEntityAutoSuggestIndex = autoSuggestibleSearchEntitySearchIndex;
+ }
+
+ public String getSearchCertName() {
+ return searchCertName;
+ }
+
+ public void setSearchCertName(String searchCertName) {
+ this.searchCertName = searchCertName;
+ }
+
+ public String getSearchKeystore() {
+ return searchKeystore;
+ }
+
+ public void setSearchKeystore(String searchKeystore) {
+ this.searchKeystore = searchKeystore;
+ }
+
+ public String getSearchKeystorePwd() {
+ return searchKeystorePwd;
+ }
+
+ public void setSearchKeystorePwd(String searchKeystorePwd) {
+ this.searchKeystorePwd = searchKeystorePwd;
+ }
+
+  public String getSearchAggregationVnfIndex() {
+    return searchAggregationVnfIndex;
+  }
+
+  public void setSearchAggregationVnfIndex(String searchAggregationVnfIndex) {
+    this.searchAggregationVnfIndex = searchAggregationVnfIndex;
+  }
+}
diff --git a/src/main/java/org/openecomp/datarouter/service/EchoService.java b/src/main/java/org/openecomp/datarouter/service/EchoService.java
new file mode 100644
index 0000000..e7f6368
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/service/EchoService.java
@@ -0,0 +1,97 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.service;
+
+import org.openecomp.cl.api.LogFields;
+import org.openecomp.cl.api.LogLine;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.cl.mdc.MdcContext;
+import org.openecomp.datarouter.logging.DataRouterMsgs;
+import org.openecomp.datarouter.util.DataRouterConstants;
+import org.slf4j.MDC;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+public class EchoService {
+
+ private static Logger logger = LoggerFactory.getInstance().getLogger(EchoService.class.getName());
+ private static Logger auditLogger =
+ LoggerFactory.getInstance().getAuditLogger(EchoService.class.getName());
+
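+  // Simple liveness check; e.g. GET <service-base>/echo/hi returns "Hello, hi."
+  // (<service-base> depends on how the AJSC route exposes this service).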
+ @GET
+ @Path("echo/{input}")
+ @Produces("text/plain")
+ public String ping(@PathParam("input") String input, @Context HttpHeaders headers,
+ @Context UriInfo info, @Context HttpServletRequest req) {
+
+ String fromIp = req.getRemoteAddr();
+ String fromAppId = "";
+ String transId = null;
+
+ if (headers.getRequestHeaders().getFirst("X-FromAppId") != null) {
+ fromAppId = headers.getRequestHeaders().getFirst("X-FromAppId");
+ }
+
+ if ((headers.getRequestHeaders().getFirst("X-TransactionId") == null)
+ || headers.getRequestHeaders().getFirst("X-TransactionId").isEmpty()) {
+ transId = java.util.UUID.randomUUID().toString();
+ } else {
+ transId = headers.getRequestHeaders().getFirst("X-TransactionId");
+ }
+
+ MdcContext.initialize(transId, DataRouterConstants.DATA_ROUTER_SERVICE_NAME, "", fromAppId,
+ fromIp);
+
+ int status = 200;
+ String respStatusString = "";
+ if (Response.Status.fromStatusCode(status) != null) {
+ respStatusString = Response.Status.fromStatusCode(status).toString();
+ }
+
+    // Log the REST request.
+ logger.info(DataRouterMsgs.PROCESS_REST_REQUEST, req.getMethod(),
+ req.getRequestURL().toString(), req.getRemoteHost(), Integer.toString(status));
+
+ // Generate audit log.
+ auditLogger.info(DataRouterMsgs.PROCESS_REST_REQUEST,
+ new LogFields().setField(LogLine.DefinedFields.RESPONSE_CODE, status)
+ .setField(LogLine.DefinedFields.RESPONSE_DESCRIPTION, respStatusString),
+ (req != null) ? req.getMethod() : "Unknown",
+ (req != null) ? req.getRequestURL().toString() : "Unknown",
+ (req != null) ? req.getRemoteHost() : "Unknown", Integer.toString(status));
+ MDC.clear();
+
+ return "Hello, " + input + ".";
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/util/CrossEntityReference.java b/src/main/java/org/openecomp/datarouter/util/CrossEntityReference.java
new file mode 100644
index 0000000..48f3c27
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/CrossEntityReference.java
@@ -0,0 +1,97 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Processing and entity wrapper for property transposition logic and UEB processing
+ *
+ * @author DAVEA
+ */
+public class CrossEntityReference {
+
+ private String targetEntityType;
+
+ private List<String> attributeNames;
+
+ public CrossEntityReference() {
+ this.targetEntityType = null;
+ this.attributeNames = new ArrayList<String>();
+ }
+
+ public String getTargetEntityType() {
+ return targetEntityType;
+ }
+
+ public void setTargetEntityType(String targetEntityType) {
+ this.targetEntityType = targetEntityType;
+ }
+
+ public List<String> getAttributeNames() {
+ return attributeNames;
+ }
+
+ public void setAttributeNames(List<String> attributeNames) {
+ this.attributeNames = attributeNames;
+ }
+
+ public void addAttributeName(String attributeName) {
+ if (!this.attributeNames.contains(attributeName)) {
+ this.attributeNames.add(attributeName);
+ }
+ }
+
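+  // Expects the model value in the form "<target-entity-type>,<attr1>[,<attr2>...]",
+  // e.g. "service-instance,service-type" (illustrative value).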
+ public void initialize(String crossEntityReferenceValueFromModel) {
+
+ if (crossEntityReferenceValueFromModel == null
+ || crossEntityReferenceValueFromModel.length() == 0) {
+ // or throw an exception due to failure to initialize
+ return;
+ }
+
+ String[] tokens = crossEntityReferenceValueFromModel.split(",");
+
+ if (tokens.length >= 2) {
+ this.targetEntityType = tokens[0];
+
+ for (int x = 1; x < tokens.length; x++) {
+ this.attributeNames.add(tokens[x]);
+ }
+ } else {
+ // throw a CrossEntityReferenceInitializationException??
+ }
+
+ }
+
+ @Override
+ public String toString() {
+ return "CrossEntityReference ["
+ + (targetEntityType != null ? "entityType=" + targetEntityType + ", " : "")
+ + (attributeNames != null ? "attributeNames=" + attributeNames : "") + "]";
+ }
+
+}
diff --git a/src/main/java/org/openecomp/datarouter/util/DataRouterConstants.java b/src/main/java/org/openecomp/datarouter/util/DataRouterConstants.java
new file mode 100644
index 0000000..f9f5df3
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/DataRouterConstants.java
@@ -0,0 +1,57 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+public class DataRouterConstants {
+ public static final String DR_FILESEP = (System.getProperty("file.separator") == null) ? "/"
+ : System.getProperty("file.separator");
+
+ public static final String DR_SPECIFIC_CONFIG = System.getProperty("CONFIG_HOME") + DR_FILESEP;
+
+ public static final String DR_BUNDLECONFIG_NAME = (System.getProperty("BUNDLECONFIG_DIR") == null)
+ ? "bundleconfig" : System.getProperty("BUNDLECONFIG_DIR");
+
+ public static final String DR_HOME_BUNDLECONFIG = (System.getProperty("AJSC_HOME") == null)
+ ? DR_FILESEP + "opt" + DR_FILESEP + "app" + DR_FILESEP
+ + "datalayer" + DR_FILESEP + DR_BUNDLECONFIG_NAME
+ : System.getProperty("AJSC_HOME") + DR_FILESEP + DR_BUNDLECONFIG_NAME;
+
+ /** This is the etc directory, relative to AAI_HOME. */
+ public static final String DR_HOME_ETC = DR_HOME_BUNDLECONFIG + DR_FILESEP + "etc" + DR_FILESEP;
+
+ public static final String DR_HOME_MODEL = DR_SPECIFIC_CONFIG + "model" + DR_FILESEP;
+ public static final String DR_HOME_AUTH = DR_SPECIFIC_CONFIG + "auth" + DR_FILESEP;
+
+ public static final String DR_CONFIG_FILE = DR_SPECIFIC_CONFIG + "data-router.properties";
+
+ public static final String DR_HOME_ETC_OXM = DR_HOME_ETC + "oxm" + DR_FILESEP;
+
+
+ // AAI Related
+ public static final String AAI_ECHO_SERVICE = "/util/echo";
+
+ // Logging related
+ public static final String DATA_ROUTER_SERVICE_NAME = "DataRouter";
+}
diff --git a/src/main/java/org/openecomp/datarouter/util/DataRouterProperties.java b/src/main/java/org/openecomp/datarouter/util/DataRouterProperties.java
new file mode 100644
index 0000000..7e18363
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/DataRouterProperties.java
@@ -0,0 +1,53 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Properties;
+
+public class DataRouterProperties {
+
+ private static Properties properties;
+
+ static {
+ properties = new Properties();
+ File file = new File(DataRouterConstants.DR_CONFIG_FILE);
+ try {
+ properties.load(new FileInputStream(file));
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static String get(String key) {
+ return properties.getProperty(key);
+ }
+
+}
diff --git a/src/main/java/org/openecomp/datarouter/util/EntityOxmReferenceHelper.java b/src/main/java/org/openecomp/datarouter/util/EntityOxmReferenceHelper.java
new file mode 100644
index 0000000..c9e1746
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/EntityOxmReferenceHelper.java
@@ -0,0 +1,62 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import java.util.HashMap;
+
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+
+public class EntityOxmReferenceHelper implements ExternalOxmModelProcessor {
+
+ private static EntityOxmReferenceHelper _instance = null;
+
+ private HashMap<Version, VersionedOxmEntities> versionedModels;
+
+ protected EntityOxmReferenceHelper() {
+ this.versionedModels = new HashMap<Version,VersionedOxmEntities>();
+ }
+
+ public static EntityOxmReferenceHelper getInstance() {
+ if ( _instance == null ) {
+ _instance = new EntityOxmReferenceHelper();
+ }
+
+ return _instance;
+ }
+
+
+ @Override
+ public void onOxmVersionChange(Version version, DynamicJAXBContext context) {
+ VersionedOxmEntities oxmEntities = new VersionedOxmEntities();
+ oxmEntities.initialize(context);
+ this.versionedModels.put(version, oxmEntities);
+
+ }
+
+ public VersionedOxmEntities getVersionedOxmEntities(Version version){
+ return this.versionedModels.get(version);
+ }
+
+} \ No newline at end of file
diff --git a/src/main/java/org/openecomp/datarouter/util/ExternalOxmModelProcessor.java b/src/main/java/org/openecomp/datarouter/util/ExternalOxmModelProcessor.java
new file mode 100644
index 0000000..9fcef6f
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/ExternalOxmModelProcessor.java
@@ -0,0 +1,37 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+
+/**
+ * Exposes a registration and processing contract so that any load/change of an oxm version will
+ * inform registered model processors.
+ *
+ * @author davea
+ */
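+// Implementations are registered via OxmModelLoader.registerExternalOxmModelProcessors(...) and
+// receive a callback each time an OXM model file is loaded or reloaded.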
+public interface ExternalOxmModelProcessor {
+ public void onOxmVersionChange(Version version, DynamicJAXBContext context);
+}
diff --git a/src/main/java/org/openecomp/datarouter/util/FileWatcher.java b/src/main/java/org/openecomp/datarouter/util/FileWatcher.java
new file mode 100644
index 0000000..e382c21
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/FileWatcher.java
@@ -0,0 +1,49 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import java.io.File;
+import java.util.TimerTask;
+
+public abstract class FileWatcher extends TimerTask {
+ private long timeStamp;
+ private File file;
+
+ public FileWatcher(File file) {
+ this.file = file;
+ this.timeStamp = file.lastModified();
+ }
+
+ public final void run() {
+ long timeStamp = file.lastModified();
+
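+    // Only fire onChange if the file's modification time has advanced by more than 500 ms,
+    // which debounces repeated timer polls.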
+ if ((timeStamp - this.timeStamp) > 500) {
+ this.timeStamp = timeStamp;
+ onChange(file);
+ }
+ }
+
+ protected abstract void onChange(File file);
+} \ No newline at end of file
diff --git a/src/main/java/org/openecomp/datarouter/util/NodeUtils.java b/src/main/java/org/openecomp/datarouter/util/NodeUtils.java
new file mode 100644
index 0000000..f957247
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/NodeUtils.java
@@ -0,0 +1,46 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import java.util.Arrays;
+
+public class NodeUtils {
+ /**
+   * Generate a unique SHA digest. This method is copied over from the NodeUtils class in AAI UI.
+   *
+   * @param keys the keys to hash
+   * @return the SHA-256 hex digest of the supplied keys, or null if no keys were given
+ */
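+  // e.g. generateUniqueShaDigest("pserver", "hostname", "host-1") hashes the string
+  // "[pserver, hostname, host-1]" with SHA-256 (illustrative values).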
+ public static String generateUniqueShaDigest(String... keys) {
+ if ((keys == null) || keys.length == 0) {
+ return null;
+ }
+
+ final String keysStr = Arrays.asList(keys).toString();
+ final String hashedId = org.apache.commons.codec.digest.DigestUtils.sha256Hex(keysStr);
+
+ return hashedId;
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/util/OxmModelLoader.java b/src/main/java/org/openecomp/datarouter/util/OxmModelLoader.java
new file mode 100644
index 0000000..2919ba2
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/OxmModelLoader.java
@@ -0,0 +1,166 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.ws.rs.core.Response.Status;
+import javax.xml.bind.JAXBException;
+
+import org.eclipse.persistence.jaxb.JAXBContextProperties;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContextFactory;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.datarouter.logging.DataRouterMsgs;
+
+import org.openecomp.datarouter.util.ExternalOxmModelProcessor;
+
+public class OxmModelLoader {
+
+ private static Map<String, DynamicJAXBContext> versionContextMap = new ConcurrentHashMap<String, DynamicJAXBContext>();
+ private static Map<String, Timer> timers = new ConcurrentHashMap<String, Timer>();
+ private static List<ExternalOxmModelProcessor> oxmModelProcessorRegistry = new ArrayList<ExternalOxmModelProcessor>();
+ final static Pattern p = Pattern.compile("aai_oxm_(.*).xml");
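+  // e.g. a model file named "aai_oxm_v9.xml" is loaded as OXM version "v9"; the captured
+  // version is expected to match a value of the Version enum.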
+
+
+
+ private static org.openecomp.cl.api.Logger logger = LoggerFactory.getInstance()
+ .getLogger(OxmModelLoader.class.getName());
+
+ public synchronized static void loadModels() {
+
+ File[] listOfFiles = new File(DataRouterConstants.DR_HOME_MODEL).listFiles();
+
+ if (listOfFiles != null) {
+ for (File file : listOfFiles) {
+ if (file.isFile()) {
+ Matcher m = p.matcher(file.getName());
+ if (m.matches()) {
+ try {
+ OxmModelLoader.loadModel(m.group(1), file);
+ } catch (Exception e) {
+ logger.error(DataRouterMsgs.INVALID_OXM_FILE, file.getName(), e.getMessage());
+ }
+ }
+
+ }
+ }
+ } else {
+ logger.error(DataRouterMsgs.INVALID_OXM_DIR, DataRouterConstants.DR_HOME_MODEL);
+ }
+
+
+ }
+
+ private static void addtimer(String version,File file){
+ TimerTask task = null;
+ task = new FileWatcher(
+ file) {
+ protected void onChange(File file) {
+          // Reload the OXM model whenever the file changes on disk.
+ logger.info(DataRouterMsgs.FILE_CHANGED, file.getName());
+
+ try {
+ OxmModelLoader.loadModel(version,file);
+ } catch (Exception e) {
+            logger.error(DataRouterMsgs.INVALID_OXM_FILE, file.getName(), e.getMessage());
+ }
+
+ }
+ };
+
+ if (!timers.containsKey(version)) {
+ Timer timer = new Timer("oxm-"+version);
+ timer.schedule(task, new Date(), 10000);
+ timers.put(version, timer);
+
+ }
+ }
+
+ private synchronized static void loadModel(String version,File file) throws JAXBException, FileNotFoundException {
+
+
+ InputStream iStream = new FileInputStream(file);
+ Map<String, Object> properties = new HashMap<String, Object>();
+ properties.put(JAXBContextProperties.OXM_METADATA_SOURCE, iStream);
+ final DynamicJAXBContext jaxbContext = DynamicJAXBContextFactory
+ .createContextFromOXM(Thread.currentThread().getContextClassLoader(), properties);
+ versionContextMap.put(version, jaxbContext);
+ if ( oxmModelProcessorRegistry != null) {
+ for ( ExternalOxmModelProcessor processor : oxmModelProcessorRegistry ) {
+ processor.onOxmVersionChange(Version.valueOf(version), jaxbContext );
+ }
+ }
+ addtimer(version,file);
+
+ }
+
+ public static DynamicJAXBContext getContextForVersion(String version) throws Exception {
+ if (versionContextMap == null || versionContextMap.isEmpty()) {
+ loadModels();
+ } else if (!versionContextMap.containsKey(version)) {
+ try {
+ loadModel(version,new File (DataRouterConstants.DR_HOME_MODEL + "aai_oxm_" + version + ".xml"));
+ } catch (Exception e) {
+ throw new Exception(Status.NOT_FOUND.toString());
+ }
+ }
+
+ return versionContextMap.get(version);
+ }
+
+ public static Map<String, DynamicJAXBContext> getVersionContextMap() {
+ return versionContextMap;
+ }
+
+ public static void setVersionContextMap(Map<String, DynamicJAXBContext> versionContextMap) {
+ OxmModelLoader.versionContextMap = versionContextMap;
+ }
+
+ public synchronized static void registerExternalOxmModelProcessors(Collection<ExternalOxmModelProcessor> processors) {
+ if(processors != null) {
+ for(ExternalOxmModelProcessor processor : processors) {
+ if(!oxmModelProcessorRegistry.contains(processor)) {
+ oxmModelProcessorRegistry.add(processor);
+ }
+ }
+ }
+ }
+
+}
diff --git a/src/main/java/org/openecomp/datarouter/util/RouterServiceUtil.java b/src/main/java/org/openecomp/datarouter/util/RouterServiceUtil.java
new file mode 100644
index 0000000..b30c9f9
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/RouterServiceUtil.java
@@ -0,0 +1,226 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import com.fasterxml.jackson.databind.JsonNode;
+
+import org.json.JSONObject;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+public class RouterServiceUtil {
+
+ public static Map<String, String> parseJsonPayloadIntoMap(String jsonPayload) {
+
+ JSONObject jsonObject = new JSONObject(jsonPayload);
+ Map<String, String> map = new HashMap<String, String>();
+ Iterator iter = jsonObject.keys();
+ while (iter.hasNext()) {
+ String key = (String) iter.next();
+ String value = jsonObject.getString(key);
+ map.put(key, value);
+ }
+ return map;
+ }
+
+ public static String getNodeFieldAsText(JsonNode node, String fieldName) {
+
+ String fieldValue = null;
+
+ JsonNode valueNode = node.get(fieldName);
+
+ if (valueNode != null) {
+ fieldValue = valueNode.asText();
+ }
+
+ return fieldValue;
+ }
+
+ public static String concatArray(List<String> list) {
+ return concatArray(list, " ");
+ }
+
+ public static String concatArray(List<String> list, String delimiter) {
+
+ if (list == null || list.size() == 0) {
+ return "";
+ }
+
+ StringBuilder result = new StringBuilder(64);
+
+ boolean firstValue = true;
+
+ for (String item : list) {
+
+ if (firstValue) {
+ result.append(item);
+ firstValue = false;
+ } else {
+ result.append(delimiter).append(item);
+ }
+ }
+
+ return result.toString();
+
+ }
+
+ public static String concatArray(String[] values) {
+
+ if (values == null || values.length == 0) {
+ return "";
+ }
+
+ StringBuilder result = new StringBuilder(64);
+
+ boolean firstValue = true;
+
+ for (String item : values) {
+
+ if (firstValue) {
+ result.append(item);
+ firstValue = false;
+ } else {
+ result.append(".").append(item);
+ }
+
+ }
+
+ return result.toString();
+
+ }
+
+ public static String recursivelyLookupJsonPayload(JsonNode node, String key) {
+ String value = null;
+ if (node.isObject()) {
+ Iterator<Map.Entry<String, JsonNode>> nodeIterator = node.fields();
+
+ while (nodeIterator.hasNext()) {
+ Map.Entry<String, JsonNode> entry = (Map.Entry<String, JsonNode>) nodeIterator.next();
+ if (!entry.getValue().isValueNode()) {
+ value = recursivelyLookupJsonPayload(entry.getValue(), key);
+ if (value != null) {
+ return value;
+ }
+ }
+
+ String name = entry.getKey();
+ if (name.equalsIgnoreCase(key)) {
+ return entry.getValue().asText();
+ }
+ }
+ } else if (node.isArray()) {
+ Iterator<JsonNode> arrayItemsIterator = node.elements();
+ while (arrayItemsIterator.hasNext()) {
+ value = recursivelyLookupJsonPayload(arrayItemsIterator.next(), key);
+ if (value != null) {
+ return value;
+ }
+ }
+ }
+ return value;
+ }
+
+ public static void extractObjectsByKey(JsonNode node, String searchKey,
+ Collection<JsonNode> foundObjects) {
+
+ if (node.isObject()) {
+ Iterator<Map.Entry<String, JsonNode>> nodeIterator = node.fields();
+
+ while (nodeIterator.hasNext()) {
+ Map.Entry<String, JsonNode> entry = (Map.Entry<String, JsonNode>) nodeIterator.next();
+ if (!entry.getValue().isValueNode()) {
+ extractObjectsByKey(entry.getValue(), searchKey, foundObjects);
+ }
+
+ String name = entry.getKey();
+ if (name.equalsIgnoreCase(searchKey)) {
+
+ JsonNode entryValue = entry.getValue();
+
+ if (entryValue.isArray()) {
+
+ Iterator<JsonNode> arrayItemsIterator = entryValue.elements();
+ while (arrayItemsIterator.hasNext()) {
+ foundObjects.add(arrayItemsIterator.next());
+ }
+
+ } else {
+ foundObjects.add(entry.getValue());
+ }
+ }
+ }
+ } else if (node.isArray()) {
+ Iterator<JsonNode> arrayItemsIterator = node.elements();
+ while (arrayItemsIterator.hasNext()) {
+ extractObjectsByKey(arrayItemsIterator.next(), searchKey, foundObjects);
+ }
+ }
+ }
+
+ public static void convertArrayIntoList(JsonNode node, Collection<JsonNode> instances) {
+
+ if (node.isArray()) {
+ Iterator<JsonNode> arrayItemsIterator = node.elements();
+ while (arrayItemsIterator.hasNext()) {
+ instances.add(arrayItemsIterator.next());
+ }
+ } else {
+ instances.add(node);
+ }
+ }
+
+ public static void extractFieldValuesFromObject(JsonNode node,
+ Collection<String> attributesToExtract, Collection<String> fieldValues) {
+
+ if (node.isObject()) {
+
+ JsonNode valueNode = null;
+
+ for (String attrToExtract : attributesToExtract) {
+
+ valueNode = node.get(attrToExtract);
+
+ if (valueNode != null) {
+
+ if (valueNode.isValueNode()) {
+ fieldValues.add(valueNode.asText());
+ }
+ }
+ }
+ }
+ }
+
+
+ public static String objToJson(Object obj) {
+ JSONObject jsonObject = new JSONObject(obj);
+ String json = jsonObject.toString();
+ return json;
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/util/SearchSuggestionPermutation.java b/src/main/java/org/openecomp/datarouter/util/SearchSuggestionPermutation.java
new file mode 100644
index 0000000..91f5910
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/SearchSuggestionPermutation.java
@@ -0,0 +1,89 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class SearchSuggestionPermutation {
+  /**
+ * Will return all the unique combinations of the suggestions provided.
+ * The order of the permutation is not taken into account when computing
+ * the uniqueness.
+ * e.g.: A list of A,B,C,D will return
+ * [[A], [A, B, C, D], [A, C, D], [A, D], [B], [B, C, D], [B, D], [C], [C, D], [D]]
+ *
+ * @param list The list of statuses to create permutations of
+   * @return A list containing an array list for each possible combination
+ */
+ @SuppressWarnings("serial")
+ public List<ArrayList<String>> getSuggestionsPermutation(List<String> list) {
+ List<String> statusList = new ArrayList<>(list);
+ List<String> dupStatusList;
+ ArrayList<ArrayList<String>> uniqueList = new ArrayList<>();
+ int mainLoopIndexCounter = 0;
+
+ for (String status : statusList) {
+ // Add the single entity subset
+ //This will add the unique single values eg [A],[B],[C],[D]
+ uniqueList.add(new ArrayList<String>() {
+ {
+ add(status);
+ }
+ });
+
+      // Drop all elements up to and including the current index
+      dupStatusList = truncateListUntil(statusList, mainLoopIndexCounter);
+
+ while (!dupStatusList.isEmpty()) {
+ ArrayList<String> suggListInIterate= new ArrayList<>();
+ suggListInIterate.add(status);
+
+ for (String dupStatus : dupStatusList) {
+ suggListInIterate.add(dupStatus);
+ }
+
+ uniqueList.add(suggListInIterate);
+ dupStatusList.remove(0);
+ }
+
+ mainLoopIndexCounter++;
+ }
+
+ return uniqueList;
+ }
+
+  private List<String> truncateListUntil(List<String> lists, int index) {
+ List<String> truncatedList = new ArrayList<>(lists);
+ int counter = 0;
+
+ while (counter <= index) {
+ truncatedList.remove(0);
+ counter++;
+ }
+
+ return truncatedList;
+ }
+}
diff --git a/src/main/java/org/openecomp/datarouter/util/Version.java b/src/main/java/org/openecomp/datarouter/util/Version.java
new file mode 100644
index 0000000..a0b3e5f
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/Version.java
@@ -0,0 +1,29 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+public enum Version {
+ v2, v3, v4, v5, v6, v7, v8, v9, v10;
+} \ No newline at end of file
diff --git a/src/main/java/org/openecomp/datarouter/util/VersionedOxmEntities.java b/src/main/java/org/openecomp/datarouter/util/VersionedOxmEntities.java
new file mode 100644
index 0000000..46c4a76
--- /dev/null
+++ b/src/main/java/org/openecomp/datarouter/util/VersionedOxmEntities.java
@@ -0,0 +1,352 @@
+/**
+ * ============LICENSE_START=======================================================
+ * DataRouter
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.datarouter.util;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Vector;
+
+import org.eclipse.persistence.dynamic.DynamicType;
+import org.eclipse.persistence.internal.helper.DatabaseField;
+import org.eclipse.persistence.internal.oxm.XPathFragment;
+import org.eclipse.persistence.internal.oxm.mappings.Descriptor;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+import org.eclipse.persistence.mappings.DatabaseMapping;
+import org.eclipse.persistence.oxm.XMLField;
+import org.openecomp.datarouter.entity.OxmEntityDescriptor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Builds up a representation of the versioned entities so that they can be cross-referenced
+ * in a data-driven way.
+ * @author DAVEA
+ */
+public class VersionedOxmEntities {
+
+ private static final Logger logger = LoggerFactory.getLogger(VersionedOxmEntities.class);
+ private static final String REST_ROOT_ENTITY = "inventory";
+
+ private HashMap<String,Boolean> crossEntityReferenceContainerLookup = new HashMap<String,Boolean>();
+ private HashMap<String,CrossEntityReference> crossEntityReferenceLookup = new HashMap<String,CrossEntityReference>();
+ private Map<String,DynamicType> entityTypeLookup = new LinkedHashMap<String,DynamicType>();
+ private Map<String, OxmEntityDescriptor> searchableEntityDescriptors = new HashMap<String, OxmEntityDescriptor>();
+ private Map<String, OxmEntityDescriptor> suggestableEntityDescriptors = new HashMap<String, OxmEntityDescriptor>();
+ private Map<String, OxmEntityDescriptor> entityAliasDescriptors = new HashMap<String, OxmEntityDescriptor>();
+
+
+ public void initialize(DynamicJAXBContext context) {
+ parseOxmContext(context);
+ buildCrossEntityReferenceCollections(REST_ROOT_ENTITY, new HashSet<String>());
+ populateSearchableDescriptors(context);
+ }
+
+ /**
+ * The big goal for these methods is to keep the processing as generic and model-driven as
+ * possible. There are only two exceptions to this rule at the moment: the top-level REST data
+ * model entity type is hard-coded ("inventory" for now), and because this class is heavily
+ * focused on building a version-specific set of lookup structures for the "crossEntityReference"
+ * model attribute, it has knowledge of that attribute whether or not it exists in the
+ * DynamicJAXBContext currently being analyzed.
+ *
+ * This method builds two collections:
+ *
+ * 1) A list of entity types that can have nested entities containing cross-entity-reference
+ *    definitions. The purpose of this collection is a fail-fast test when processing UEB events,
+ *    so we can quickly determine whether it is necessary to deeply parse the event looking for
+ *    cross-entity-reference attributes that may not exist.
+ *
+ *    For example, looking at a service-instance <=> inventory path:
+ *
+ *    inventory (true)
+ *      -> business (true)
+ *        -> customers (true)
+ *          -> customer (true)
+ *            -> service-subscriptions (true)
+ *              -> service-subscription (CER defined here in the model) (true)
+ *                -> service-instances (false)
+ *                  -> service-instance (false)
+ *
+ *    Because service-subscription contains a model definition of a CER, every type above it in
+ *    the tree is marked in the first collection as possessing one or more contained entity types
+ *    with a cross-entity-reference definition.
+ *
+ * 2) A lookup of { entityType => CrossEntityReference } so we can quickly access the model
+ *    definition of a CER for a specific entity type when we begin extracting parent attributes
+ *    for transposition into nested child entity types.
+ *
+ * A usage sketch of the resulting lookups follows this method.
+ *
+ * @param entityType the entity type currently being examined
+ * @param checked entity types already visited, used to short-circuit infinite recursion
+ * @return true if this entity type, or any entity type nested beneath it, defines a
+ *         cross-entity-reference
+ */
+ protected boolean buildCrossEntityReferenceCollections(String entityType, HashSet<String> checked) {
+
+ /*
+ * To short-circuit infinite loops, make sure this entityType hasn't
+ * already been checked
+ */
+
+ if(checked.contains(entityType)) {
+ return false;
+ }
+ else {
+ checked.add(entityType);
+ }
+
+ DynamicType parentType = entityTypeLookup.get(entityType);
+ DynamicType childType = null;
+ boolean returnValue = false;
+
+ if(parentType == null) {
+ return returnValue;
+ }
+
+ /*
+ * Check if current descriptor contains the cross-entity-reference
+ * attribute. If it does not, walk the entity model looking for nested
+ * entity types that may contain the reference.
+ */
+
+ Map<String, String> properties = parentType.getDescriptor().getProperties();
+ if(properties != null) {
+ for(Map.Entry<String, String> entry : properties.entrySet()) {
+ if(entry.getKey().equalsIgnoreCase("crossEntityReference")) {
+ returnValue = true;
+ CrossEntityReference cer = new CrossEntityReference();
+ cer.initialize(entry.getValue());
+ crossEntityReferenceLookup.put( entityType, cer);
+ //System.out.println("entityType = " + entityType + " contains a CER instance = " + returnValue);
+ // return true;
+ }
+ }
+ }
+
+ Vector<DatabaseField> fields = parentType.getDescriptor().getAllFields();
+
+ if(fields != null) {
+
+ XMLField xmlField = null;
+ for(DatabaseField f : fields) {
+
+ if(f instanceof XMLField) {
+ xmlField = (XMLField)f;
+ XPathFragment xpathFragment = xmlField.getXPathFragment();
+ String entityShortName = xpathFragment.getLocalName();
+
+ childType = entityTypeLookup.get(entityShortName);
+
+ if(childType != null) {
+
+ if(!checked.contains(entityShortName)) {
+
+ if(buildCrossEntityReferenceCollections(entityShortName,checked)) {
+ returnValue = true;
+ }
+
+ }
+
+ checked.add(entityShortName);
+
+ }
+
+ }
+
+ }
+
+ }
+
+ crossEntityReferenceContainerLookup.put(entityType, Boolean.valueOf(returnValue));
+ return returnValue;
+ }
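+
+ /*
+  * Usage sketch (illustrative only; the jaxbContext variable and the entity-type
+  * values below are examples, not fixed by this class):
+  *
+  *   VersionedOxmEntities oxm = new VersionedOxmEntities();
+  *   oxm.initialize(jaxbContext);
+  *
+  *   if (oxm.entityModelContainsCrossEntityReference("customer")) {
+  *     // fail-fast hit against collection 1: the event is worth deep-parsing
+  *   }
+  *
+  *   CrossEntityReference cer = oxm.getCrossEntityReference("service-subscription");
+  *
+  * The first call consults crossEntityReferenceContainerLookup; the second consults
+  * crossEntityReferenceLookup. Both are populated by the method above.
+  */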
+
+ private void populateSearchableDescriptors(DynamicJAXBContext oxmContext) {
+ List<Descriptor> descriptorsList = oxmContext.getXMLContext().getDescriptors();
+ OxmEntityDescriptor newOxmEntity = null;
+
+ for (Descriptor desc: descriptorsList) {
+
+ DynamicType entity = (DynamicType) oxmContext.getDynamicType(desc.getAlias());
+
+ //LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>();
+ String primaryKeyAttributeNames = null;
+
+ //Not all fields have key attributes
+ if (desc.getPrimaryKeyFields() != null) {
+ primaryKeyAttributeNames = desc.getPrimaryKeyFields().toString().replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "");
+ }
+
+ String entityName = desc.getDefaultRootElement();
+
+ Map<String, String> properties = entity.getDescriptor().getProperties();
+ if (properties != null) {
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+ if (entry.getKey().equalsIgnoreCase("searchable")) {
+
+ /*
+ * We can do all the work here; we don't have to create additional collections for
+ * subsequent passes. An illustrative example of the resulting descriptors follows this method.
+ */
+ newOxmEntity = new OxmEntityDescriptor();
+ newOxmEntity.setEntityName(entityName);
+ newOxmEntity.setPrimaryKeyAttributeName(Arrays.asList(primaryKeyAttributeNames.split(",")));
+ newOxmEntity.setSearchableAttributes(Arrays.asList(entry.getValue().split(",")));
+ searchableEntityDescriptors.put(entityName, newOxmEntity);
+ } else if (entry.getKey().equalsIgnoreCase("containsSuggestibleProps")) {
+ newOxmEntity = new OxmEntityDescriptor();
+ newOxmEntity.setEntityName(entityName);
+ newOxmEntity.setSuggestableEntity(true);
+ Vector<DatabaseMapping> descriptorMaps = entity.getDescriptor().getMappings();
+ List<String> listOfSuggestableAttributes = new ArrayList<String>();
+
+ for (DatabaseMapping descMap : descriptorMaps) {
+ if (descMap.isAbstractDirectMapping()) {
+
+ if (descMap.getProperties().get("suggestibleOnSearch") != null) {
+ String suggestableOnSearchString = String.valueOf(
+ descMap.getProperties().get("suggestibleOnSearch"));
+
+ boolean isSuggestibleOnSearch = Boolean.valueOf(suggestableOnSearchString);
+
+ if (isSuggestibleOnSearch) {
+ /* Grab attribute types for suggestion */
+ String attributeName = descMap.getField().getName()
+ .replaceAll("/text\\(\\)", "");
+ listOfSuggestableAttributes.add(attributeName);
+ }
+ }
+ }
+ }
+ newOxmEntity.setSuggestableAttributes(listOfSuggestableAttributes);
+ suggestableEntityDescriptors.put(entityName, newOxmEntity);
+ } else if (entry.getKey().equalsIgnoreCase("suggestionAliases")) {
+ newOxmEntity = new OxmEntityDescriptor();
+ newOxmEntity.setEntityName(entityName);
+ newOxmEntity.setAlias(Arrays.asList(entry.getValue().split(",")));
+ entityAliasDescriptors.put(entityName, newOxmEntity);
+ }
+ }
+ }
+
+ }
+
+ }
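+
+ /*
+  * Illustrative mapping (the property values below are hypothetical, not taken from
+  * any real OXM file): a descriptor carrying
+  *     searchable = "entity-id,entity-name"
+  * yields a searchableEntityDescriptors entry whose searchable attributes are
+  * [entity-id, entity-name]; a descriptor carrying
+  *     containsSuggestibleProps = "true"
+  * yields a suggestableEntityDescriptors entry listing every direct mapping whose
+  * "suggestibleOnSearch" property evaluates to true; and a descriptor carrying
+  *     suggestionAliases = "alias-one,alias-two"
+  * yields an entityAliasDescriptors entry with aliases [alias-one, alias-two].
+  */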
+
+ public Map<String, OxmEntityDescriptor> getSearchableEntityDescriptors() {
+ return searchableEntityDescriptors;
+ }
+
+ public OxmEntityDescriptor getSearchableEntityDescriptor(String entityType) {
+ return searchableEntityDescriptors.get(entityType);
+ }
+
+
+ public HashMap<String,Boolean> getCrossEntityReferenceContainers() {
+ return crossEntityReferenceContainerLookup;
+ }
+
+ public HashMap<String,CrossEntityReference> getCrossEntityReferences() {
+ return crossEntityReferenceLookup;
+ }
+
+
+ private void parseOxmContext(DynamicJAXBContext oxmContext) {
+ List<Descriptor> descriptorsList = oxmContext.getXMLContext().getDescriptors();
+
+ for(Descriptor desc : descriptorsList) {
+
+ DynamicType entity = (DynamicType)oxmContext.getDynamicType(desc.getAlias());
+
+ String entityName = desc.getDefaultRootElement();
+
+ entityTypeLookup.put(entityName, entity);
+
+ }
+
+ }
+
+ public boolean entityModelContainsCrossEntityReference(String containerEntityType) {
+ Boolean v = crossEntityReferenceContainerLookup.get(containerEntityType);
+
+ if(v == null) {
+ return false;
+ }
+
+ return v;
+ }
+
+ public boolean entityContainsCrossEntityReference(String entityType) {
+ return (crossEntityReferenceLookup.get(entityType) != null);
+ }
+
+ public CrossEntityReference getCrossEntityReference(String entityType) {
+ return crossEntityReferenceLookup.get(entityType);
+ }
+
+ public Map<String, OxmEntityDescriptor> getSuggestableEntityDescriptors() {
+ return suggestableEntityDescriptors;
+ }
+
+ public void setSuggestableEntityDescriptors(
+ Map<String, OxmEntityDescriptor> suggestableEntityDescriptors) {
+ this.suggestableEntityDescriptors = suggestableEntityDescriptors;
+ }
+
+ public Map<String, OxmEntityDescriptor> getEntityAliasDescriptors() {
+ return entityAliasDescriptors;
+ }
+
+ public void setEntityAliasDescriptors(Map<String, OxmEntityDescriptor> entityAliasDescriptors) {
+ this.entityAliasDescriptors = entityAliasDescriptors;
+ }
+
+ public void extractEntities(String entityType, DynamicJAXBContext context, Collection<DynamicType> entities) {
+   // No implementation yet.
+ }
+
+ public String dumpCrossEntityReferenceContainers() {
+
+ Set<String> keys = crossEntityReferenceContainerLookup.keySet();
+ StringBuilder sb = new StringBuilder(128);
+
+ for (String key : keys) {
+   if (crossEntityReferenceContainerLookup.get(key)) {
+     sb.append("\n").append("Entity-Type = '").append(key).append("' contains a Cross-Entity-Reference.");
+   }
+ }
+
+ return sb.toString();
+
+ }
+
+} \ No newline at end of file
diff --git a/src/main/resources/entitysearch_schema.json b/src/main/resources/entitysearch_schema.json
new file mode 100644
index 0000000..5ff7625
--- /dev/null
+++ b/src/main/resources/entitysearch_schema.json
@@ -0,0 +1,35 @@
+{
+ "fields": [
+ {
+ "name": "entityType",
+ "data-type": "string",
+ "searchable": "true",
+ "index-analyzer": "ngram_analyzer"
+ },
+ {
+ "name": "entityPrimaryKeyValue",
+ "data-type": "string",
+ "searchable": "false"
+ },
+ {
+ "name": "searchTagIDs",
+ "data-type": "string",
+ "searchable": "true"
+ },
+ {
+ "name": "searchTags",
+ "data-type": "string",
+ "index-analyzer": "ngram_analyzer"
+ },
+ {
+ "name": "crossEntityReferenceValues",
+ "data-type": "string",
+ "index-analyzer": "ngram_analyzer"
+ },
+ {
+ "name": "lastmodTimestamp",
+ "data-type": "date",
+ "format": "MMM d y HH:m:s||dd-MM-yyyy HH:mm:ss||yyyy-MM-dd'T'HH:mm:ss.SSSZZ||yyyy-MM-dd HH:mm:ss||MM\/dd\/yyyy||yyyyMMdd'T'HHmmssZ"
+ }
+ ]
+}
diff --git a/src/main/resources/logging/DataRouterMsgs.properties b/src/main/resources/logging/DataRouterMsgs.properties
new file mode 100644
index 0000000..352f239
--- /dev/null
+++ b/src/main/resources/logging/DataRouterMsgs.properties
@@ -0,0 +1,135 @@
+#Resource key=Error Code|Message text|Resolution text |Description text
+#######
+#Newlines can be utilized to add some clarity, ensuring the continuing line
+#has at least one leading space
+#ResourceKey=\
+# ERR0000E\
+# Sample error msg txt\
+# Sample resolution msg\
+# Sample description txt
+#
+######
+#Error code classification category
+#000 Info/Debug
+#100 Permission errors
+#200 Availability errors/Timeouts
+#300 Data errors
+#400 Schema Interface type/validation errors
+#500 Business process errors
+#900 Unknown errors
+#
+########################################################################
+
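+# Illustrative rendering (hypothetical argument; assumes MessageFormat-style
+# substitution of the {0} placeholder): FILE_CHANGED formatted with the argument
+# "datarouter.properties" reads: File datarouter.properties has been changed!
+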
+SERVICE_STARTED=\
+ DataRouter service started
+
+FILE_CHANGED=\
+ DR0002I|\
+ File {0} has been changed!
+
+FILE_RELOADED=\
+ DR0003I|\
+ File {0} has been reloaded!
+
+REPORT_CONFIG_WATCHER_INT=\
+ DR0004I|\
+ Config Watcher Interval = {0}
+
+LOADING_PROPERTIES=\
+ DR0005I|\
+ Loading properties - {0}
+
+PROPERTIES_LOADED=\
+ DR0006I|\
+ File {0} is loaded into the map and the corresponding system properties have been refreshed
+
+UEB_NO_EVENTS_RECEIVED=\
+ DR0007I|\
+ Received no events on topic: {0}
+
+CONFIGURING_ROUTING_POLICIES=\
+ DR0008I|\
+ Configuring routing policies on Data Layer
+
+PROCESS_REST_REQUEST=\
+ DR0009I|\
+ Received request {0} {1} from {2}. Sending response: {3}
+
+PROCESS_EVENT=\
+ DR0010I|\
+ Processed event {0}. Result: {1}
+
+STARTUP_FAILURE=\
+ DR2001E|\
+ DecoratorService failed to start up. Reason: {0}
+
+UEB_CONNECT_ERR=\
+ DR2002E|\
+ Failed to query UEB for event topic {0}: {1}
+
+QUERY_ROUTING_ERR=\
+ DR2003E|\
+ Failed to route query for event topic {0}: {1}
+
+LOAD_PROPERTIES_FAILURE=\
+ DR3001E|\
+ File {0} cannot be loaded into the map.
+
+FILE_MON_BLOCK_ERR=\
+ DR3002E|\
+ Error in the file monitor block.
+
+CREATE_PROPERTY_MAP_ERR=\
+ DR3003E|\
+ Error creating property map.
+
+FILE_STREAM_ERR=\
+ DR3004E|\
+ Error reading the file stream.
+
+ROUTING_POLICY_CONFIGURATION_ERROR=\
+ DR3005E|\
+ Could not configure routing policy {0} for source {1} and action {2}
+
+BAD_REST_REQUEST=\
+ DR3006E|\
+ Bad Rest Request {0}
+
+FAIL_TO_CREATE_SEARCH_INDEX=\
+ DR3008E|\
+ Failed to create Search index {0} due to: {1}
+
+INVALID_OXM_FILE=\
+ DR3009E|\
+ Unable to parse schema file: {0} due to error: {1}
+
+INVALID_OXM_DIR=\
+ DR3010E|\
+ Invalid OXM dir: {0}
+
+SYSTEM_ERROR=\
+ DR3011E|\
+ System Error: {0}
+
diff --git a/src/main/resources/logging/EntityEventPolicyMsgs.properties b/src/main/resources/logging/EntityEventPolicyMsgs.properties
new file mode 100644
index 0000000..3fac391
--- /dev/null
+++ b/src/main/resources/logging/EntityEventPolicyMsgs.properties
@@ -0,0 +1,122 @@
+#Resource key=Error Code|Message text|Resolution text |Description text
+#######
+#Newlines can be utilized to add some clarity, ensuring the continuing line
+#has at least one leading space
+#ResourceKey=\
+# ERR0000E\
+# Sample error msg txt\
+# Sample resolution msg\
+# Sample description txt
+#
+######
+#Error code classification category
+#000 Info/Debug
+#100 Permission errors
+#200 Availability errors/Timeouts
+#300 Data errors
+#400 Schema Interface type/validation errors
+#500 Business process errors
+#900 Unknown errors
+#
+########################################################################
+
+
+DISCARD_AAI_EVENT_VERBOSE=\
+ EEP0001E|\
+ Discarding event. Reason: {0}. Payload: {1}
+
+DISCARD_AAI_EVENT_NONVERBOSE=\
+ EEP0002E|\
+ Discarding event. Reason: {0}
+
+OXM_VERSION_NOT_SUPPORTED=\
+ EEP0003E|\
+ OXM version: {0} is not supported.
+
+FAILED_TO_PARSE_UEB_PAYLOAD=\
+ EEP0004E|\
+ Failed to parse UEB payload. {0}. {1}
+
+NO_ETAG_AVAILABLE_FAILURE=\
+ EEP0005E|\
+ Unable to retrieve etag at {0} for entity with id {1}
+
+ENTITY_OPERATION_NOT_SUPPORTED=\
+ EEP0006E|\
+ Action: {0} is not supported.
+
+FAILED_TO_UPDATE_ENTITY_IN_DOCSTORE=\
+ EEP007E|\
+ Failed to update entity {0} with operation {1}
+
+DISCARD_UPDATING_SEARCH_SUGGESTION_DATA=\
+ EEP010E|\
+ Discarding search suggestion data. Reason: {0}
+
+DISCARD_UPDATING_TOPOGRAPHY_DATA_VERBOSE=\
+ EEP008E|\
+ Discarding topographical data. Reason: {0}. Payload: {1}
+
+DISCARD_UPDATING_TOPOGRAPHY_DATA_NONVERBOSE=\
+ EEP009E|\
+ Discarding topographical data. Reason: {0}.
+
+PROCESS_OXM_MODEL_MISSING=\
+ EEP010E|\
+ Failed to load OXM Model.
+
+FAILED_TO_FIND_OXM_VERSION=\
+ EEP011E|\
+ Failed to find OXM version in UEB payload. {0}
+
+FAIL_TO_CREATE_SEARCH_INDEX=\
+ EEP012E|\
+ Failed to create Search index {0} due to: {1}
+
+PROCESS_AAI_ENTITY_EVENT_POLICY_VERBOSE=\
+ EEP0001I|\
+ Processing AAI Entity Event Policy: [Action: {0} Entity Type: {1}]. Payload: {2}
+
+PROCESS_AAI_ENTITY_EVENT_POLICY_NONVERBOSE=\
+ EEP0002I|\
+ Processing AAI Entity Event Policy: [Action: {0} Entity Type: {1}].
+
+CROSS_ENTITY_REFERENCE_SYNC=\
+ EEP0003I|\
+ Cross Entity Reference synchronization {0}
+
+OPERATION_RESULT_NO_ERRORS=\
+ EEP0004I|\
+ Operation {0} completed in {1} ms with no errors
+
+PROCESS_OXM_MODEL_FOUND=\
+ EEP0005I|\
+ Found OXM model: {0}
+
+SEARCH_INDEX_CREATE_SUCCESS=\
+ EEP0006I|\
+ Successfully created index at {0}
+
+ENTITY_EVENT_POLICY_REGISTERED=\
+ EEP0007I|\
+ Entity Event Policy component started.
+
+UEB_EVENT_HEADER_PARSED=\
+ EE0008I|\
+ Parsed UEB event header {0}
+
+PRIMARY_KEY_NULL_FOR_ENTITY_TYPE=\
+ EEP0301E|\
+ Primary key value is null for entity type: {0}
+
+UEB_INVALID_PAYLOAD_JSON_FORMAT=\
+ EE0302E|\
+ Payload has invalid JSON format: {0}
+
+UEB_FAILED_TO_PARSE_PAYLOAD=\
+ EE0303E|\
+ {0} missing
+
+UEB_FAILED_UEBEVENTHEADER_CONVERSION=\
+ EE0304E|\
+ {0} \ No newline at end of file
diff --git a/src/main/resources/topographysearch_schema.json b/src/main/resources/topographysearch_schema.json
new file mode 100644
index 0000000..d542631
--- /dev/null
+++ b/src/main/resources/topographysearch_schema.json
@@ -0,0 +1,8 @@
+{
+ "fields": [
+ {"name": "pkey", "data-type": "string", "searchable": "false"},
+ {"name": "entityType", "data-type": "string", "searchable": "false"},
+ {"name": "location", "data-type": "geo_point", "searchable": "false"},
+ {"name": "selfLink", "data-type": "string", "searchable": "false"}
+ ]
+} \ No newline at end of file
diff --git a/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context b/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context
new file mode 100644
index 0000000..8514196
--- /dev/null
+++ b/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context
@@ -0,0 +1 @@
+{"context":{"contextClass":"ajsc.Context","contextId":"__module_ajsc_namespace_name__:__module_ajsc_namespace_version__","contextName":"__module_ajsc_namespace_name__","contextVersion":"__module_ajsc_namespace_version__","description":"__module_ajsc_namespace_name__ Context"}} \ No newline at end of file
diff --git a/src/main/runtime/context/default#0.context b/src/main/runtime/context/default#0.context
new file mode 100644
index 0000000..d1b5ab4
--- /dev/null
+++ b/src/main/runtime/context/default#0.context
@@ -0,0 +1 @@
+{"context":{"contextClass":"ajsc.Context","contextId":"default:0","contextName":"default","contextVersion":"0","description":"Default Context"}} \ No newline at end of file
diff --git a/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json b/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json
new file mode 100644
index 0000000..d0954cf
--- /dev/null
+++ b/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json
@@ -0,0 +1 @@
+{"deploymentPackage":{"Class":"ajsc.DeploymentPackage","Id":"__module.ajsc.namespace.name__:__module_ajsc_namespace_version__","namespace":"__module_ajsc_namespace_name__","namespaceVersion":"__module_ajsc_namespace_version__","description":"__module_ajsc_namespace_name__ __module_ajsc_namespace_version__ - default description","userId":"ajsc"}} \ No newline at end of file
diff --git a/src/main/runtime/shiroRole/ajscadmin.json b/src/main/runtime/shiroRole/ajscadmin.json
new file mode 100644
index 0000000..f5e981e
--- /dev/null
+++ b/src/main/runtime/shiroRole/ajscadmin.json
@@ -0,0 +1 @@
+{"shiroRoleClass":"ajsc.auth.ShiroRole","shiroRoleId":"ajscadmin","name":"ajscadmin","permissions":"[ajscadmin:*, ajsc:*]"} \ No newline at end of file
diff --git a/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json b/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json
new file mode 100644
index 0000000..2dae9f5
--- /dev/null
+++ b/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json
@@ -0,0 +1 @@
+{"shiroRoleClass":"ajsc.auth.ShiroRole","shiroRoleId":"contextadmin:__module_ajsc_namespace_name__","name":"contextadmin:__module_ajsc_namespace_name__","permissions":"[]"} \ No newline at end of file
diff --git a/src/main/runtime/shiroRole/contextadmin#default.json b/src/main/runtime/shiroRole/contextadmin#default.json
new file mode 100644
index 0000000..5de814e
--- /dev/null
+++ b/src/main/runtime/shiroRole/contextadmin#default.json
@@ -0,0 +1 @@
+{"shiroRoleClass":"ajsc.auth.ShiroRole","shiroRoleId":"contextadmin:default","name":"contextadmin:default","permissions":"[]"} \ No newline at end of file
diff --git a/src/main/runtime/shiroUser/ajsc.json b/src/main/runtime/shiroUser/ajsc.json
new file mode 100644
index 0000000..f4c7855
--- /dev/null
+++ b/src/main/runtime/shiroUser/ajsc.json
@@ -0,0 +1 @@
+{"shiroUserClass":"ajsc.auth.ShiroUser","shiroUserId":"ajsc","passwordHash":"9471697417008c880720ba54c6038791ad7e98f3b88136fe34f4d31a462dd27a","permissions":"[*:*]","username":"ajsc"} \ No newline at end of file
diff --git a/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json b/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json
new file mode 100644
index 0000000..cb8d483
--- /dev/null
+++ b/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json
@@ -0,0 +1 @@
+{"shiroUserRoleClass":"ajsc.auth.ShiroUserRole","shiroUserRoleId":"ajsc:ajscadmin","roleId":"ajscadmin","userId":"ajsc"} \ No newline at end of file
diff --git a/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json b/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json
new file mode 100644
index 0000000..95d2361
--- /dev/null
+++ b/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json
@@ -0,0 +1 @@
+{"shiroUserRoleClass":"ajsc.auth.ShiroUserRole","shiroUserRoleId":"ajsc:contextadmin:__module_ajsc_namespace_name__","roleId":"contextadmin:__module_ajsc_namespace_name__","userId":"ajsc"} \ No newline at end of file
diff --git a/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json b/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json
new file mode 100644
index 0000000..2bd5063
--- /dev/null
+++ b/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json
@@ -0,0 +1 @@
+{"shiroUserRoleClass":"ajsc.auth.ShiroUserRole","shiroUserRoleId":"ajsc:contextadmin:default","roleId":"contextadmin:default","userId":"ajsc"} \ No newline at end of file