author    Venkata Harish K Kajur <vk250x@att.com>    2018-01-30 13:52:34 -0500
committer Venkata Harish K Kajur <vk250x@att.com>    2018-02-28 11:36:08 -0500
commit    4f13d6cc64eed0ef75a31ec5f490853267447ab1 (patch)
tree      cb7066e12fc39038cc99b74bbeba879c9eb6ca8c /aai-resources/src/main
parent    73539cae6d24e352b84a6411e523da734fbc19e2 (diff)
Turn ajsc 2 to using ajsc 6 spring boot
Issue-ID: AAI-800
Change-Id: Id174ec5088ddea57f18e605d004c417bee8fbf33
Signed-off-by: Venkata Harish K Kajur <vk250x@att.com>
Diffstat (limited to 'aai-resources/src/main')
-rw-r--r--  aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/FileMonitorBeans.xml  20
-rw-r--r--  aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/jaxrsBeans.groovy  29
-rw-r--r--  aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/serviceBeans.xml  101
-rw-r--r--  aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/docs/README.txt  1
-rw-r--r--  aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/lib/README.txt  1
-rw-r--r--  aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/props/module.props  1
-rw-r--r--  aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/routes/aai.route  4
-rw-r--r--  aai-resources/src/main/assemble/ajsc_module_assembly.xml  66
-rw-r--r--  aai-resources/src/main/assemble/ajsc_props_assembly.xml  23
-rw-r--r--  aai-resources/src/main/assemble/ajsc_runtime_assembly.xml  44
-rw-r--r--  aai-resources/src/main/assembly/descriptor.xml  32
-rw-r--r--  aai-resources/src/main/config/ajsc-jetty.xml  164
-rw-r--r--  aai-resources/src/main/config/ajsc-jolokia-override-web.xml  46
-rw-r--r--  aai-resources/src/main/config/ajsc-override-web.xml  41
-rw-r--r--  aai-resources/src/main/config/ajsc-request.xml  49
-rw-r--r--  aai-resources/src/main/config/hazelcast-client.properties  25
-rw-r--r--  aai-resources/src/main/config/jul-redirect.properties  13
-rw-r--r--  aai-resources/src/main/config/keyfile  27
-rw-r--r--  aai-resources/src/main/config/logback-migration.xml  56
-rw-r--r--  aai-resources/src/main/config/runner-web.xml  106
-rw-r--r--  aai-resources/src/main/docker/Dockerfile  22
-rw-r--r--  aai-resources/src/main/docker/aai.sh (renamed from aai-resources/src/main/resources/docker/aai.sh)  0
-rw-r--r--  aai-resources/src/main/docker/docker-entrypoint.sh  110
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/Profiles.java (renamed from aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesListener.java)  20
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/ResourcesApp.java  159
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/ajsc_aai/JaxrsErrorMessageLookupService.java  99
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesMap.java  127
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertyService.java  219
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/config/JettyPasswordDecoder.java (renamed from aai-resources/src/main/java/org/onap/aai/config/DmaapConfig.java)  24
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/config/PasswordDecoder.java  27
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java  83
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/dbgen/DupeTool.java  1900
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java  109
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/AAIContainerFilter.java (renamed from aai-resources/src/main/java/org/onap/aai/ajsc_aai/util/ServicePropertiesMapBean.java)  29
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java  18
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/AAILogJAXRSInInterceptor.java  285
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/AAILogJAXRSOutInterceptor.java  303
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/PreAaiAjscInterceptor.java  61
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/package-info.java  38
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java  34
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java (renamed from aai-resources/src/main/java/org/onap/aai/interceptors/PostAaiAjscInterceptor.java)  52
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java  51
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java  127
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java (renamed from aai-resources/src/main/java/org/onap/aai/ajsc_aai/JaxrsUserService.java)  44
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java  89
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java  72
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java  78
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java  106
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java  71
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/rest/LegacyMoxyConsumer.java  63
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/rest/bulk/BulkUriInfo.java  20
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java  3
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/rest/util/EchoResponse.java  14
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java  120
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/web/JerseyConfiguration.java  151
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/web/LocalHostAccessLog.java  60
-rw-r--r--  aai-resources/src/main/java/org/onap/aai/web/WebConfiguration.java  48
-rw-r--r--  aai-resources/src/main/jenkins/Jenkinsfile  31
-rw-r--r--  aai-resources/src/main/jenkins/build.groovy  14
-rw-r--r--  aai-resources/src/main/jenkins/checkout.groovy  14
-rw-r--r--  aai-resources/src/main/jenkins/deploy.groovy  15
-rw-r--r--  aai-resources/src/main/kubernetes/ajsc6configdemo-rc.yaml  28
-rw-r--r--  aai-resources/src/main/kubernetes/ajsc6configdemo-svc.yaml  21
-rw-r--r--  aai-resources/src/main/kubernetes/ajsc6configdemo.yaml  8
-rw-r--r--  aai-resources/src/main/kubernetes/kubectl.conf  22
-rw-r--r--  aai-resources/src/main/resources/application.properties  70
-rw-r--r--  aai-resources/src/main/resources/bootstrap.properties  16
-rw-r--r--  aai-resources/src/main/resources/docker/Dockerfile.ext  76
-rw-r--r--  aai-resources/src/main/resources/docker/commonLibs/README  1
-rw-r--r--  aai-resources/src/main/resources/docker/docker-entrypoint.sh  104
-rw-r--r--  aai-resources/src/main/resources/docker/init-chef.sh  104
-rw-r--r--  aai-resources/src/main/resources/etc/VNT-migration-data/VNT-migration-input.csv  2
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/Introscope.properties  8
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/PostProcessorInterceptors.properties  3
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/PreProcessorInterceptors.properties  3
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/aaiconfig.properties  165
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/app-intercepts.properties  6
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/caet.properties  4
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/createDBSchema-logback.xml  131
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/dataGrooming-logback.xml  138
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/dataSnapshot-logback.xml  139
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/default-logback.xml  43
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/dupeTool-logback.xml  62
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/dynamic.properties  35
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/dynamicPayloadGenerator-logback.xml  85
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/error.properties  173
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/forceDelete-logback.xml  85
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/getres-logback.xml  123
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/loadDataForDHV-logback.xml  116
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/logging.properties  128
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/methodMapper.properties  24
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/migration-logback.xml  85
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/preferredRoute.txt  1
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/pullInvData-logback.xml  128
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/schemaMod-logback.xml  85
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/titan-cached.properties (renamed from aai-resources/src/main/scripts/run_FixXSD.sh)  64
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/titan-realtime.properties  36
-rw-r--r--  aai-resources/src/main/resources/etc/appprops/uniquePropertyCheck-logback.xml  85
-rw-r--r--  aai-resources/src/main/resources/etc/auth/aai-client-cert.p12  bin 0 -> 5924 bytes
-rw-r--r--  aai-resources/src/main/resources/etc/auth/aai_keystore  bin 0 -> 3811 bytes
-rw-r--r--  aai-resources/src/main/resources/etc/auth/realm.properties (renamed from aai-resources/src/main/config/realm.properties)  0
-rw-r--r--  aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodes.json  14
-rw-r--r--  aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodesAZCloud.json  22
-rw-r--r--  aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodesNoAZ.json  14
-rw-r--r--  aai-resources/src/main/resources/etc/tmp-config/hbase-site.xml  52
-rw-r--r--  aai-resources/src/main/resources/etc/tosca-migration-data/edgeLabelMigration.csv  212
-rw-r--r--  aai-resources/src/main/resources/localhost-access-logback.xml  62
-rw-r--r--  aai-resources/src/main/resources/logback.xml  391
-rw-r--r--  aai-resources/src/main/resources/logmessages.properties  6
-rw-r--r--  aai-resources/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context  1
-rw-r--r--  aai-resources/src/main/runtime/context/default#0.context  1
-rw-r--r--  aai-resources/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json  1
-rw-r--r--  aai-resources/src/main/runtime/shiroRole/ajscadmin.json  1
-rw-r--r--  aai-resources/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json  1
-rw-r--r--  aai-resources/src/main/runtime/shiroRole/contextadmin#default.json  1
-rw-r--r--  aai-resources/src/main/runtime/shiroUser/ajsc.json  1
-rw-r--r--  aai-resources/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json  1
-rw-r--r--  aai-resources/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json  1
-rw-r--r--  aai-resources/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json  1
-rw-r--r--  aai-resources/src/main/scripts/audit_schema.sh  33
-rw-r--r--  aai-resources/src/main/scripts/common_functions.sh  56
-rw-r--r--  aai-resources/src/main/scripts/createDBSchema.sh  27
-rw-r--r--  aai-resources/src/main/scripts/deleteTool.sh  4
-rw-r--r--  aai-resources/src/main/scripts/dupeTool.sh  73
-rw-r--r--  aai-resources/src/main/scripts/dynamicPayloadArchive.sh  7
-rw-r--r--  aai-resources/src/main/scripts/dynamicPayloadGenerator.sh  42
-rw-r--r--  aai-resources/src/main/scripts/edgeTagger.sh  40
-rw-r--r--  aai-resources/src/main/scripts/forceDeleteTool.sh  32
-rw-r--r--  aai-resources/src/main/scripts/getTool.sh  6
-rw-r--r--  aai-resources/src/main/scripts/install/addManualData.sh  14
-rw-r--r--  aai-resources/src/main/scripts/putTool.sh  8
-rw-r--r--  aai-resources/src/main/scripts/rshipTool.sh  10
-rw-r--r--  aai-resources/src/main/scripts/run_DbTestProcessBuilder.sh  51
-rw-r--r--  aai-resources/src/main/scripts/run_Migrations.sh  81
-rw-r--r--  aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/docker-compose.template.yaml  41
-rw-r--r--  aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/execTool.sh  18
-rwxr-xr-x  aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/kill_resources.sh  7
-rwxr-xr-x  aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/start_resources.sh  6
-rwxr-xr-x  aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/stop_resources.sh  6
-rw-r--r--  aai-resources/src/main/swm/package/nix/install/postproc/post_proc  4
-rw-r--r--  aai-resources/src/main/swm/package/nix/install/preproc/pre_proc  4
141 files changed, 6748 insertions, 2771 deletions
diff --git a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/FileMonitorBeans.xml b/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/FileMonitorBeans.xml
deleted file mode 100644
index 37e929d..0000000
--- a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/FileMonitorBeans.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://www.springframework.org/schema/beans"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">
-
- <!--<bean-->
- <!--class="org.springframework.context.annotation.CommonAnnotationBeanPostProcessor" />-->
- <!---->
- <!--<bean id="ServicePropertiesListener" class="org.onap.aai.ajsc_aai.filemonitor.ServicePropertiesListener" />-->
- <!---->
- <!--<bean id="ServicePropertiesMap" class="org.onap.aai.ajsc_aai.filemonitor.ServicePropertiesMap" />-->
-
- <!--<bean id="ServicePropertyService" class="org.onap.aai.ajsc_aai.filemonitor.ServicePropertyService">-->
- <!--<property name="loadOnStartup" value="false" />-->
- <!--<property name="fileChangedListener" ref="ServicePropertiesListener" />-->
- <!--<property name="filePropertiesMap" ref="ServicePropertiesMap" />-->
- <!--<property name="ssfFileMonitorPollingInterval" value="15" />-->
- <!--<property name="ssfFileMonitorThreadpoolSize" value="10" />-->
- <!--</bean>-->
-</beans>
diff --git a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/jaxrsBeans.groovy b/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/jaxrsBeans.groovy
deleted file mode 100644
index 0b036c6..0000000
--- a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/jaxrsBeans.groovy
+++ /dev/null
@@ -1,29 +0,0 @@
-beans{
- xmlns cxf: "http://camel.apache.org/schema/cxf"
- xmlns jaxrs: "http://cxf.apache.org/jaxrs"
- xmlns util: "http://www.springframework.org/schema/util"
-
- LegacyMoxyConsumer(org.onap.aai.rest.LegacyMoxyConsumer)
- URLFromVertexIdConsumer(org.onap.aai.rest.URLFromVertexIdConsumer)
- VertexIdConsumer(org.onap.aai.rest.VertexIdConsumer)
- BulkAddConsumer(org.onap.aai.rest.BulkAddConsumer)
- BulkProcessConsumer(org.onap.aai.rest.BulkProcessConsumer)
- ExampleConsumer(org.onap.aai.rest.ExampleConsumer)
- V3ThroughV7Consumer(org.onap.aai.rest.retired.V3ThroughV7Consumer)
- EchoResponse(org.onap.aai.rest.util.EchoResponse)
- ModelVersionTransformer(org.onap.aai.rest.tools.ModelVersionTransformer)
-
- util.list(id: 'jaxrsServices') {
-
- ref(bean:'ExampleConsumer')
- ref(bean:'LegacyMoxyConsumer')
- ref(bean:'VertexIdConsumer')
- ref(bean:'URLFromVertexIdConsumer')
- ref(bean:'BulkAddConsumer')
- ref(bean:'BulkProcessConsumer')
- ref(bean:'V3ThroughV7Consumer')
- ref(bean:'ModelVersionTransformer')
-
- ref(bean:'EchoResponse')
- }
-}
diff --git a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/serviceBeans.xml b/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/serviceBeans.xml
deleted file mode 100644
index 578fa6f..0000000
--- a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/conf/serviceBeans.xml
+++ /dev/null
@@ -1,101 +0,0 @@
-<beans xmlns="http://www.springframework.org/schema/beans"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:cxf="http://cxf.apache.org/core"
- xmlns:context="http://www.springframework.org/schema/context"
- xmlns:task="http://www.springframework.org/schema/task" xmlns:jms="http://www.springframework.org/schema/jms"
- xsi:schemaLocation="
- http://cxf.apache.org/core
- http://cxf.apache.org/schemas/core.xsd
- http://www.springframework.org/schema/beans
- http://www.springframework.org/schema/beans/spring-beans.xsd
- http://www.springframework.org/schema/context
- http://www.springframework.org/schema/context/spring-context-2.5.xsd
- http://www.springframework.org/schema/task
- http://www.springframework.org/schema/task/spring-task-3.0.xsd
- http://www.springframework.org/schema/jms
- http://www.springframework.org/schema/jms/spring-jms-4.1.xsd">
-
- <!-- Your bean definitions goes here -->
- <!-- <bean id="performanceLog" name="performanceLog" class="com.att.ajsc.csi.logging.PerformanceTracking"
- /> -->
- <!-- <bean id="processRestletHeaders" name="processRestletHeaders" class="ajsc.restlet.ProcessRestletHeaders"
- /> -->
- <!--<bean id="servicePropsBean" name="servicePropsBean"-->
- <!--class="org.onap.aai.ajsc_aai.util.ServicePropertiesMapBean" />-->
- <bean id="jsonProvider" class="org.onap.aai.restcore.CustomJacksonJaxBJsonProvider" />
-
- <bean id="inInterceptor" class="org.onap.aai.interceptors.AAILogJAXRSInInterceptor" />
- <bean id="outInterceptor" class="org.onap.aai.interceptors.AAILogJAXRSOutInterceptor" />
- <!--<bean id="readInInterceptor" class="org.onap.aai.interceptors.AAICXFReadPhaseInterceptor" />-->
- <cxf:bus bus="cxfBus">
- <cxf:inInterceptors>
- <ref bean="inInterceptor" />
- <!--<ref bean="readInInterceptor" />-->
- </cxf:inInterceptors>
- <cxf:outInterceptors>
- <ref bean="outInterceptor" />
- </cxf:outInterceptors>
- </cxf:bus>
- <context:component-scan base-package="org.onap.aai.tasks" />
- <context:component-scan base-package="org.onap.aai.config" />
-
- <task:scheduler id="taskScheduler" pool-size="10" />
- <task:executor id="taskExecutor" pool-size="10"
- queue-capacity="5" />
- <task:annotation-driven executor="taskExecutor"
- scheduler="taskScheduler" />
-
- <bean id="jmsProperties"
- class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer"
- name="jmsProperties">
- <property name="order" value="99999" />
- <property name="systemPropertiesModeName" value="SYSTEM_PROPERTIES_MODE_OVERRIDE" />
- <property name="ignoreUnresolvablePlaceholders" value="true" />
- <property name="properties">
- <value>
-
- <!-- JMS -->
- JMS.BROKER.URL=tcp://localhost:61447
- JMS.QUEUE.NAME=IN_QUEUE
-
- </value>
- </property>
- </bean>
-
- <!-- ActiveMQ connection factory -->
- <bean id="amqConnectionFactory" class="org.apache.activemq.ActiveMQConnectionFactory">
- <constructor-arg index="0" value="${JMS.BROKER.URL}" />
- </bean>
-
- <!-- ConnectionFactory Definition -->
- <bean id="connectionFactory"
- class="org.springframework.jms.connection.CachingConnectionFactory">
- <constructor-arg ref="amqConnectionFactory" />
- </bean>
-
- <!-- Destination Queue -->
- <bean id="destinationQueue" class="org.apache.activemq.command.ActiveMQQueue">
- <constructor-arg index="0" value="${JMS.QUEUE.NAME}" />
- </bean>
-
- <!-- JmsTemplate Definition -->
- <bean id="jmsTemplate" class="org.springframework.jms.core.JmsTemplate">
- <property name="connectionFactory" ref="connectionFactory" />
- <property name="defaultDestination" ref="destinationQueue" />
- </bean>
-
- <!-- Message Producer -->
- <bean id="messageProducer" class="org.onap.aai.dmaap.AAIDmaapEventJMSProducer" />
-
- <!-- Message Consumer from Default Destination -->
- <bean id="messageDefaultConsumer" class="org.onap.aai.dmaap.AAIDmaapEventJMSConsumer" />
-
- <!-- Message Consumer Container for Default Destination -->
- <bean
- class="org.springframework.jms.listener.DefaultMessageListenerContainer">
- <property name="connectionFactory" ref="connectionFactory" />
- <property name="destinationName" value="${JMS.QUEUE.NAME}" />
- <property name="messageListener" ref="messageDefaultConsumer" />
- </bean>
-
-
-</beans>
diff --git a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/docs/README.txt b/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/docs/README.txt
deleted file mode 100644
index 3707179..0000000
--- a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/docs/README.txt
+++ /dev/null
@@ -1 +0,0 @@
-Place any docs here that you want to access within the ajsc upon deployment of your service.
diff --git a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/lib/README.txt b/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/lib/README.txt
deleted file mode 100644
index 639e21b..0000000
--- a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/lib/README.txt
+++ /dev/null
@@ -1 +0,0 @@
-3rd party JAR's needed by your jars (if any) for a ajsc deployment package go here...
\ No newline at end of file
diff --git a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/props/module.props b/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/props/module.props
deleted file mode 100644
index 17ebc08..0000000
--- a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/props/module.props
+++ /dev/null
@@ -1 +0,0 @@
-EXAMPLE.PROPERTY=EXAMLE_VALUE
\ No newline at end of file
diff --git a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/routes/aai.route b/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/routes/aai.route
deleted file mode 100644
index 6a86246..0000000
--- a/aai-resources/src/main/ajsc/ajsc-aai_v1/ajsc-aai/v1/routes/aai.route
+++ /dev/null
@@ -1,4 +0,0 @@
-<route xmlns="http://camel.apache.org/schema/spring" trace="true">
- <from uri="att-dme2-servlet:///aai?matchOnUriPrefix=true" />
- <to uri="cxfbean:jaxrsServices?providers=#jsonProvider&amp;bus=#cxfBus" />
-</route>
\ No newline at end of file
diff --git a/aai-resources/src/main/assemble/ajsc_module_assembly.xml b/aai-resources/src/main/assemble/ajsc_module_assembly.xml
deleted file mode 100644
index 4ec4e28..0000000
--- a/aai-resources/src/main/assemble/ajsc_module_assembly.xml
+++ /dev/null
@@ -1,66 +0,0 @@
-<assembly
- xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
- <id>${version}</id>
- <includeBaseDirectory>false</includeBaseDirectory>
- <formats>
- <format>zip</format>
- </formats>
- <fileSets>
- <fileSet>
- <directory>${project.basedir}/target/versioned-ajsc/routes/</directory>
- <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/routes/</outputDirectory>
- <includes>
- <include>*.route</include>
- </includes>
-
- </fileSet>
-
- <fileSet>
- <directory>${project.basedir}/target/versioned-ajsc/docs/</directory>
- <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/docs/</outputDirectory>
- <includes>
- <include>*.*</include>
- <!-- <include>*.vm</include> -->
- </includes>
-
- </fileSet>
-
- <fileSet>
- <directory>${project.basedir}/target/versioned-ajsc/lib/</directory>
- <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/lib/</outputDirectory>
- <includes>
- <include>*.jar</include>
- </includes>
-
- </fileSet>
- <fileSet>
- <directory>${project.basedir}/target/versioned-ajsc/extJars/</directory>
- <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/extJars/</outputDirectory>
- <includes>
- <include>*.jar</include>
- </includes>
- </fileSet>
-
- <!-- also try to grab outputs from the "jar" plugin's package phase -->
- <fileSet>
- <directory>${project.basedir}/target/</directory>
- <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/lib/</outputDirectory>
- <includes>
- <include>*.jar</include>
- </includes>
- </fileSet>
-
- <fileSet>
- <directory>${project.basedir}/target/versioned-ajsc/conf/</directory>
- <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/conf/</outputDirectory>
- <includes>
- <include>*.*</include>
- </includes>
-
- </fileSet>
- </fileSets>
-
-</assembly>
-
diff --git a/aai-resources/src/main/assemble/ajsc_props_assembly.xml b/aai-resources/src/main/assemble/ajsc_props_assembly.xml
deleted file mode 100644
index 5b8a6fa..0000000
--- a/aai-resources/src/main/assemble/ajsc_props_assembly.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<assembly
- xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
- <id>${version}_properties</id>
- <includeBaseDirectory>false</includeBaseDirectory>
- <formats>
- <format>zip</format>
- </formats>
- <fileSets>
- <fileSet>
- <directory>${project.basedir}/target/versioned-ajsc/props</directory>
- <outputDirectory>${module.ajsc.namespace.name}/${module.ajsc.namespace.version}/props/</outputDirectory>
- <includes>
- <include>*.props</include>
- </includes>
-
- </fileSet>
-
- </fileSets>
-
-</assembly>
-
diff --git a/aai-resources/src/main/assemble/ajsc_runtime_assembly.xml b/aai-resources/src/main/assemble/ajsc_runtime_assembly.xml
deleted file mode 100644
index e37d366..0000000
--- a/aai-resources/src/main/assemble/ajsc_runtime_assembly.xml
+++ /dev/null
@@ -1,44 +0,0 @@
-<assembly
- xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
- <id>runtimeEnvironment</id>
- <includeBaseDirectory>false</includeBaseDirectory>
- <formats>
- <format>zip</format>
- </formats>
- <fileSets>
- <fileSet>
- <directory>${project.basedir}/target/versioned-runtime/context/</directory>
- <outputDirectory>runtime/context/</outputDirectory>
- <includes>
- <include>*.context</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>${project.basedir}/target/versioned-runtime/serviceProperties/</directory>
- <outputDirectory>runtime/serviceProperties/</outputDirectory>
- <includes>
- <include>*.props</include>
- </includes>
- </fileSet><fileSet>
- <directory>${project.basedir}/target/versioned-runtime/shiroRole</directory>
- <outputDirectory>runtime/shiroRole/</outputDirectory>
- <includes>
- <include>*.json</include>
- </includes>
- </fileSet><fileSet>
- <directory>${project.basedir}/target/versioned-runtime/shiroUser</directory>
- <outputDirectory>runtime/shiroUser/</outputDirectory>
- <includes>
- <include>*.json</include>
- </includes>
- </fileSet><fileSet>
- <directory>${project.basedir}/target/versioned-runtime/shiroUserRole</directory>
- <outputDirectory>runtime/shiroUserRole</outputDirectory>
- <includes>
- <include>*.json</include>
- </includes>
- </fileSet>
- </fileSets>
-</assembly>
\ No newline at end of file
diff --git a/aai-resources/src/main/assembly/descriptor.xml b/aai-resources/src/main/assembly/descriptor.xml
new file mode 100644
index 0000000..b3a8ab0
--- /dev/null
+++ b/aai-resources/src/main/assembly/descriptor.xml
@@ -0,0 +1,32 @@
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+ <id>build</id>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <formats>
+ <format>dir</format>
+ </formats>
+ <fileSets>
+ <fileSet>
+ <directory>${project.basedir}/src/main/resources</directory>
+ <outputDirectory>/resources</outputDirectory>
+ <includes>
+ <include>**/*</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${project.basedir}/src/main/scripts</directory>
+ <outputDirectory>/bin</outputDirectory>
+ <includes>
+ <include>**/*</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>${project.build.directory}</directory>
+ <outputDirectory>/lib</outputDirectory>
+ <includes>
+ <include>*.jar</include>
+ </includes>
+ </fileSet>
+ </fileSets>
+</assembly>
diff --git a/aai-resources/src/main/config/ajsc-jetty.xml b/aai-resources/src/main/config/ajsc-jetty.xml
deleted file mode 100644
index de31867..0000000
--- a/aai-resources/src/main/config/ajsc-jetty.xml
+++ /dev/null
@@ -1,164 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
-<Configure id="ajsc-server" class="org.eclipse.jetty.server.Server">
- <!-- DO NOT REMOVE!!!! This is setting up the AJSC Context -->
- <New id="ajscContext" class="org.eclipse.jetty.webapp.WebAppContext">
- <Set name="contextPath"><SystemProperty name="AJSC_CONTEXT_PATH" /></Set>
- <Set name="extractWAR">true</Set>
- <Set name="tempDirectory"><SystemProperty name="AJSC_TEMP_DIR" /></Set>
- <Set name="war"><SystemProperty name="AJSC_WAR_PATH" /></Set>
- <Set name="descriptor"><SystemProperty name="AJSC_HOME" />/etc/runner-web.xml</Set>
- <Set name="overrideDescriptor"><SystemProperty name="AJSC_HOME" />/etc/ajsc-override-web.xml</Set>
- <Set name="throwUnavailableOnStartupException">true</Set>
- <Set name="servletHandler">
- <New class="org.eclipse.jetty.servlet.ServletHandler">
- <Set name="startWithUnavailable">false</Set>
- </New>
- </Set>
- <Set name="extraClasspath">
- <SystemProperty name="AJSC_HOME" />/extJars/aai-core-<SystemProperty name="aai-core.version" />.jar,
- <SystemProperty name="AJSC_HOME" />/extJars/aai-resources.jar,
- <SystemProperty name="AJSC_HOME" />/extJars/logback-core-1.1.7.jar,
- <SystemProperty name="AJSC_HOME" />/extJars/logback-access-1.1.7.jar,
- <SystemProperty name="AJSC_HOME" />/extJars/eelf-core-1.0.0.jar,
- <SystemProperty name="AJSC_HOME" />/extJars/slf4j-api-1.7.21.jar
- </Set>
- </New>
-
- <Set name="handler">
- <New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection">
- <Set name="handlers">
- <Array type="org.eclipse.jetty.server.Handler">
- <Item>
- <New id="Contexts"
- class="org.eclipse.jetty.server.handler.ContextHandlerCollection">
- <Set name="Handlers">
- <Array type="org.eclipse.jetty.webapp.WebAppContext">
- <Item>
- <Ref refid="ajscContext" />
- </Item>
- </Array>
- </Set>
- </New>
- </Item>
- <!-- add a RequestLogHandler -->
- <Item>
- <New id="RequestLogHandler" class="org.eclipse.jetty.server.handler.RequestLogHandler">
- <Set name="requestLog">
- <New id="requestLogImpl" class="ch.qos.logback.access.jetty.RequestLogImpl">
- <Set name="fileName"><SystemProperty name="AJSC_HOME" />/bundleconfig/etc/localhost-access-logback.xml</Set>
- </New>
- </Set>
- </New>
- </Item>
- </Array>
- </Set>
- </New>
- </Set>
-
- <Call name="addBean">
- <Arg>
- <New id="DeploymentManager" class="org.eclipse.jetty.deploy.DeploymentManager">
- <Set name="contexts">
- <Ref refid="Contexts" />
- </Set>
- <Call id="extAppHotDeployProvider" name="addAppProvider">
- <Arg>
- <New class="org.eclipse.jetty.deploy.providers.WebAppProvider">
- <Set name="monitoredDirName"><SystemProperty name="AJSC_HOME" />/extApps</Set>
- <Set name="scanInterval">10</Set>
- <Set name="extractWars">true</Set>
- </New>
- </Arg>
- </Call>
- </New>
- </Arg>
- </Call>
-
- <!--<Call name="addConnector">
- <Arg>
- <New class="org.eclipse.jetty.server.ServerConnector">
- <Arg name="server">
- <Ref refid="ajsc-server" />
- </Arg>
- <Set name="port"><SystemProperty name="AJSC_HTTP_PORT" default="8087" /></Set>
- </New>
- </Arg>
- </Call>-->
-
- <Call id="sslConnector" name="addConnector">
- <Arg>
- <New class="org.eclipse.jetty.server.ServerConnector">
- <Arg name="server">
- <Ref refid="ajsc-server" />
- </Arg>
- <Arg name="factories">
- <Array type="org.eclipse.jetty.server.ConnectionFactory">
- <Item>
- <New class="org.eclipse.jetty.server.SslConnectionFactory">
- <Arg name="next">http/1.1</Arg>
- <Arg name="sslContextFactory">
- <New id="sslContextFactory" class="org.eclipse.jetty.util.ssl.SslContextFactory">
- <Set name="KeyStorePath">file:<SystemProperty name="AJSC_HOME" />/bundleconfig/etc/auth/aai_keystore</Set>
- <Set name="KeyStorePassword">
- <Call class="org.eclipse.jetty.util.security.Password" name="deobfuscate">
- <Arg><SystemProperty name="KEY_STORE_PASSWORD" /></Arg>
- </Call>
- </Set>
- <Set name="KeyManagerPassword">
- <Call class="org.eclipse.jetty.util.security.Password" name="deobfuscate">
- <Arg><SystemProperty name="KEY_MANAGER_PASSWORD" /></Arg>
- </Call>
- </Set>
- <Set name="needClientAuth">false</Set>
- <Set name="ExcludeProtocols">
- <Array type="java.lang.String">
- <Item>SSL</Item>
- <Item>SSLv2</Item>
- <Item>SSLv2Hello</Item>
- <Item>SSLv3</Item>
- <Item>TLSv1</Item>
- </Array>
- </Set>
- </New>
- </Arg>
- </New>
- </Item>
- <Item>
- <New class="org.eclipse.jetty.server.HttpConnectionFactory">
- <Arg name="config">
- <New class="org.eclipse.jetty.server.HttpConfiguration">
- <Call name="addCustomizer">
- <Arg>
- <New class="org.eclipse.jetty.server.SecureRequestCustomizer" />
- </Arg>
- </Call>
- </New>
- </Arg>
- </New>
- </Item>
- </Array>
- </Arg>
- <Set name="port"><SystemProperty name="AJSC_HTTPS_PORT" default="8447" /></Set>
- <Set name="idleTimeout">30000</Set>
- </New>
- </Arg>
- </Call>
-
- <Get name="ThreadPool">
- <Set name="minThreads"><SystemProperty name="AJSC_JETTY_ThreadCount_MIN" /></Set>
- <Set name="maxThreads"><SystemProperty name="AJSC_JETTY_ThreadCount_MAX" /></Set>
- <Set name="idleTimeout"><SystemProperty name="AJSC_JETTY_IDLETIME_MAX" /></Set>
- <Set name="detailedDump">false</Set>
- </Get>
- <Call name="addBean">
- <Arg>
- <New class="org.eclipse.jetty.security.HashLoginService">
- <Set name="name">Test Realm</Set>
- <Set name="config"><SystemProperty name="AJSC_HOME" />/etc/realm.properties</Set>
- <Set name="refreshInterval">5</Set>
- <Call name="start"></Call>
- </New>
- </Arg>
- </Call>
-</Configure>
diff --git a/aai-resources/src/main/config/ajsc-jolokia-override-web.xml b/aai-resources/src/main/config/ajsc-jolokia-override-web.xml
deleted file mode 100644
index b242129..0000000
--- a/aai-resources/src/main/config/ajsc-jolokia-override-web.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
- metadata-complete="false" version="3.0">
-
- <filter-mapping>
- <filter-name>InterceptorFilter</filter-name>
- <url-pattern>/services/*</url-pattern>
- </filter-mapping>
- <filter-mapping>
- <filter-name>InterceptorFilter</filter-name>
- <url-pattern>/rest/*</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>springSecurityFilterChain</filter-name>
- <url-pattern>/*</url-pattern>
- </filter-mapping>
-
- <servlet-mapping>
- <servlet-name>ManagementServlet</servlet-name>
- <url-pattern>/mgmt</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>RestletServlet</servlet-name>
- <url-pattern>/rest/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>CamelServlet</servlet-name>
- <url-pattern>/services/*</url-pattern>
- </servlet-mapping>
-
- <servlet>
- <servlet-name>jolokia-agent</servlet-name>
- <servlet-class>org.jolokia.http.AgentServlet</servlet-class>
- <load-on-startup>2</load-on-startup>
- </servlet>
-
- <servlet-mapping>
- <servlet-name>jolokia-agent</servlet-name>
- <url-pattern>/jolokia/*</url-pattern>
- </servlet-mapping>
-
-</web-app>
\ No newline at end of file
diff --git a/aai-resources/src/main/config/ajsc-override-web.xml b/aai-resources/src/main/config/ajsc-override-web.xml
deleted file mode 100644
index 61e2836..0000000
--- a/aai-resources/src/main/config/ajsc-override-web.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
- metadata-complete="false" version="3.0">
-
- <filter-mapping>
- <filter-name>WriteableRequestFilter</filter-name>
- <url-pattern>/*</url-pattern>
- </filter-mapping>
- <filter-mapping>
- <filter-name>InterceptorFilter</filter-name>
- <url-pattern>/*</url-pattern>
- </filter-mapping>
- <filter-mapping>
- <filter-name>InterceptorFilter</filter-name>
- <url-pattern>/rest/*</url-pattern>
- </filter-mapping>
-
- <filter-mapping>
- <filter-name>springSecurityFilterChain</filter-name>
- <url-pattern>/*</url-pattern>
- </filter-mapping>
-
- <servlet-mapping>
- <servlet-name>ManagementServlet</servlet-name>
- <url-pattern>/mgmt</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>RestletServlet</servlet-name>
- <url-pattern>/rest/*</url-pattern>
- </servlet-mapping>
-
- <servlet-mapping>
- <servlet-name>CamelServlet</servlet-name>
- <url-pattern>/*</url-pattern>
- </servlet-mapping>
-
-
-
-</web-app>
\ No newline at end of file
diff --git a/aai-resources/src/main/config/ajsc-request.xml b/aai-resources/src/main/config/ajsc-request.xml
deleted file mode 100644
index 5d09b7a..0000000
--- a/aai-resources/src/main/config/ajsc-request.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<ns1:ErrorTranslationRequest
- xsi:schemaLocation="http://csi.cingular.com/CSI/Namespaces/Types/Private/ErrorTranslationRequest.xsd ErrorTranslationRequest.xsd"
- xmlns:ns1="http://csi.cingular.com/CSI/Namespaces/Types/Private/ErrorTranslationRequest.xsd"
- xmlns:th="http://csi.cingular.com/CSI/Namespaces/Types/Private/Implementation/TransactionHeader.xsd"
- xmlns:err="http://csi.cingular.com/CSI/Namespaces/Types/Public/ErrorResponse.xsd"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
- <ns1:TransactionHeader>
- <th:applicationId></th:applicationId>
- <th:MessageQualifier>
- <th:messageQualifier></th:messageQualifier>
- </th:MessageQualifier>
- <th:activityCode></th:activityCode>
- <th:sequenceNumber></th:sequenceNumber>
- <th:extendedSequenceNumber></th:extendedSequenceNumber>
- <th:creationDate></th:creationDate>
- <th:transactionDate></th:transactionDate>
- <th:timeToLive></th:timeToLive>
- <th:reasonCode></th:reasonCode>
- <th:systemId></th:systemId>
- <th:operatorId></th:operatorId>
- <th:reference></th:reference>
- <th:replyToAddress></th:replyToAddress>
- <th:originatorId></th:originatorId>
- <th:atlasMessageId></th:atlasMessageId>
- </ns1:TransactionHeader>
- <ns1:operation></ns1:operation>
- <ns1:ServiceEntityFault>
- <err:reportingServiceEntity></err:reportingServiceEntity>
- <err:faultDate></err:faultDate>
- <err:faultSequenceNumber></err:faultSequenceNumber>
- <err:faultLevel></err:faultLevel>
- <err:faultCode></err:faultCode>
- <err:faultDescription></err:faultDescription>
- <err:ServiceProviderRawError>
- <err:code></err:code>
- <err:description></err:description>
- <err:BISError>
- <err:code></err:code>
- <err:description></err:description>
- <err:origination></err:origination>
- <err:severity></err:severity>
- </err:BISError>
- </err:ServiceProviderRawError>
- </ns1:ServiceEntityFault>
- <ns1:conversationID></ns1:conversationID>
- <ns1:partnerName></ns1:partnerName>
- <ns1:isRESTService></ns1:isRESTService>
-</ns1:ErrorTranslationRequest>
\ No newline at end of file
diff --git a/aai-resources/src/main/config/hazelcast-client.properties b/aai-resources/src/main/config/hazelcast-client.properties
deleted file mode 100644
index 2624d3f..0000000
--- a/aai-resources/src/main/config/hazelcast-client.properties
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Copyright (c) 2008-2013, Hazelcast, Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-hazelcast.client.group.name = ajsc
-hazelcast.client.group.pass = ajscpass
-hazelcast.client.connection.timeout = 30000
-hazelcast.client.connection.attempts.limit = 3
-hazelcast.client.reconnection.timeout = 5000
-hazelcast.client.reconnection.attempts.limit= 5
-hazelcast.client.shuffle.addresses = false
-hazelcast.client.update.automatic = true
-hazelcast.client.addresses = localhost, 127.0.0.1
\ No newline at end of file
diff --git a/aai-resources/src/main/config/jul-redirect.properties b/aai-resources/src/main/config/jul-redirect.properties
deleted file mode 100644
index 8b6624d..0000000
--- a/aai-resources/src/main/config/jul-redirect.properties
+++ /dev/null
@@ -1,13 +0,0 @@
-
-# Bridge JUL->slf4j Logging Configuration File
-#
-# This file bridges the JUL logging infrastructure into
-# SLF4J so JUL logs go to logback implementation provided
-# in this project. SLF4J also captures log4j and has
-# other framework options as well providing a common
-# logging infrastructure for capturing all logs from different
-# libraries using different frameworks in one place.
-
-# Global properties
-handlers=org.slf4j.bridge.SLF4JBridgeHandler
-.level= ALL
diff --git a/aai-resources/src/main/config/keyfile b/aai-resources/src/main/config/keyfile
deleted file mode 100644
index 6a1657e..0000000
--- a/aai-resources/src/main/config/keyfile
+++ /dev/null
@@ -1,27 +0,0 @@
-ctRt8XTd7N57kcm0npZOWSDF5I69w9K97cQS_ep0AgxgHmYB0WtYblsrMGuHfyS1o4697zLiIeoS
-Nn5kE1kedl4c4HevfuwfoJpWyiugYusNOqbTGQJ1MHOwqiBEJnjXepZEoz1btaW_hDO7uz-BoD4t
-SxwNRwVQpcg0_CmBX-yIW2YCIECoxZH9_X_8fcXYHP2VgFxxBpvjgycNQlyN15_VSuLwn3Wj0W8_
-8chRxGURyhp8iEBSb4tIdN5jXkhCma7AP7wreMufFQqXjdfWqIisJPfIpS3znl5IiTOZP22XhHay
-gq2KFwABVqjM71m5czEz1ojGkbFEAGImrY-VFHuug2u4ss4VW7TGeJst0z7I5vrn5M6i9Eb6xiGh
-jNUebRCV3cYGrtD9SlvjJBVVeP_3OrkxlD4oktx-JTRJzYtXADB5if2gtpYxy84kqrz7ltr5rXUH
-zSG7ujKCXOOE_Wk6vQPSjYPnum6R_mxOorCNCvtf6ne85Xd81DZlJM-CleVNdOU7g1xie-gBZPAX
-bOvWf6p_pVNmH76v-m4XLAAUqEzt-9PvNmirODiDiY5bNz6l-1ejw8IyQYb37e_3sN_LjF7A9HgB
-Dia7kNjsfB7_2vB7R4qjwNLsmTMnQCDANnNpl9VpotZ4blPhhOWhB1Tg3lxc-z-VRV7GBbl_2eQd
-3eYUT1Z5Li184W4-pft_TCaDJ1NyaJd1CQxQEuIORdq5B6Q2L9SMmmOOh82Czu5_Ro80IGikHXHp
-Lqf2fIaceY_IBAeGp2iPjtXdkghV24vIT49oRfqf6sBKAPy-88xILnMWM6M5bMCETKn7UvM1kV5y
-ZQYlsi-36n73ETZyiFs1PLqe8D6dRURrcBG_B9i1MafNiWa-elG6E0X0pSK9CadchSA0KRMaKtfE
-6-iyUqE-bx-0ELTbV2y7gLdu5MVtjRmQB5ozoaBq8ik4-jAWAsKpTv4DfWoMp9DkRENlKeauayuT
-j_VAGhqy07pIntQKtbK9EP0tndSKtF3WLwHel1I5C3lthhkxxfzpxURBxO1ZJMFJZ6rLu1Ku03zw
-LJ7nFFR_YfJ7tnGZE4PEt7MOZNiNoD3__9PthO5HmZdk1gPMrKlojU1hyR3IlbVShUst6rA3MkWk
-MD-zlw9mhNgaV3xvPJ945pYPe4C6qIwxXoiXGHyhv_0MpcvuMW-pUuAZXfkuiqNwQnpUTLBD0YJw
-uwMbE7sN40e6-BSxEiMOab7s2gShbaK9JjCMQUH_vAuQSZjU4sn53jsS7U4DHntzgxVYttIwGZaU
-b-1R7jYphNJnCI8rPB_xjJ0OMssNKT7lYRgG_ZuKvifYvJWt-NwD0z2qoePcRGExXuioRDNR4SlB
--RN33dYhp6vRsHKT1oLpl-UJB6dqJlZ2dCsfc7vT1Vs0SYidRYXCUJNBSePI4-1LMlHKOqGASBcg
-pl589601-EtO7ch3RoaL26rNXzA-umUWYRPQPZ76wcgK2j4k5Ndub5dWK9jI6UW3RbF6ixe0Yw2j
-_Pipt4EX8R6-sb87D69JOOnZlFVB6EcCO07Q7j6DavpUNHlLmDmPgArqODh002scvW1ryMxBR2XE
-m3kGQh2IFh5Qru8duxblEYE-lmHGxXVgDtKiKgHwPTkaxcquEtZTEJxaIJIgoKj7SgMzdfbeLlJM
-RwbdvExmnRT9ivFImeIV7ACPnfBP3URd82kTG8FyiMvSpdCLL16FWOd9gjZuMstqZrmIVF8tO2WT
-COMIx-jqvQD2zS1Ul5p0szJaf-CxBjy7-cJIaAyEToR1T5bBFtQt4sEFxG7XG0cCoXShqclL70TV
-W13X5pY55YwHkCR4mRjc0o0ZKStY3OADVLFom1bC9AmMBqU4PsKNAX29LT37WE-I23tQgzid0Ix9
-JuVzlbOTvi19uLYbltrHavU3UbVhYxNNI7Y7tM02xfq3LhGqZG5EPS-WAB9bBixHQqw78cd9iqIr
-hHlZW80l1kgs1ezMqgxfwDuiFOZIu9UWQ6vSnTAvfhwJhcr77gSk5Gu957uxzleaS4gVwTYU
diff --git a/aai-resources/src/main/config/logback-migration.xml b/aai-resources/src/main/config/logback-migration.xml
deleted file mode 100644
index 88ade33..0000000
--- a/aai-resources/src/main/config/logback-migration.xml
+++ /dev/null
@@ -1,56 +0,0 @@
-<!--
- ============LICENSE_START=======================================================
- org.onap.aai
- ================================================================================
- Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
- -->
-
-<configuration>
- <appender name="MIGRATION_FILE_LOG"
- class="ch.qos.logback.core.FileAppender">
- <append>true</append>
- <file>logs/migrationAic3.log</file>
- <encoder class="org.onap.aai.logging.CustomLogPatternLayoutEncoder">
- <Pattern>%a %u %z [%t] "%m %U" %s %b</Pattern>
- </encoder>
- </appender>
- <root level="DEBUG">
- <appender-ref ref="MIGRATION_FILE_LOG" />
- </root>
-</configuration>
-
-<!--
-%a - Remote IP address
-%A - Local IP address
-%b - Bytes sent, excluding HTTP headers, or '-' if no bytes were sent
-%B - Bytes sent, excluding HTTP headers
-%h - Remote host name
-%H - Request protocol
-%l - Remote logical username from identd (always returns '-')
-%m - Request method
-%p - Local port
-%q - Query string (prepended with a '?' if it exists, otherwise an empty string
-%r - First line of the request
-%s - HTTP status code of the response
-%S - User session ID
-%t - Date and time, in Common Log Format format
-%u - Remote user that was authenticated
-%U - Requested URL path
-%v - Local server name
-%I - current request thread name (can compare later with stacktraces)
-
-%z - Custom pattern that parses the cert for the subject
- -->
diff --git a/aai-resources/src/main/config/runner-web.xml b/aai-resources/src/main/config/runner-web.xml
deleted file mode 100644
index 95d663c..0000000
--- a/aai-resources/src/main/config/runner-web.xml
+++ /dev/null
@@ -1,106 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
- metadata-complete="false" version="3.0">
-
- <context-param>
- <param-name>contextConfigLocation</param-name>
- <param-value>/WEB-INF/spring-servlet.xml,
- classpath:applicationContext.xml
- </param-value>
- </context-param>
-
- <context-param>
- <param-name>spring.profiles.default</param-name>
- <param-value>nooauth</param-value>
- </context-param>
-
- <listener>
- <listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
- </listener>
-
- <servlet>
- <servlet-name>ManagementServlet</servlet-name>
- <servlet-class>ajsc.ManagementServlet</servlet-class>
- </servlet>
-
- <filter>
- <filter-name>WriteableRequestFilter</filter-name>
- <filter-class>com.att.ajsc.csi.writeablerequestfilter.WriteableRequestFilter</filter-class>
- </filter>
-
- <filter>
- <filter-name>InterceptorFilter</filter-name>
- <filter-class>ajsc.filters.InterceptorFilter</filter-class>
- <init-param>
- <param-name>preProcessor_interceptor_config_file</param-name>
- <param-value>/etc/PreProcessorInterceptors.properties</param-value>
- </init-param>
- <init-param>
- <param-name>postProcessor_interceptor_config_file</param-name>
- <param-value>/etc/PostProcessorInterceptors.properties</param-value>
- </init-param>
-
- </filter>
-
- <servlet>
- <servlet-name>RestletServlet</servlet-name>
- <servlet-class>ajsc.restlet.RestletSpringServlet</servlet-class>
- <init-param>
- <param-name>org.restlet.component</param-name>
- <param-value>restletComponent</param-value>
- </init-param>
- </servlet>
-
- <servlet>
- <servlet-name>CamelServlet</servlet-name>
- <servlet-class>ajsc.servlet.AjscCamelServlet</servlet-class>
- </servlet>
-
-
- <filter>
- <filter-name>springSecurityFilterChain</filter-name>
- <filter-class>org.springframework.web.filter.DelegatingFilterProxy</filter-class>
- </filter>
-
- <servlet>
- <servlet-name>spring</servlet-name>
- <servlet-class>org.springframework.web.servlet.DispatcherServlet</servlet-class>
- <load-on-startup>1</load-on-startup>
- </servlet>
-
-
-
- <servlet-mapping>
- <servlet-name>spring</servlet-name>
- <url-pattern>/</url-pattern>
- </servlet-mapping>
-
- <listener>
- <listener-class>
- org.onap.aai.util.AAIAppServletContextListener
- </listener-class>
- </listener>
-
- <security-constraint>
- <web-resource-collection>
- <web-resource-name>Open Source</web-resource-name>
- <url-pattern>/aai/*</url-pattern>
- </web-resource-collection>
- <auth-constraint>
- <role-name>admin</role-name>
- </auth-constraint>
- <user-data-constraint>
- <transport-guarantee>CONFIDENTIAL</transport-guarantee>
- </user-data-constraint>
- </security-constraint>
-
- <login-config>
- <auth-method>BASIC</auth-method>
- <realm-name>Test Realm</realm-name>
- </login-config>
-
- <security-role>
- <role-name>admin</role-name>
- </security-role>
-</web-app>
diff --git a/aai-resources/src/main/docker/Dockerfile b/aai-resources/src/main/docker/Dockerfile
new file mode 100644
index 0000000..be8af9c
--- /dev/null
+++ b/aai-resources/src/main/docker/Dockerfile
@@ -0,0 +1,22 @@
+FROM aaionap/aai-common:1.2.0
+
+
+# Add the proper files into the docker image from your build
+WORKDIR /opt/app/aai-resources
+
+# Expose the ports for outside linux to use
+# 8447 is the important one to be used
+EXPOSE 8447
+
+
+HEALTHCHECK --interval=40s --timeout=10s --retries=3 CMD nc -z -v localhost 8447 || exit 1
+
+ENTRYPOINT ["/bin/bash", "/opt/app/aai-resources/docker-entrypoint.sh"]
+
+RUN mkdir -p /opt/aaihome/aaiadmin /opt/aai/logroot/AAI-RES
+
+VOLUME /opt/aai/logroot/AAI-RES
+
+COPY /maven/aai-resources/ .
+
+ENV AAI_BUILD_VERSION @aai.docker.version@
diff --git a/aai-resources/src/main/resources/docker/aai.sh b/aai-resources/src/main/docker/aai.sh
index 2e2c35f..2e2c35f 100644
--- a/aai-resources/src/main/resources/docker/aai.sh
+++ b/aai-resources/src/main/docker/aai.sh
diff --git a/aai-resources/src/main/docker/docker-entrypoint.sh b/aai-resources/src/main/docker/docker-entrypoint.sh
new file mode 100644
index 0000000..05abc71
--- /dev/null
+++ b/aai-resources/src/main/docker/docker-entrypoint.sh
@@ -0,0 +1,110 @@
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+APP_HOME=$(pwd);
+RESOURCES_HOME=${APP_HOME}/resources/;
+
+export CHEF_CONFIG_REPO=${CHEF_CONFIG_REPO:-aai-config};
+export CHEF_GIT_URL=${CHEF_GIT_URL:-http://gerrit.onap.org/r/aai};
+export CHEF_CONFIG_GIT_URL=${CHEF_CONFIG_GIT_URL:-$CHEF_GIT_URL};
+export CHEF_DATA_GIT_URL=${CHEF_DATA_GIT_URL:-$CHEF_GIT_URL};
+
+export SERVER_PORT=${SERVER_PORT:-8447};
+
+USER_ID=${LOCAL_USER_ID:-9001}
+GROUP_ID=${LOCAL_GROUP_ID:-9001}
+
+ln -s bin scripts
+ln -s /opt/aai/logroot/AAI-RES logs
+
+echo "Project Build Version: ${aai.build.version}";
+
+if [ $(cat /etc/passwd | grep aaiadmin | wc -l) -eq 0 ]; then
+ groupadd aaiadmin -g ${GROUP_ID} || {
+ echo "Unable to create the group id for ${GROUP_ID}";
+ exit 1;
+ }
+ useradd --shell=/bin/bash -u ${USER_ID} -g ${GROUP_ID} -o -c "" -m aaiadmin || {
+ echo "Unable to create the user id for ${USER_ID}";
+ exit 1;
+ }
+fi;
+
+chown -R aaiadmin:aaiadmin /opt/app /opt/aai/logroot /var/chef
+find /opt/app/ -name "*.sh" -exec chmod +x {} +
+
+if [ -f ${APP_HOME}/aai.sh ]; then
+ mv ${APP_HOME}/aai.sh /etc/profile.d/aai.sh
+ chmod 755 /etc/profile.d/aai.sh
+
+ gosu aaiadmin /opt/app/aai-resources/scripts/createDBSchema.sh || exit 1
+fi;
+
+JAVA_CMD="exec gosu aaiadmin java";
+
+JVM_OPTS="${PRE_JVM_OPTS} -XX:+UnlockDiagnosticVMOptions";
+JVM_OPTS="${JVM_OPTS} -XX:+UnsyncloadClass";
+JVM_OPTS="${JVM_OPTS} -XX:+UseConcMarkSweepGC";
+JVM_OPTS="${JVM_OPTS} -XX:+CMSParallelRemarkEnabled";
+JVM_OPTS="${JVM_OPTS} -XX:+UseCMSInitiatingOccupancyOnly";
+JVM_OPTS="${JVM_OPTS} -XX:CMSInitiatingOccupancyFraction=70";
+JVM_OPTS="${JVM_OPTS} -XX:+ScavengeBeforeFullGC";
+JVM_OPTS="${JVM_OPTS} -XX:+CMSScavengeBeforeRemark";
+JVM_OPTS="${JVM_OPTS} -XX:-HeapDumpOnOutOfMemoryError";
+JVM_OPTS="${JVM_OPTS} -XX:+UseParNewGC";
+JVM_OPTS="${JVM_OPTS} -verbose:gc";
+JVM_OPTS="${JVM_OPTS} -XX:+PrintGCDetails";
+JVM_OPTS="${JVM_OPTS} -XX:+PrintGCTimeStamps";
+JVM_OPTS="${JVM_OPTS} -XX:MaxPermSize=512M";
+JVM_OPTS="${JVM_OPTS} -XX:PermSize=512M";
+JVM_OPTS="${JVM_OPTS} -server";
+JVM_OPTS="${JVM_OPTS} -XX:NewSize=512m";
+JVM_OPTS="${JVM_OPTS} -XX:MaxNewSize=512m";
+JVM_OPTS="${JVM_OPTS} -XX:SurvivorRatio=8";
+JVM_OPTS="${JVM_OPTS} -XX:+DisableExplicitGC";
+JVM_OPTS="${JVM_OPTS} -verbose:gc";
+JVM_OPTS="${JVM_OPTS} -XX:+UseParNewGC";
+JVM_OPTS="${JVM_OPTS} -XX:+CMSParallelRemarkEnabled";
+JVM_OPTS="${JVM_OPTS} -XX:+CMSClassUnloadingEnabled";
+JVM_OPTS="${JVM_OPTS} -XX:+UseConcMarkSweepGC";
+JVM_OPTS="${JVM_OPTS} -XX:-UseBiasedLocking";
+JVM_OPTS="${JVM_OPTS} -XX:ParallelGCThreads=4";
+JVM_OPTS="${JVM_OPTS} -XX:LargePageSizeInBytes=128m";
+JVM_OPTS="${JVM_OPTS} -XX:+PrintGCDetails";
+JVM_OPTS="${JVM_OPTS} -XX:+PrintGCTimeStamps";
+JVM_OPTS="${JVM_OPTS} -Xloggc:/opt/app/aai-resources/logs/ajsc-jetty/gc/aai_gc.log";
+JVM_OPTS="${JVM_OPTS} -Dsun.net.inetaddr.ttl=180";
+JVM_OPTS="${JVM_OPTS} -XX:+HeapDumpOnOutOfMemoryError";
+JVM_OPTS="${JVM_OPTS} -XX:HeapDumpPath=/opt/app/aai-resources/logs/ajsc-jetty/heap-dump";
+JVM_OPTS="${JVM_OPTS} ${POST_JVM_OPTS}";
+
+JAVA_OPTS="${PRE_JAVA_OPTS} -DAJSC_HOME=$APP_HOME";
+JAVA_OPTS="${JAVA_OPTS} -Dserver.port=${SERVER_PORT}";
+JAVA_OPTS="${JAVA_OPTS} -DBUNDLECONFIG_DIR=./resources";
+JAVA_OPTS="${JAVA_OPTS} -Dserver.local.startpath=${RESOURCES_HOME}";
+JAVA_OPTS="${JAVA_OPTS} -DAAI_CHEF_ENV=${AAI_CHEF_ENV}";
+JAVA_OPTS="${JAVA_OPTS} -DSCLD_ENV=${SCLD_ENV}";
+JAVA_OPTS="${JAVA_OPTS} -Djava.security.egd=file:/dev/./urandom";
+JAVA_OPTS="${JAVA_OPTS} -Dloader.path=$APP_HOME/resources";
+JAVA_OPTS="${JAVA_OPTS} ${POST_JAVA_OPTS}";
+
+JAVA_MAIN_JAR=$(ls lib/aai-resources*.jar);
+
+${JAVA_CMD} ${JVM_OPTS} ${JAVA_OPTS} -jar ${JAVA_MAIN_JAR};
diff --git a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesListener.java b/aai-resources/src/main/java/org/onap/aai/Profiles.java
index 38ea8c6..9f00466 100644
--- a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesListener.java
+++ b/aai-resources/src/main/java/org/onap/aai/Profiles.java
@@ -19,20 +19,14 @@
*
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*/
-package org.onap.aai.ajsc_aai.filemonitor;
+package org.onap.aai;
-import java.io.File;
+public final class Profiles {
-//import com.att.ssf.filemonitor.FileChangedListener;
+ public static final String DMAAP = "dmaap";
-//public class ServicePropertiesListener implements FileChangedListener {
+ public static final String ONE_WAY_SSL = "one-way-ssl";
+ public static final String TWO_WAY_SSL = "two-way-ssl";
- /**
- * {@inheritDoc}
- */
- //@Override
- //public void update(File file) throws Exception
- //{
- //ServicePropertiesMap.refresh(file);
- //}
-//}
+ private Profiles(){}
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/ResourcesApp.java b/aai-resources/src/main/java/org/onap/aai/ResourcesApp.java
new file mode 100644
index 0000000..c435053
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/ResourcesApp.java
@@ -0,0 +1,159 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.ModelInjestor;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.migration.MigrationControllerInternal;
+import org.onap.aai.util.AAIConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
+import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
+import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
+import org.springframework.boot.autoconfigure.web.WebMvcAutoConfiguration;
+import org.springframework.cloud.netflix.ribbon.RibbonClient;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.core.env.Environment;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import java.util.UUID;
+
+@SpringBootApplication
+// Component scan tells Spring which packages to search for beans.
+// Only the packages listed below are scanned: any method annotated
+// with @Bean, or any class annotated with @Component, @Configuration
+// or @Service in these packages, will be picked up.
+@ComponentScan(basePackages = {
+ "org.onap.aai.config",
+ "org.onap.aai.web",
+ "org.onap.aai.tasks",
+ "org.onap.aai.rest"
+})
+@EnableAutoConfiguration(exclude = {
+ DataSourceAutoConfiguration.class,
+ DataSourceTransactionManagerAutoConfiguration.class,
+ HibernateJpaAutoConfiguration.class
+})
+@RibbonClient(name = "dmaap", configuration = AAIRibbonConfiguration.class)
+public class ResourcesApp {
+
+ private static final EELFLogger logger = EELFManager.getInstance().getLogger(ResourcesApp.class.getName());
+
+ private static final String APP_NAME = "aai-resources";
+
+ @Autowired
+ private Environment env;
+
+ @PostConstruct
+ private void init() throws AAIException {
+ System.setProperty("org.onap.aai.serverStarted", "false");
+ setDefaultProps();
+
+ LoggingContext.save();
+ LoggingContext.component("init");
+ LoggingContext.partnerName("NA");
+ LoggingContext.targetEntity(APP_NAME);
+ LoggingContext.requestId(UUID.randomUUID().toString());
+ LoggingContext.serviceName(APP_NAME);
+ LoggingContext.targetServiceName("contextInitialized");
+
+ logger.info("AAI Server initialization started...");
+
+		// Setting this property to allow an encoded slash (/) in path parameters.
+		// This is only needed for Tomcat; keep it as a temporary workaround.
+ System.setProperty("org.apache.tomcat.util.buf.UDecoder.ALLOW_ENCODED_SLASH", "true");
+
+ logger.info("Starting AAIGraph connections and the ModelInjestor");
+
+ if(env.acceptsProfiles(Profiles.TWO_WAY_SSL) && env.acceptsProfiles(Profiles.ONE_WAY_SSL)){
+			logger.warn("Both one-way-ssl and two-way-ssl profiles are active; these profiles are mutually exclusive - fix the active profile configuration");
+ }
+
+ AAIConfig.init();
+ ModelInjestor.getInstance();
+ AAIGraph.getInstance();
+ }
+
+ @PreDestroy
+ public void cleanup(){
+ logger.info("Shutting down both realtime and cached connections");
+ AAIGraph.getInstance().graphShutdown();
+ }
+
+ public static void main(String[] args) {
+
+ setDefaultProps();
+ SpringApplication app = new SpringApplication(ResourcesApp.class);
+ app.setRegisterShutdownHook(true);
+ app.addInitializers(new PropertyPasswordConfiguration());
+ Environment env = app.run(args).getEnvironment();
+
+ logger.info(
+ "Application '{}' is running on {}!" ,
+ env.getProperty("spring.application.name"),
+ env.getProperty("server.port")
+ );
+
+ if ("true".equals(AAIConfig.get("aai.run.migrations", "false"))) {
+ MigrationControllerInternal migrations = new MigrationControllerInternal();
+ migrations.run(new String[]{"--commit"});
+ }
+
+ logger.info("Resources MicroService Started");
+ logger.error("Resources MicroService Started");
+ logger.debug("Resources MicroService Started");
+ System.out.println("Resources Microservice Started");
+ }
+
+ public static void setDefaultProps(){
+
+ if (System.getProperty("file.separator") == null) {
+ System.setProperty("file.separator", "/");
+ }
+
+ String currentDirectory = System.getProperty("user.dir");
+
+ if (System.getProperty("AJSC_HOME") == null) {
+ System.setProperty("AJSC_HOME", ".");
+ }
+
+ if(currentDirectory.contains(APP_NAME)){
+ if (System.getProperty("BUNDLECONFIG_DIR") == null) {
+ System.setProperty("BUNDLECONFIG_DIR", "src/main/resources");
+ }
+ } else {
+ if (System.getProperty("BUNDLECONFIG_DIR") == null) {
+ System.setProperty("BUNDLECONFIG_DIR", "aai-resources/src/main/resources");
+ }
+ }
+
+ }
+}
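
Note: ResourcesApp only reacts to the profiles in init(); it does not activate them itself. A minimal launch sketch, assuming profiles are supplied programmatically (property-based activation via spring.profiles.active behaves the same way; the chosen profile here is only an example):

    // Hypothetical launcher: make "one-way-ssl" active before the context starts,
    // so env.acceptsProfiles(Profiles.ONE_WAY_SSL) in init() evaluates to true.
    SpringApplication app = new SpringApplication(ResourcesApp.class);
    app.setAdditionalProfiles(Profiles.ONE_WAY_SSL);
    app.setRegisterShutdownHook(true);
    app.addInitializers(new PropertyPasswordConfiguration());
    app.run(args);
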
diff --git a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/JaxrsErrorMessageLookupService.java b/aai-resources/src/main/java/org/onap/aai/ajsc_aai/JaxrsErrorMessageLookupService.java
deleted file mode 100644
index f7b08d9..0000000
--- a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/JaxrsErrorMessageLookupService.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.onap.aai.ajsc_aai;
-
-//import java.util.HashMap;
-//import java.util.Map;
-
-//import javax.ws.rs.GET;
-//import javax.ws.rs.HeaderParam;
-//import javax.ws.rs.Path;
-//import javax.ws.rs.PathParam;
-//import javax.ws.rs.Produces;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//import org.springframework.web.context.ContextLoader;
-//import org.springframework.web.context.WebApplicationContext;
-
-//import ajsc.ErrorMessageLookupService;
-
-//@Path("/errormessage")
-//public class JaxrsErrorMessageLookupService {
-
- //private final static Logger logger = LoggerFactory
- //.getLogger(ErrorMessageLookupService.class);
-
- /**
- * Gets the message.
- *
- * @param input the input
- * @param errorCode the error code
- * @param appId the app id
- * @param operation the operation
- * @param messageText the message text
- * @param isRESTService the is REST service
- * @param faultEntity the fault entity
- * @param ConvID the conv ID
- * @return the message
- */
- //@GET
- //@Path("/emls")
- //@Produces("text/plain")
- //public String getMessage(@PathParam("input") String input,
- //@HeaderParam("errorCode") String errorCode,
- //@HeaderParam("appId") String appId,
- //@HeaderParam("operation") String operation,
- //@HeaderParam("messageText") String messageText,
- //@HeaderParam("isRESTService") String isRESTService,
- //@HeaderParam("faultEntity") String faultEntity,
- //@HeaderParam("ConvID") String ConvID) {
-
- //Map<String, String> headers = new HashMap<String, String>();
- //headers.put(errorCode, errorCode);
- //headers.put(appId, appId);
- //headers.put(operation, operation);
- //headers.put(messageText, messageText);
- //headers.put(isRESTService, isRESTService);
- //headers.put(faultEntity, faultEntity);
- //headers.put(ConvID, ConvID);
-
- //WebApplicationContext applicationContext = ContextLoader
- //.getCurrentWebApplicationContext();
-
- //ErrorMessageLookupService e = (ErrorMessageLookupService) applicationContext
- //.getBean("errorMessageLookupService");
-
- //String message = e.getExceptionDetails(appId, operation, errorCode,
- //messageText,isRESTService, faultEntity, ConvID);
-
- //System.out.println("Error code = " + errorCode);
- //System.out.println("appId = " + appId);
- //System.out.println("operation = " + operation);
- //System.out.println("messageText = " + messageText);
- //System.out.println("isRESTService = " + isRESTService);
- //System.out.println("faultEntity = " + faultEntity);
- //System.out.println("ConvID = " + ConvID);
- //return "The exception message is:\n " + message;
- //}
-
-//}
diff --git a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesMap.java b/aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesMap.java
deleted file mode 100644
index 7274c61..0000000
--- a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesMap.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.onap.aai.ajsc_aai.filemonitor;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-public class ServicePropertiesMap
-{
- private static HashMap<String, HashMap<String, String>> mapOfMaps = new HashMap<String, HashMap<String, String>>();
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(ServicePropertiesMap.class);
-
- /**
- * Refresh.
- *
- * @param file the file
- * @throws Exception the exception
- */
- public static void refresh(File file) throws Exception
- {
- try
- {
- LOGGER.info("Loading properties - " + (file != null?file.getName():""));
-
- //Store .json & .properties files into map of maps
- String filePath = file.getPath();
-
- if(filePath.lastIndexOf(".json")>0){
-
- ObjectMapper om = new ObjectMapper();
- TypeReference<HashMap<String, String>> typeRef = new TypeReference<HashMap<String, String>>() {};
- HashMap<String, String> propMap = om.readValue(file, typeRef);
- HashMap<String, String> lcasePropMap = new HashMap<String, String>();
- for (String key : propMap.keySet() )
- {
- String lcaseKey = ifNullThenEmpty(key);
- lcasePropMap.put(lcaseKey, propMap.get(key));
- }
-
- mapOfMaps.put(file.getName(), lcasePropMap);
-
-
- }else if(filePath.lastIndexOf(".properties")>0){
- Properties prop = new Properties();
- FileInputStream fis = new FileInputStream(file);
- prop.load(fis);
-
- @SuppressWarnings("unchecked")
- HashMap<String, String> propMap = new HashMap<String, String>((Map)prop);
-
- mapOfMaps.put(file.getName(), propMap);
- }
-
- LOGGER.info("File - " + file.getName() + " is loaded into the map and the corresponding system properties have been refreshed");
- }
- catch (Exception e)
- {
- LOGGER.error("File " + (file != null?file.getName():"") + " cannot be loaded into the map ", e);
- throw new Exception("Error reading map file " + (file != null?file.getName():""), e);
- }
- }
-
- /**
- * Gets the property.
- *
- * @param fileName the file name
- * @param propertyKey the property key
- * @return the property
- */
- public static String getProperty(String fileName, String propertyKey)
- {
- HashMap<String, String> propMap = mapOfMaps.get(fileName);
- return propMap!=null?propMap.get(ifNullThenEmpty(propertyKey)):"";
- }
-
- /**
- * Gets the properties.
- *
- * @param fileName the file name
- * @return the properties
- */
- public static HashMap<String, String> getProperties(String fileName){
- return mapOfMaps.get(fileName);
- }
-
- /**
- * If null then empty.
- *
- * @param key the key
- * @return the string
- */
- private static String ifNullThenEmpty(String key) {
- if (key == null) {
- return "";
- } else {
- return key;
- }
- }
-
-}
diff --git a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertyService.java b/aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertyService.java
deleted file mode 100644
index 956d0e4..0000000
--- a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertyService.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.onap.aai.ajsc_aai.filemonitor;
-
-//import java.io.File;
-//import java.io.FileInputStream;
-//import java.io.IOException;
-//import java.lang.reflect.Method;
-//import java.util.ArrayList;
-//import java.util.List;
-//import java.util.Properties;
-
-//import javax.annotation.PostConstruct;
-
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-
-//import com.att.ssf.filemonitor.FileChangedListener;
-//import com.att.ssf.filemonitor.FileMonitor;
-
-//public class ServicePropertyService {
- //private boolean loadOnStartup;
- //private ServicePropertiesListener fileChangedListener;
- //private ServicePropertiesMap filePropertiesMap;
- //private String ssfFileMonitorPollingInterval;
- //private String ssfFileMonitorThreadpoolSize;
- //private List<File> fileList;
- //private static final String FILE_CHANGE_LISTENER_LOC = System
- //.getProperty("AJSC_CONF_HOME") + "/etc";
- //private static final String USER_CONFIG_FILE = "service-file-monitor.properties";
- //static final Logger logger = LoggerFactory
- //.getLogger(ServicePropertyService.class);
-
- //// do not remove the postConstruct annotation, init method will not be
- //// called after constructor
- /**
- * Inits the.
- *
- * @throws Exception the exception
- */
- //@PostConstruct
- //public void init() throws Exception {
-
- //try {
- //getFileList(FILE_CHANGE_LISTENER_LOC);
-
- //for (File file : fileList) {
- //try {
- //FileChangedListener fileChangedListener = this.fileChangedListener;
- //Object filePropertiesMap = this.filePropertiesMap;
- //Method m = filePropertiesMap.getClass().getMethod(
- //"refresh", File.class);
- //m.invoke(filePropertiesMap, file);
- //FileMonitor fm = FileMonitor.getInstance();
- //fm.addFileChangedListener(file, fileChangedListener,
- //loadOnStartup);
- //} catch (Exception ioe) {
- //logger.error("Error in the file monitor block", ioe);
- //}
- //}
- //} catch (Exception ex) {
- //logger.error("Error creating property map ", ex);
- //}
-
- //}
-
- /**
- * Gets the file list.
- *
- * @param dirName the dir name
- * @return the file list
- * @throws IOException Signals that an I/O exception has occurred.
- */
- //private void getFileList(String dirName) throws IOException {
- //File directory = new File(dirName);
- //FileInputStream fis = null;
-
- //if (fileList == null)
- //fileList = new ArrayList<File>();
-
- //// get all the files that are ".json" or ".properties", from a directory
- //// & it's sub-directories
- //File[] fList = directory.listFiles();
-
- //for (File file : fList) {
- //// read service property files from the configuration file
- //if (file.isFile() && file.getPath().endsWith(USER_CONFIG_FILE)) {
- //try {
- //fis = new FileInputStream(file);
- //Properties prop = new Properties();
- //prop.load(fis);
-
- //for (String filePath : prop.stringPropertyNames()) {
- //fileList.add(new File(prop.getProperty(filePath)));
- //}
- //} catch (Exception ioe) {
- //logger.error("Error reading the file stream ", ioe);
- //} finally {
- //fis.close();
- //}
- //} else if (file.isDirectory()) {
- //getFileList(file.getPath());
- //}
- //}
-
- //}
-
- /**
- * Sets the load on startup.
- *
- * @param loadOnStartup the new load on startup
- */
- //public void setLoadOnStartup(boolean loadOnStartup) {
- //this.loadOnStartup = loadOnStartup;
- //}
-
- /**
- * Sets the ssf file monitor polling interval.
- *
- * @param ssfFileMonitorPollingInterval the new ssf file monitor polling interval
- */
- //public void setSsfFileMonitorPollingInterval(
- //String ssfFileMonitorPollingInterval) {
- //this.ssfFileMonitorPollingInterval = ssfFileMonitorPollingInterval;
- //}
-
- /**
- * Sets the ssf file monitor threadpool size.
- *
- * @param ssfFileMonitorThreadpoolSize the new ssf file monitor threadpool size
- */
- //public void setSsfFileMonitorThreadpoolSize(
- //String ssfFileMonitorThreadpoolSize) {
- //this.ssfFileMonitorThreadpoolSize = ssfFileMonitorThreadpoolSize;
- //}
-
- /**
- * Gets the load on startup.
- *
- * @return the load on startup
- */
- //public boolean getLoadOnStartup() {
- //return loadOnStartup;
- //}
-
- /**
- * Gets the ssf file monitor polling interval.
- *
- * @return the ssf file monitor polling interval
- */
- //public String getSsfFileMonitorPollingInterval() {
- //return ssfFileMonitorPollingInterval;
- //}
-
- /**
- * Gets the ssf file monitor threadpool size.
- *
- * @return the ssf file monitor threadpool size
- */
- //public String getSsfFileMonitorThreadpoolSize() {
- //return ssfFileMonitorThreadpoolSize;
- //}
-
- /**
- * Gets the file changed listener.
- *
- * @return the file changed listener
- */
- //public ServicePropertiesListener getFileChangedListener() {
- //return fileChangedListener;
- //}
-
- /**
- * Sets the file changed listener.
- *
- * @param fileChangedListener the new file changed listener
- */
- //public void setFileChangedListener(
- //ServicePropertiesListener fileChangedListener) {
- //this.fileChangedListener = fileChangedListener;
- //}
-
- /**
- * Gets the file properties map.
- *
- * @return the file properties map
- */
- //public ServicePropertiesMap getFilePropertiesMap() {
- //return filePropertiesMap;
- //}
-
- /**
- * Sets the file properties map.
- *
- * @param filePropertiesMap the new file properties map
- */
- //public void setFilePropertiesMap(ServicePropertiesMap filePropertiesMap) {
- //this.filePropertiesMap = filePropertiesMap;
- //}
-//}
diff --git a/aai-resources/src/main/java/org/onap/aai/config/DmaapConfig.java b/aai-resources/src/main/java/org/onap/aai/config/JettyPasswordDecoder.java
index c34ae0a..5aef2eb 100644
--- a/aai-resources/src/main/java/org/onap/aai/config/DmaapConfig.java
+++ b/aai-resources/src/main/java/org/onap/aai/config/JettyPasswordDecoder.java
@@ -21,23 +21,15 @@
*/
package org.onap.aai.config;
-import org.apache.activemq.broker.BrokerService;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
+import org.eclipse.jetty.util.security.Password;
-@Configuration
-public class DmaapConfig {
+public class JettyPasswordDecoder implements PasswordDecoder {
- @Bean(destroyMethod = "stop")
- public BrokerService brokerService() throws Exception {
-
- BrokerService broker = new BrokerService();
- broker.addConnector("tcp://localhost:61447");
- broker.setPersistent(false);
- broker.setUseJmx(false);
- broker.setSchedulerSupport(false);
- broker.start();
-
- return broker;
+ @Override
+ public String decode(String input) {
+ if (input.startsWith("OBF:")) {
+ return Password.deobfuscate(input);
+ }
+ return Password.deobfuscate("OBF:" + input);
}
}
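
Note: JettyPasswordDecoder accepts the obfuscated value either with or without its "OBF:" prefix. A minimal sketch using Jetty's own Password utility (the clear-text sample is illustrative only):

    import org.eclipse.jetty.util.security.Password;

    // Jetty produces the OBF: form that the decoder understands.
    String obfuscated = Password.obfuscate("changeit");        // "OBF:..." (value depends on the input)
    PasswordDecoder decoder = new JettyPasswordDecoder();
    String clear1 = decoder.decode(obfuscated);                // prefix present
    String clear2 = decoder.decode(obfuscated.substring(4));   // prefix stripped; both yield "changeit"
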
diff --git a/aai-resources/src/main/java/org/onap/aai/config/PasswordDecoder.java b/aai-resources/src/main/java/org/onap/aai/config/PasswordDecoder.java
new file mode 100644
index 0000000..8c199eb
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/config/PasswordDecoder.java
@@ -0,0 +1,27 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.config;
+
+public interface PasswordDecoder {
+
+ String decode(String input);
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java b/aai-resources/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java
new file mode 100644
index 0000000..623c7e9
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java
@@ -0,0 +1,83 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.config;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Optional;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.springframework.context.ApplicationContextInitializer;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.core.env.CompositePropertySource;
+import org.springframework.core.env.ConfigurableEnvironment;
+import org.springframework.core.env.EnumerablePropertySource;
+import org.springframework.core.env.MapPropertySource;
+import org.springframework.core.env.PropertySource;
+import org.springframework.stereotype.Component;
+
+public class PropertyPasswordConfiguration implements ApplicationContextInitializer<ConfigurableApplicationContext> {
+
+ private static final Pattern decodePasswordPattern = Pattern.compile("password\\((.*?)\\)");
+
+ private PasswordDecoder passwordDecoder = new JettyPasswordDecoder();
+
+ @Override
+ public void initialize(ConfigurableApplicationContext applicationContext) {
+ ConfigurableEnvironment environment = applicationContext.getEnvironment();
+ for (PropertySource<?> propertySource : environment.getPropertySources()) {
+ Map<String, Object> propertyOverrides = new LinkedHashMap<>();
+ decodePasswords(propertySource, propertyOverrides);
+ if (!propertyOverrides.isEmpty()) {
+ PropertySource<?> decodedProperties = new MapPropertySource("decoded "+ propertySource.getName(), propertyOverrides);
+ environment.getPropertySources().addBefore(propertySource.getName(), decodedProperties);
+ }
+ }
+ }
+
+ private void decodePasswords(PropertySource<?> source, Map<String, Object> propertyOverrides) {
+ if (source instanceof EnumerablePropertySource) {
+ EnumerablePropertySource<?> enumerablePropertySource = (EnumerablePropertySource<?>) source;
+ for (String key : enumerablePropertySource.getPropertyNames()) {
+ Object rawValue = source.getProperty(key);
+ if (rawValue instanceof String) {
+ String decodedValue = decodePasswordsInString((String) rawValue);
+ propertyOverrides.put(key, decodedValue);
+ }
+ }
+ }
+ }
+
+ private String decodePasswordsInString(String input) {
+ if (input == null) return null;
+ StringBuffer output = new StringBuffer();
+ Matcher matcher = decodePasswordPattern.matcher(input);
+ while (matcher.find()) {
+ String replacement = passwordDecoder.decode(matcher.group(1));
+ matcher.appendReplacement(output, replacement);
+ }
+ matcher.appendTail(output);
+ return output.toString();
+ }
+
+}
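
Note: PropertyPasswordConfiguration rewrites any property value containing a password(...) token before the application context refreshes, placing the decoded value in a MapPropertySource ahead of the original source so it wins during resolution. A minimal sketch of the transformation applied to a single value (the property key and obfuscated sample are hypothetical):

    //   aai.keystore.password = password(1vn21ugu1saj1v9i1v941sar1ugw1vo0)
    //
    // is resolved by the environment as the deobfuscated clear text, because the
    // decoded override is registered before the original property source.
    Matcher m = Pattern.compile("password\\((.*?)\\)")
                       .matcher("password(1vn21ugu1saj1v9i1v941sar1ugw1vo0)");
    StringBuffer out = new StringBuffer();
    while (m.find()) {
        m.appendReplacement(out, new JettyPasswordDecoder().decode(m.group(1)));
    }
    m.appendTail(out);   // out now holds only the decoded password
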
diff --git a/aai-resources/src/main/java/org/onap/aai/dbgen/DupeTool.java b/aai-resources/src/main/java/org/onap/aai/dbgen/DupeTool.java
new file mode 100644
index 0000000..a19bc8e
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/dbgen/DupeTool.java
@@ -0,0 +1,1900 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.dbgen;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.util.*;
+import java.util.Map.Entry;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.AAIGraphConfig;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LogFormatTools;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.slf4j.MDC;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.thinkaurelius.titan.core.TitanFactory;
+import com.thinkaurelius.titan.core.TitanGraph;
+
+
+
+public class DupeTool {
+
+ private static final String FROMAPPID = "AAI-DB";
+ private static final String TRANSID = UUID.randomUUID().toString();
+
+ private static String graphType = "realdb";
+
+ public static boolean SHOULD_EXIT_VM = true;
+
+ public static int EXIT_VM_STATUS_CODE = -1;
+
+ public static void exit(int statusCode){
+ if(SHOULD_EXIT_VM){
+ System.exit(1);
+ }
+ EXIT_VM_STATUS_CODE = statusCode;
+ }
+
+
+ /**
+ * The main method.
+ *
+ * @param args the arguments
+ */
+ public static void main(String[] args) {
+ System.setProperty("aai.service.name", DupeTool.class.getSimpleName());
+ // Set the logging file properties to be used by EELFManager
+ Properties props = System.getProperties();
+ props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "dupeTool-logback.xml");
+ props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES);
+ EELFLogger logger = EELFManager.getInstance().getLogger(DupeTool.class.getSimpleName());
+ MDC.put("logFilenameAppender", DupeTool.class.getSimpleName());
+
+ LoggingContext.init();
+ LoggingContext.partnerName(FROMAPPID);
+ LoggingContext.serviceName(AAIConstants.AAI_RESOURCES_MS);
+ LoggingContext.component("dupeTool");
+ LoggingContext.targetEntity(AAIConstants.AAI_RESOURCES_MS);
+ LoggingContext.targetServiceName("main");
+ LoggingContext.requestId(TRANSID);
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ LoggingContext.responseCode(LoggingContext.SUCCESS);
+
+ String defVersion = "v9";
+ try {
+ defVersion = AAIConfig.get(AAIConstants.AAI_DEFAULT_API_VERSION_PROP);
+ }
+ catch ( AAIException ae ){
+ String emsg = "Error trying to get default API Version property \n";
+ System.out.println(emsg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error(emsg);
+ exit(0);
+ }
+
+ Loader loader= null;
+ try {
+ loader = LoaderFactory.createLoaderForVersion(ModelType.MOXY, AAIProperties.LATEST);
+
+ }
+ catch (Exception ex){
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
+ logger.error("ERROR - Could not do the moxyMod.init() " + LogFormatTools.getStackTop(ex));
+ exit(1);
+ }
+ TitanGraph graph1 = null;
+ TitanGraph graph2 = null;
+ Graph gt1 = null;
+ Graph gt2 = null;
+
+ boolean specialTenantRule = false;
+
+ try {
+ AAIConfig.init();
+ int maxRecordsToFix = AAIConstants.AAI_DUPETOOL_DEFAULT_MAX_FIX;
+ int sleepMinutes = AAIConstants.AAI_DUPETOOL_DEFAULT_SLEEP_MINUTES;
+ int timeWindowMinutes = 0; // A value of 0 means that we will not have a time-window -- we will look
+ // at all nodes of the passed-in nodeType.
+ long windowStartTime = 0; // Translation of the window into a starting timestamp
+
+ try {
+ String maxFixStr = AAIConfig.get("aai.dupeTool.default.max.fix");
+ if( maxFixStr != null && !maxFixStr.equals("") ){
+ maxRecordsToFix = Integer.parseInt(maxFixStr);
+ }
+ String sleepStr = AAIConfig.get("aai.dupeTool.default.sleep.minutes");
+ if( sleepStr != null && !sleepStr.equals("") ){
+ sleepMinutes = Integer.parseInt(sleepStr);
+ }
+ }
+ catch ( Exception e ){
+ // Don't worry, we'll just use the defaults that we got from AAIConstants
+ logger.warn("WARNING - could not pick up aai.dupeTool values from aaiconfig.properties file. Will use defaults. ");
+ }
+
+ String nodeTypeVal = "";
+ String userIdVal = "";
+ String filterParams = "";
+ Boolean skipHostCheck = false;
+ Boolean autoFix = false;
+ String argStr4Msg = "";
+ Introspector obj = null;
+
+ if (args != null && args.length > 0) {
+ // They passed some arguments in that will affect processing
+ for (int i = 0; i < args.length; i++) {
+ String thisArg = args[i];
+ argStr4Msg = argStr4Msg + " " + thisArg;
+
+ if (thisArg.equals("-nodeType")) {
+ i++;
+ if (i >= args.length) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error(" No value passed with -nodeType option. ");
+ exit(0);
+ }
+ nodeTypeVal = args[i];
+ argStr4Msg = argStr4Msg + " " + nodeTypeVal;
+ }
+ else if (thisArg.equals("-sleepMinutes")) {
+ i++;
+ if (i >= args.length) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error("No value passed with -sleepMinutes option.");
+ exit(0);
+ }
+ String nextArg = args[i];
+ try {
+ sleepMinutes = Integer.parseInt(nextArg);
+ } catch (Exception e) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error("Bad value passed with -sleepMinutes option: ["
+ + nextArg + "]");
+ exit(0);
+ }
+ argStr4Msg = argStr4Msg + " " + sleepMinutes;
+ }
+ else if (thisArg.equals("-maxFix")) {
+ i++;
+ if (i >= args.length) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error("No value passed with -maxFix option.");
+ exit(0);
+ }
+ String nextArg = args[i];
+ try {
+ maxRecordsToFix = Integer.parseInt(nextArg);
+ } catch (Exception e) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error("Bad value passed with -maxFix option: ["
+ + nextArg + "]");
+ exit(0);
+ }
+ argStr4Msg = argStr4Msg + " " + maxRecordsToFix;
+ }
+ else if (thisArg.equals("-timeWindowMinutes")) {
+ i++;
+ if (i >= args.length) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error("No value passed with -timeWindowMinutes option.");
+ exit(0);
+ }
+ String nextArg = args[i];
+ try {
+ timeWindowMinutes = Integer.parseInt(nextArg);
+ } catch (Exception e) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error("Bad value passed with -timeWindowMinutes option: ["
+ + nextArg + "]");
+ exit(0);
+ }
+ argStr4Msg = argStr4Msg + " " + timeWindowMinutes;
+ }
+ else if (thisArg.equals("-skipHostCheck")) {
+ skipHostCheck = true;
+ }
+ else if (thisArg.equals("-specialTenantRule")) {
+ specialTenantRule = true;
+ }
+ else if (thisArg.equals("-autoFix")) {
+ autoFix = true;
+ }
+ else if (thisArg.equals("-userId")) {
+ i++;
+ if (i >= args.length) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error(" No value passed with -userId option. ");
+ exit(0);
+ }
+ userIdVal = args[i];
+ argStr4Msg = argStr4Msg + " " + userIdVal;
+ }
+ else if (thisArg.equals("-params4Collect")) {
+ i++;
+ if (i >= args.length) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error(" No value passed with -params4Collect option. ");
+ exit(0);
+ }
+ filterParams = args[i];
+ argStr4Msg = argStr4Msg + " " + filterParams;
+ }
+ else {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error(" Unrecognized argument passed to DupeTool: ["
+ + thisArg + "]. ");
+						logger.error(" Valid values are: -nodeType -userId -sleepMinutes -maxFix -timeWindowMinutes -skipHostCheck -specialTenantRule -autoFix -params4Collect ");
+ exit(0);
+ }
+ }
+ }
+
+ userIdVal = userIdVal.trim();
+ if( (userIdVal.length() < 6) || userIdVal.toUpperCase().equals("AAIADMIN") ){
+				String emsg = "userId parameter is required. [" + userIdVal + "] passed to DupeTool(). userId must not be empty and must not be aaiadmin \n";
+ System.out.println(emsg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error(emsg);
+ exit(0);
+ }
+
+ nodeTypeVal = nodeTypeVal.trim();
+ if( nodeTypeVal.equals("") ){
+ String emsg = " nodeType is a required parameter for DupeTool().\n";
+ System.out.println(emsg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+ logger.error(emsg);
+ exit(0);
+ } else {
+ obj = loader.introspectorFromName(nodeTypeVal);
+ }
+
+ if (skipHostCheck) {
+ logger.info(" We will skip the HostCheck as requested. ");
+ }
+
+ if( timeWindowMinutes > 0 ){
+ // Translate the window value (ie. 30 minutes) into a unix timestamp like
+ // we use in the db - so we can select data created after that time.
+ windowStartTime = figureWindowStartTime( timeWindowMinutes );
+ }
+
+ String msg = "";
+ msg = "DupeTool called with these params: [" + argStr4Msg + "]";
+ System.out.println(msg);
+ logger.info(msg);
+
+ // Determine what the key fields are for this nodeType (and we want them ordered)
+ ArrayList <String> keyPropNamesArr = new ArrayList<String>(obj.getKeys());
+
+ // Determine what kinds of nodes (if any) this nodeType is dependent on for uniqueness
+ ArrayList<String> depNodeTypeList = new ArrayList<String>();
+ Collection<String> depNTColl = obj.getDependentOn();
+ Iterator<String> ntItr = depNTColl.iterator();
+ while( ntItr.hasNext() ){
+ depNodeTypeList.add(ntItr.next());
+ }
+
+ // Based on the nodeType, window and filterData, figure out the vertices that we will be checking
+ System.out.println(" ---- NOTE --- about to open graph (takes a little while)--------\n");
+ graph1 = setupGraph(logger);
+ gt1 = getGraphTransaction( graph1, logger );
+ ArrayList<Vertex> verts2Check = new ArrayList<Vertex>();
+ try {
+ verts2Check = figureOutNodes2Check( TRANSID, FROMAPPID, gt1,
+ nodeTypeVal, windowStartTime, filterParams, logger );
+ }
+ catch ( AAIException ae ){
+ String emsg = "Error trying to get initial set of nodes to check. \n";
+ System.out.println(emsg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error(emsg);
+ exit(0);
+ }
+
+ if( verts2Check == null || verts2Check.size() == 0 ){
+ msg = " No vertices found to check. Used nodeType = [" + nodeTypeVal
+ + "], windowMinutes = " + timeWindowMinutes
+ + ", filterData = [" + filterParams + "].";
+ logger.info( msg );
+ System.out.println( msg );
+ exit(0);
+ }
+ else {
+ msg = " Found " + verts2Check.size() + " nodes of type " + nodeTypeVal
+ + " to check using passed filterParams and windowStartTime. ";
+ logger.info( msg );
+ System.out.println( msg );
+ }
+
+ ArrayList <String> firstPassDupeSets = new ArrayList <String>();
+ ArrayList <String> secondPassDupeSets = new ArrayList <String>();
+ Boolean isDependentOnParent = false;
+ if( !obj.getDependentOn().isEmpty() ){
+ isDependentOnParent = true;
+ }
+
+ if( isDependentOnParent ){
+ firstPassDupeSets = getDupeSets4DependentNodes( TRANSID, FROMAPPID, gt1,
+ defVersion, nodeTypeVal, verts2Check, keyPropNamesArr, loader,
+ specialTenantRule, logger );
+ }
+ else {
+ firstPassDupeSets = getDupeSets4NonDepNodes( TRANSID, FROMAPPID, gt1,
+ defVersion, nodeTypeVal, verts2Check, keyPropNamesArr,
+ specialTenantRule, loader, logger );
+ }
+
+ msg = " Found " + firstPassDupeSets.size() + " sets of duplicates for this request. ";
+ logger.info( msg );
+ System.out.println( msg );
+ if( firstPassDupeSets.size() > 0 ){
+ msg = " Here is what they look like: ";
+ logger.info( msg );
+ System.out.println( msg );
+ for( int x = 0; x < firstPassDupeSets.size(); x++ ){
+ msg = " Set " + x + ": [" + firstPassDupeSets.get(x) +"] ";
+ logger.info( msg );
+ System.out.println( msg );
+ showNodeDetailsForADupeSet(gt1, firstPassDupeSets.get(x), logger);
+ }
+ }
+
+ boolean didSomeDeletesFlag = false;
+ ArrayList <String> dupeSetsToFix = new ArrayList <String> ();
+ if( autoFix && firstPassDupeSets.size() == 0 ){
+ msg = "AutoFix option is on, but no dupes were found on the first pass. Nothing to fix.";
+ logger.info( msg );
+ System.out.println( msg );
+ }
+ else if( autoFix ){
+ // We will try to fix any dupes that we can - but only after sleeping for a
+				// time and re-checking the list of duplicates using a separate transaction.
+ try {
+ msg = "\n\n----------- About to sleep for " + sleepMinutes + " minutes."
+ + " -----------\n\n";
+ logger.info( msg );
+ System.out.println( msg );
+ int sleepMsec = sleepMinutes * 60 * 1000;
+ Thread.sleep(sleepMsec);
+ } catch (InterruptedException ie) {
+ msg = "\n >>> Sleep Thread has been Interrupted <<< ";
+ logger.info( msg );
+ System.out.println( msg );
+ exit(0);
+ }
+
+ graph2 = setupGraph(logger);
+ gt2 = getGraphTransaction( graph2, logger );
+ if( isDependentOnParent ){
+ secondPassDupeSets = getDupeSets4DependentNodes( TRANSID, FROMAPPID, gt2,
+ defVersion, nodeTypeVal, verts2Check, keyPropNamesArr, loader,
+ specialTenantRule, logger );
+ }
+ else {
+ secondPassDupeSets = getDupeSets4NonDepNodes( TRANSID, FROMAPPID, gt2,
+ defVersion, nodeTypeVal, verts2Check, keyPropNamesArr,
+ specialTenantRule, loader, logger );
+ }
+
+ dupeSetsToFix = figureWhichDupesStillNeedFixing( firstPassDupeSets, secondPassDupeSets, logger );
+ msg = "\nAfter running a second pass, there were " + dupeSetsToFix.size()
+ + " sets of duplicates that we think can be deleted. ";
+ logger.info( msg );
+ System.out.println( msg );
+ if( dupeSetsToFix.size() > 0 ){
+ msg = " Here is what the sets look like: ";
+ logger.info( msg );
+ System.out.println( msg );
+ for( int x = 0; x < dupeSetsToFix.size(); x++ ){
+ msg = " Set " + x + ": [" + dupeSetsToFix.get(x) +"] ";
+ logger.info( msg );
+ System.out.println( msg );
+ showNodeDetailsForADupeSet(gt2, dupeSetsToFix.get(x), logger);
+ }
+ }
+
+ if( dupeSetsToFix.size() > 0 ){
+ if( dupeSetsToFix.size() > maxRecordsToFix ){
+ String infMsg = " >> WARNING >> Dupe list size ("
+ + dupeSetsToFix.size()
+ + ") is too big. The maxFix we are using is: "
+ + maxRecordsToFix
+ + ". No nodes will be deleted. (use the"
+ + " -maxFix option to override this limit.)";
+ System.out.println(infMsg);
+ logger.info(infMsg);
+ }
+ else {
+ // Call the routine that fixes known dupes
+ didSomeDeletesFlag = deleteNonKeepers( gt2, dupeSetsToFix, logger );
+ }
+ }
+ if( didSomeDeletesFlag ){
+ gt2.tx().commit();
+ }
+ }
+
+ } catch (AAIException e) {
+ logger.error("Caught AAIException while running the dupeTool: " + LogFormatTools.getStackTop(e));
+ ErrorLogHelper.logException(e);
+ } catch (Exception ex) {
+ logger.error("Caught exception while running the dupeTool: "+ LogFormatTools.getStackTop(ex));
+ ErrorLogHelper.logError("AAI_6128", ex.getMessage() + ", resolve and rerun the dupeTool. ");
+ } finally {
+ if (gt1 != null && gt1.tx().isOpen()) {
+ // We don't change any data with gt1 - so just roll it back so it knows we're done.
+ try {
+ gt1.tx().rollback();
+ }
+ catch (Exception ex) {
+ // Don't throw anything because Titan sometimes is just saying that the graph is already closed
+ logger.warn("WARNING from final gt1.rollback() " + LogFormatTools.getStackTop(ex));
+ }
+ }
+
+ if (gt2 != null && gt2.tx().isOpen()) {
+ // Any changes that worked correctly should have already done
+ // their commits.
+ try {
+ gt2.tx().rollback();
+ } catch (Exception ex) {
+ // Don't throw anything because Titan sometimes is just saying that the graph is already closed
+ logger.warn("WARNING from final gt2.rollback() " + LogFormatTools.getStackTop(ex));
+ }
+ }
+
+ try {
+ if( graph1 != null && graph1.isOpen() ){
+ closeGraph(graph1, logger);
+ }
+ } catch (Exception ex) {
+				// Don't throw anything because Titan sometimes is just saying that the graph is already closed
+ logger.warn("WARNING from final graph1.shutdown() " + LogFormatTools.getStackTop(ex));
+ }
+
+ try {
+ if( graph2 != null && graph2.isOpen() ){
+ closeGraph(graph2, logger);
+ }
+ } catch (Exception ex) {
+				// Don't throw anything because Titan sometimes is just saying that the graph is already closed
+ logger.warn("WARNING from final graph2.shutdown() " + LogFormatTools.getStackTop(ex));
+ }
+ }
+
+ exit(0);
+
+ }// end of main()
+
+
+ /**
+ * Collect Duplicate Sets for nodes that are NOT dependent on parent nodes.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param g the g
+ * @param version the version
+ * @param nType the n type
+ * @param passedVertList the passed vert list
+	 * @param keyPropNamesArr the ordered list of key property names
+ * @return the array list
+ */
+ private static ArrayList<String> getDupeSets4NonDepNodes( String transId,
+ String fromAppId, Graph g, String version, String nType,
+ ArrayList<Vertex> passedVertList,
+ ArrayList <String> keyPropNamesArr,
+ Boolean specialTenantRule, Loader loader, EELFLogger logger ) {
+
+ ArrayList<String> returnList = new ArrayList<String>();
+
+ // We've been passed a set of nodes that we want to check.
+ // They are all NON-DEPENDENT nodes meaning that they should be
+ // unique in the DB based on their KEY DATA alone. So, if
+ // we group them by their key data - if any key has more than one
+ // vertex mapped to it, those vertices are dupes.
+ //
+	// When we find duplicates, we return them as a String (there can be
+ // more than one duplicate for one set of key data):
+ // Each element in the returned arrayList might look like this:
+ // "1234|5678|keepVid=UNDETERMINED" (if there were 2 dupes, and we
+ // couldn't figure out which one to keep)
+ // or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
+ // thought the third one was the one that should survive)
+
+ HashMap <String, ArrayList<String>> keyVals2VidHash = new HashMap <String, ArrayList<String>>();
+ HashMap <String,Vertex> vtxHash = new HashMap <String,Vertex>();
+ Iterator<Vertex> pItr = passedVertList.iterator();
+ while (pItr.hasNext()) {
+ try {
+ Vertex tvx = pItr.next();
+ String thisVid = tvx.id().toString();
+ vtxHash.put(thisVid, tvx);
+
+ // if there are more than one vertexId mapping to the same keyProps -- they are dupes
+ String hKey = getNodeKeyValString( tvx, keyPropNamesArr, logger );
+ if( keyVals2VidHash.containsKey(hKey) ){
+ // We've already seen this key
+ ArrayList <String> tmpVL = (ArrayList <String>)keyVals2VidHash.get(hKey);
+ tmpVL.add(thisVid);
+ keyVals2VidHash.put(hKey, tmpVL);
+ }
+ else {
+ // First time for this key
+ ArrayList <String> tmpVL = new ArrayList <String>();
+ tmpVL.add(thisVid);
+ keyVals2VidHash.put(hKey, tmpVL);
+ }
+ }
+ catch (Exception e) {
+ logger.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. " + LogFormatTools.getStackTop(e));
+ }
+ }
+
+ for( Map.Entry<String, ArrayList<String>> entry : keyVals2VidHash.entrySet() ){
+ ArrayList <String> vidList = entry.getValue();
+ try {
+ if( !vidList.isEmpty() && vidList.size() > 1 ){
+					// More than one vertex id is using the same key info
+ String dupesStr = "";
+ ArrayList <Vertex> vertList = new ArrayList <Vertex> ();
+ for (int i = 0; i < vidList.size(); i++) {
+ String tmpVid = vidList.get(i);
+ dupesStr = dupesStr + tmpVid + "|";
+ vertList.add(vtxHash.get(tmpVid));
+ }
+
+					if (!dupesStr.isEmpty()) {
+ Vertex prefV = getPreferredDupe(transId, fromAppId,
+ g, vertList, version, specialTenantRule, loader, logger);
+ if (prefV == null) {
+ // We could not determine which duplicate to keep
+ dupesStr = dupesStr + "KeepVid=UNDETERMINED";
+ returnList.add(dupesStr);
+ } else {
+ dupesStr = dupesStr + "KeepVid=" + prefV.id();
+ returnList.add(dupesStr);
+ }
+ }
+ }
+ }
+ catch (Exception e) {
+ logger.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. " + LogFormatTools.getStackTop(e));
+ }
+
+ }
+ return returnList;
+
+ }// End of getDupeSets4NonDepNodes()
+
+
+ /**
+ * Collect Duplicate Sets for nodes that are dependent on parent nodes.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param g the g
+ * @param version the version
+ * @param nType the n type
+ * @param passedVertList the passed vert list
+	 * @param loader the loader
+ * @param keyPropNamesArr Array (ordered) of keyProperty names
+ * @param specialTenantRule flag
+	 * @param logger the logger
+ * @return the array list
+ */
+ private static ArrayList<String> getDupeSets4DependentNodes( String transId,
+ String fromAppId, Graph g, String version, String nType,
+ ArrayList<Vertex> passedVertList,
+ ArrayList <String> keyPropNamesArr, Loader loader,
+ Boolean specialTenantRule, EELFLogger logger ) {
+
+	// This is for nodeTypes that DEPEND ON A PARENT NODE FOR UNIQUENESS
+
+ ArrayList<String> returnList = new ArrayList<String>();
+ ArrayList<String> alreadyFoundDupeVidArr = new ArrayList<String>();
+
+ // We've been passed a set of nodes that we want to check. These are
+ // all nodes that ARE DEPENDENT on a PARENT Node for uniqueness.
+ // The first thing to do is to identify the key properties for the node-type
+ // and pull from the db just using those properties.
+ // Then, we'll check those nodes with their parent nodes to see if there
+ // are any duplicates.
+ //
+	// When we find duplicates, we return them as a String (there can be
+ // more than one duplicate for one set of key data):
+ // Each element in the returned arrayList might look like this:
+ // "1234|5678|keepVid=UNDETERMINED" (if there were 2 dupes, and we
+ // couldn't figure out which one to keep)
+ // or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
+ // thought the third one was the one that should survive)
+ HashMap <String, Object> checkVertHash = new HashMap <String,Object> ();
+ try {
+ Iterator<Vertex> pItr = passedVertList.iterator();
+ while (pItr.hasNext()) {
+ Vertex tvx = pItr.next();
+ String passedId = tvx.id().toString();
+ if( !alreadyFoundDupeVidArr.contains(passedId) ){
+ // We haven't seen this one before - so we should check it.
+ HashMap <String,Object> keyPropValsHash = getNodeKeyVals( tvx, keyPropNamesArr, logger );
+ ArrayList <Vertex> tmpVertList = getNodeJustUsingKeyParams( transId, fromAppId, g,
+ nType, keyPropValsHash, version, logger );
+
+ if( tmpVertList.size() <= 1 ){
+ // Even without a parent node, this thing is unique so don't worry about it.
+ }
+ else {
+ for( int i = 0; i < tmpVertList.size(); i++ ){
+ Vertex tmpVtx = (tmpVertList.get(i));
+ String tmpVid = tmpVtx.id().toString();
+ alreadyFoundDupeVidArr.add(tmpVid);
+
+ String hKey = getNodeKeyValString( tmpVtx, keyPropNamesArr, logger );
+ if( checkVertHash.containsKey(hKey) ){
+ // add it to an existing list
+ ArrayList <Vertex> tmpVL = (ArrayList <Vertex>)checkVertHash.get(hKey);
+ tmpVL.add(tmpVtx);
+ checkVertHash.put(hKey, tmpVL);
+ }
+ else {
+ // First time for this key
+ ArrayList <Vertex> tmpVL = new ArrayList <Vertex>();
+ tmpVL.add(tmpVtx);
+ checkVertHash.put(hKey, tmpVL);
+ }
+ }
+ }
+ }
+ }
+
+			// More than one node can have the same key fields, since such
+			// nodes may depend on a parent node for uniqueness. When we find
+			// more than one, we check whether any of the vertices that share
+			// this set of keys are also pointing at the same 'parent'
+			// node.
+ // Note: for a given set of key data, it is possible that there
+ // could be more than one set of duplicates.
+ for (Entry<String, Object> lentry : checkVertHash.entrySet()) {
+ ArrayList <Vertex> thisIdSetList = (ArrayList <Vertex>)lentry.getValue();
+ if (thisIdSetList == null || thisIdSetList.size() < 2) {
+ // Nothing to check for this set.
+ continue;
+ }
+
+ HashMap<String, ArrayList<Vertex>> vertsGroupedByParentHash = groupVertsByDepNodes(
+ transId, fromAppId, g, version, nType,
+ thisIdSetList, loader);
+ for (Map.Entry<String, ArrayList<Vertex>> entry : vertsGroupedByParentHash
+ .entrySet()) {
+ ArrayList<Vertex> thisParentsVertList = entry
+ .getValue();
+ if (thisParentsVertList.size() > 1) {
+ // More than one vertex found with the same key info
+ // hanging off the same parent/dependent node
+ String dupesStr = "";
+ for (int i = 0; i < thisParentsVertList.size(); i++) {
+ dupesStr = dupesStr
+ + ( (thisParentsVertList
+ .get(i))).id() + "|";
+ }
+						if (!dupesStr.isEmpty()) {
+ Vertex prefV = getPreferredDupe(transId,
+ fromAppId, g, thisParentsVertList,
+ version, specialTenantRule, loader, logger);
+
+ if (prefV == null) {
+ // We could not determine which duplicate to keep
+ dupesStr = dupesStr + "KeepVid=UNDETERMINED";
+ returnList.add(dupesStr);
+ }
+ else {
+ dupesStr = dupesStr + "KeepVid="
+ + prefV.id().toString();
+ returnList.add(dupesStr);
+ }
+ }
+ }
+ }
+ }
+
+ } catch (Exception e) {
+		logger.warn(" >>> Threw an error in getDupeSets4DependentNodes - just absorb this error and move on. " + LogFormatTools.getStackTop(e));
+ }
+
+ return returnList;
+
+ }// End of getDupeSets4DependentNodes()
+
+
+ private static Graph getGraphTransaction(TitanGraph graph, EELFLogger logger){
+
+ Graph gt = null;
+ try {
+ if( graph == null ){
+ String emsg = "could not get graph object in DupeTool. \n";
+ System.out.println(emsg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+ logger.error(emsg);
+ exit(0);
+ }
+ gt = graph.newTransaction();
+ if (gt == null) {
+ String emsg = "null graphTransaction object in DupeTool. \n";
+ throw new AAIException("AAI_6101", emsg);
+ }
+
+ }
+ catch (AAIException e1) {
+ String msg = e1.getErrorObject().toString();
+ System.out.println(msg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error(msg);
+ exit(0);
+ }
+ catch (Exception e2) {
+ String msg = e2.toString();
+ System.out.println(msg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
+ logger.error(msg);
+ exit(0);
+ }
+
+ return gt;
+
+ }// End of getGraphTransaction()
+
+
+
+ public static void showNodeInfo(EELFLogger logger, Vertex tVert, Boolean displayAllVidsFlag ){
+
+ try {
+ Iterator<VertexProperty<Object>> pI = tVert.properties();
+ String infStr = ">>> Found Vertex with VertexId = " + tVert.id() + ", properties: ";
+ System.out.println( infStr );
+ logger.info(infStr);
+ while( pI.hasNext() ){
+ VertexProperty<Object> tp = pI.next();
+ infStr = " [" + tp.key() + "|" + tp.value() + "] ";
+ System.out.println( infStr );
+ logger.info(infStr);
+ }
+
+ ArrayList <String> retArr = collectEdgeInfoForNode( logger, tVert, displayAllVidsFlag );
+ for( String infoStr : retArr ){
+ System.out.println( infoStr );
+ logger.info(infoStr);
+ }
+ }
+ catch (Exception e){
+ String warnMsg = " -- Error -- trying to display edge info. [" + e.getMessage() + "]";
+ System.out.println( warnMsg );
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
+ logger.warn(warnMsg);
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ LoggingContext.responseCode(LoggingContext.SUCCESS);
+ }
+
+ }// End of showNodeInfo()
+
+
+ public static ArrayList <String> collectEdgeInfoForNode( EELFLogger logger, Vertex tVert, boolean displayAllVidsFlag ){
+ ArrayList <String> retArr = new ArrayList <String> ();
+ Direction dir = Direction.OUT;
+ for ( int i = 0; i <= 1; i++ ){
+ if( i == 1 ){
+ // Second time through we'll look at the IN edges.
+ dir = Direction.IN;
+ }
+ Iterator <Edge> eI = tVert.edges(dir);
+ if( ! eI.hasNext() ){
+ retArr.add("No " + dir + " edges were found for this vertex. ");
+ }
+ while( eI.hasNext() ){
+ Edge ed = eI.next();
+ String lab = ed.label();
+ Vertex vtx = null;
+ if( dir == Direction.OUT ){
+ // get the vtx on the "other" side
+ vtx = ed.inVertex();
+ }
+ else {
+ // get the vtx on the "other" side
+ vtx = ed.outVertex();
+ }
+ if( vtx == null ){
+ retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+ }
+ else {
+ String nType = vtx.<String>property("aai-node-type").orElse(null);
+ if( displayAllVidsFlag ){
+ // This should rarely be needed
+ String vid = vtx.id().toString();
+ retArr.add("Found an " + dir + " edge (" + lab + ") between this vertex and a [" + nType + "] node with VtxId = " + vid );
+ }
+ else {
+ // This is the normal case
+ retArr.add("Found an " + dir + " edge (" + lab + ") between this vertex and a [" + nType + "] node. ");
+ }
+ }
+ }
+ }
+ return retArr;
+
+ }// end of collectEdgeInfoForNode()
+
+
+ private static long figureWindowStartTime( int timeWindowMinutes ){
+ // Given a window size, calculate what the start-timestamp would be.
+
+ if( timeWindowMinutes <= 0 ){
+ // This just means that there is no window...
+ return 0;
+ }
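+ // Example: a 1440-minute (24 hour) window gives a start time of (now - 86,400,000 ms).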
+ long unixTimeNow = System.currentTimeMillis();
+ // Use long arithmetic so a very large window does not overflow an int.
+ long windowInMillis = timeWindowMinutes * 60L * 1000L;
+
+ long startTimeStamp = unixTimeNow - windowInMillis;
+
+ return startTimeStamp;
+ } // End of figureWindowStartTime()
+
+
+
+ /**
+ * Gets the node(s) just using key params.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param graph the graph
+ * @param nodeType the node type
+ * @param keyPropsHash the key props hash
+ * @param apiVersion the api version
+ * @param logger the EELFLogger
+ * @return the node just using key params
+ * @throws AAIException the AAI exception
+ */
+ public static ArrayList <Vertex> getNodeJustUsingKeyParams( String transId, String fromAppId, Graph graph, String nodeType,
+ HashMap<String,Object> keyPropsHash, String apiVersion, EELFLogger logger ) throws AAIException{
+
+ ArrayList <Vertex> retVertList = new ArrayList <Vertex> ();
+
+ // We assume that all NodeTypes have at least one key-property defined.
+ // Note - instead of key-properties (the primary key properties), a user could pass
+ // alternate-key values if they are defined for the nodeType.
+ ArrayList<String> kName = new ArrayList<String>();
+ ArrayList<Object> kVal = new ArrayList<Object>();
+ if( keyPropsHash == null || keyPropsHash.isEmpty() ) {
+ throw new AAIException("AAI_6120", " NO key properties passed for this getNodeJustUsingKeyParams() request. NodeType = [" + nodeType + "]. ");
+ }
+
+ int i = -1;
+ for( Map.Entry<String, Object> entry : keyPropsHash.entrySet() ){
+ i++;
+ kName.add(i, entry.getKey());
+ kVal.add(i, entry.getValue());
+ }
+ int topPropIndex = i;
+ Vertex tiV = null;
+ String propsAndValuesForMsg = "";
+ Iterator<Vertex> verts = null;
+ GraphTraversalSource g = graph.traversal();
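+ // Build the lookup by chaining a has() step onto the traversal for each key property
+ // (only up to four key properties per nodeType are supported below).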
+ try {
+ if( topPropIndex == 0 ){
+ propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ") ";
+ verts= g.V().has(kName.get(0),kVal.get(0)).has("aai-node-type",nodeType);
+ }
+ else if( topPropIndex == 1 ){
+ propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", "
+ + kName.get(1) + " = " + kVal.get(1) + ") ";
+ verts = g.V().has(kName.get(0),kVal.get(0)).has(kName.get(1),kVal.get(1)).has("aai-node-type",nodeType);
+ }
+ else if( topPropIndex == 2 ){
+ propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", "
+ + kName.get(1) + " = " + kVal.get(1) + ", "
+ + kName.get(2) + " = " + kVal.get(2) + ") ";
+ verts= g.V().has(kName.get(0),kVal.get(0)).has(kName.get(1),kVal.get(1)).has(kName.get(2),kVal.get(2)).has("aai-node-type",nodeType);
+ }
+ else if( topPropIndex == 3 ){
+ propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", "
+ + kName.get(1) + " = " + kVal.get(1) + ", "
+ + kName.get(2) + " = " + kVal.get(2) + ", "
+ + kName.get(3) + " = " + kVal.get(3) + ") ";
+ verts= g.V().has(kName.get(0),kVal.get(0)).has(kName.get(1),kVal.get(1)).has(kName.get(2),kVal.get(2)).has(kName.get(3),kVal.get(3)).has("aai-node-type",nodeType);
+ }
+ else {
+ throw new AAIException("AAI_6114", " We only support 4 keys per nodeType for now \n");
+ }
+ }
+ catch( Exception ex ){
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error( " ERROR trying to get node for: [" + propsAndValuesForMsg + "] " + LogFormatTools.getStackTop(ex));
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ LoggingContext.responseCode(LoggingContext.SUCCESS);
+ }
+
+ if( verts != null ){
+ while( verts.hasNext() ){
+ tiV = verts.next();
+ retVertList.add(tiV);
+ }
+ }
+
+ if( retVertList.size() == 0 ){
+ logger.debug("DEBUG No node found for nodeType = [" + nodeType +
+ "], propsAndVal = " + propsAndValuesForMsg );
+ }
+
+ return retVertList;
+
+ }// End of getNodeJustUsingKeyParams()
+
+
+
+ /**
+ * Figures out which nodes to check, given a nodeType, an optional time window and optional property filters.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param graph the graph
+ * @param nodeType the node type
+ * @param windowStartTime the window start time
+ * @param propsString the optional property filter string ('propName1|propVal1,propName2|propVal2')
+ * @param logger the EELFLogger
+ * @return the nodes
+ * @throws AAIException the AAI exception
+ */
+ public static ArrayList <Vertex> figureOutNodes2Check( String transId, String fromAppId,
+ Graph graph, String nodeType, long windowStartTime,
+ String propsString, EELFLogger logger ) throws AAIException{
+
+ ArrayList <Vertex> retVertList = new ArrayList <Vertex> ();
+ String msg = "";
+ GraphTraversal<Vertex,Vertex> tgQ = graph.traversal().V().has("aai-node-type",nodeType);
+ String qStringForMsg = "graph.traversal().V().has(\"aai-node-type\"," + nodeType + ")";
+
+ if( propsString != null && !propsString.trim().equals("") ){
+ propsString = propsString.trim();
+ int firstPipeLoc = propsString.indexOf("|");
+ if( firstPipeLoc <= 0 ){
+ msg = "Bad props4Collect passed: [" + propsString + "]. \n Expecting a format like, 'propName1|propVal1,propName2|propVal2'";
+ System.out.println(msg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error(msg);
+ exit(0);
+ }
+
+ // Note - if they're only passing one parameter, there won't be any commas
+ String [] paramArr = propsString.split(",");
+ for( int i = 0; i < paramArr.length; i++ ){
+ int pipeLoc = paramArr[i].indexOf("|");
+ if( pipeLoc <= 0 ){
+ msg = "Bad propsString passed: [" + propsString + "]. \n Expecting a format like, 'propName1|propVal1,propName2|propVal2'";
+ System.out.println(msg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error(msg);
+ exit(0);
+ }
+ else {
+ String propName = paramArr[i].substring(0,pipeLoc);
+ String propVal = paramArr[i].substring(pipeLoc + 1);
+ tgQ = tgQ.has(propName,propVal);
+ qStringForMsg = qStringForMsg + ".has(" + propName + "," + propVal + ")";
+ }
+ }
+ }
+
+ if(tgQ == null){
+ msg = "Bad TitanGraphQuery object. ";
+ System.out.println(msg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+ logger.error(msg);
+ exit(0);
+ }
+ else {
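+ // Walk the matching vertices and keep only the ones created inside the requested
+ // time window (nodes with no timestamp, or all nodes when no window was given, are kept as well).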
+ Iterator<Vertex> vertItor = tgQ;
+ while( vertItor.hasNext() ){
+ Vertex tiV = vertItor.next();
+ if( windowStartTime <= 0 ){
+ // We're not applying a time-window
+ retVertList.add(tiV);
+ }
+ else {
+ Object objTimeStamp = tiV.property("aai-created-ts").orElse(null);
+ if( objTimeStamp == null ){
+ // No timestamp - so just take it
+ retVertList.add(tiV);
+ }
+ else {
+ long thisNodeCreateTime = (long)objTimeStamp;
+ if( thisNodeCreateTime > windowStartTime ){
+ // It is in our window, so we can take it
+ retVertList.add(tiV);
+ }
+ }
+ }
+ }
+ }
+
+ if( retVertList.size() == 0 ){
+ logger.debug("DEBUG No node found for: [" + qStringForMsg + ", with aai-created-ts > " + windowStartTime );
+ }
+
+ return retVertList;
+
+ }// End of figureOutNodes2Check()
+
+
+ /**
+ * Gets the preferred dupe.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param g the g
+ * @param dupeVertexList the dupe vertex list
+ * @param ver the ver
+ * @param specialTenantRule the special tenant rule flag
+ * @param loader the loader
+ * @param logger the EELFLogger
+ * @return Vertex
+ * @throws AAIException the AAI exception
+ */
+ public static Vertex getPreferredDupe( String transId,
+ String fromAppId, Graph g,
+ ArrayList<Vertex> dupeVertexList, String ver,
+ Boolean specialTenantRule, Loader loader, EELFLogger logger )
+ throws AAIException {
+
+ // This method assumes that it is being passed a List of vertex objects
+ // which violate our uniqueness constraints.
+
+ Vertex nullVtx = null;
+
+ if (dupeVertexList == null) {
+ return nullVtx;
+ }
+ int listSize = dupeVertexList.size();
+ if (listSize == 0) {
+ return nullVtx;
+ }
+ if (listSize == 1) {
+ return ( dupeVertexList.get(0));
+ }
+
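+ // Compare the vertices pairwise, carrying the winner of each comparison forward;
+ // if any comparison cannot pick a winner, give up and return null.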
+ Vertex vtxPreferred = null;
+ Vertex currentFaveVtx = dupeVertexList.get(0);
+ for (int i = 1; i < listSize; i++) {
+ Vertex vtxB = dupeVertexList.get(i);
+ vtxPreferred = pickOneOfTwoDupes(transId, fromAppId, g,
+ currentFaveVtx, vtxB, ver, specialTenantRule, loader, logger);
+ if (vtxPreferred == null) {
+ // We couldn't choose one
+ return nullVtx;
+ } else {
+ currentFaveVtx = vtxPreferred;
+ }
+ }
+
+ return (currentFaveVtx);
+
+ } // end of getPreferredDupe()
+
+
+ /**
+ * Pick one of two dupes.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param g the g
+ * @param vtxA the vtx A
+ * @param vtxB the vtx B
+ * @param ver the ver
+ * @param specialTenantRule the special tenant rule flag
+ * @param loader the loader
+ * @param logger the EELFLogger
+ * @return Vertex
+ * @throws AAIException the AAI exception
+ */
+ public static Vertex pickOneOfTwoDupes(String transId,
+ String fromAppId, Graph g, Vertex vtxA,
+ Vertex vtxB, String ver, Boolean specialTenantRule, Loader loader, EELFLogger logger) throws AAIException {
+
+ Vertex nullVtx = null;
+ Vertex preferredVtx = null;
+
+ Long vidA = Long.valueOf(vtxA.id().toString());
+ Long vidB = Long.valueOf(vtxB.id().toString());
+
+ String vtxANodeType = "";
+ String vtxBNodeType = "";
+ Object obj = vtxA.<Object>property("aai-node-type").orElse(null);
+ if (obj != null) {
+ vtxANodeType = obj.toString();
+ }
+ obj = vtxB.<Object>property("aai-node-type").orElse(null);
+ if (obj != null) {
+ vtxBNodeType = obj.toString();
+ }
+
+ if (vtxANodeType.equals("") || (!vtxANodeType.equals(vtxBNodeType))) {
+ // Either they're not really dupes or there's some bad data - so
+ // don't pick one
+ return nullVtx;
+ }
+
+ // Check that node A and B both have the same key values (or else they
+ // are not dupes)
+ // (We'll check dep-node later)
+ Collection<String> keyProps = loader.introspectorFromName(vtxANodeType).getKeys();
+ Iterator<String> keyPropI = keyProps.iterator();
+ while (keyPropI.hasNext()) {
+ String propName = keyPropI.next();
+ String vtxAKeyPropVal = "";
+ obj = vtxA.<Object>property(propName).orElse(null);
+ if (obj != null) {
+ vtxAKeyPropVal = obj.toString();
+ }
+ String vtxBKeyPropVal = "";
+ obj = vtxB.<Object>property(propName).orElse(null);
+ if (obj != null) {
+ vtxBKeyPropVal = obj.toString();
+ }
+
+ if (vtxAKeyPropVal.equals("")
+ || (!vtxAKeyPropVal.equals(vtxBKeyPropVal))) {
+ // Either they're not really dupes or they are missing some key
+ // data - so don't pick one
+ return nullVtx;
+ }
+ }
+
+ // Collect the vid's and aai-node-types of the vertices that each vertex
+ // (A and B) is connected to.
+ ArrayList<String> vtxIdsConn2A = new ArrayList<String>();
+ ArrayList<String> vtxIdsConn2B = new ArrayList<String>();
+ HashMap<String, String> nodeTypesConn2A = new HashMap<String, String>();
+ HashMap<String, String> nodeTypesConn2B = new HashMap<String, String>();
+
+ ArrayList <String> retArr = new ArrayList <String> ();
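+ // Walk the edges of each vertex (both directions) and record the vertex id and
+ // aai-node-type found on the far side of every edge.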
+ Iterator <Edge> eAI = vtxA.edges(Direction.BOTH);
+ while( eAI.hasNext() ){
+ Edge ed = eAI.next();
+ Vertex tmpVtx;
+ if (vtxA.equals(ed.inVertex())) {
+ tmpVtx = ed.outVertex();
+ } else {
+ tmpVtx = ed.inVertex();
+ }
+ if( tmpVtx == null ){
+ retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+ }
+ else {
+ String conVid = tmpVtx.id().toString();
+ String nt = "";
+ obj = tmpVtx.<Object>property("aai-node-type").orElse(null);
+ if (obj != null) {
+ nt = obj.toString();
+ }
+ nodeTypesConn2A.put(nt, conVid);
+ vtxIdsConn2A.add(conVid);
+ }
+ }
+
+ Iterator <Edge> eBI = vtxB.edges(Direction.BOTH);
+ while( eBI.hasNext() ){
+ Edge ed = eBI.next();
+ Vertex tmpVtx;
+
+ if (vtxB.equals(ed.inVertex())) {
+ tmpVtx = ed.outVertex();
+ } else {
+ tmpVtx = ed.inVertex();
+ }
+ if( tmpVtx == null ){
+ retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+ }
+ else {
+ String conVid = tmpVtx.id().toString();
+ String nt = "";
+ obj = tmpVtx.<Object>property("aai-node-type").orElse(null);
+ if (obj != null) {
+ nt = obj.toString();
+ }
+ nodeTypesConn2B.put(nt, conVid);
+ vtxIdsConn2B.add(conVid);
+ }
+ }
+
+ // 1 - If this kind of node needs a dependent node for uniqueness, then
+ // verify that both nodes point to the same dependent
+ // node (otherwise they're not really duplicates)
+ // Note - there are sometimes more than one dependent node type since
+ // one nodeType can be used in different ways. But for a
+ // particular node, it will only have one dependent node that
+ // it's connected to.
+ Collection<String> depNodeTypes = loader.introspectorFromName(vtxANodeType).getDependentOn();
+ if (depNodeTypes.isEmpty()) {
+ // This kind of node is not dependent on any other. That is ok.
+ } else {
+ String depNodeVtxId4A = "";
+ String depNodeVtxId4B = "";
+ Iterator<String> iter = depNodeTypes.iterator();
+ while (iter.hasNext()) {
+ String depNodeType = iter.next();
+ if (nodeTypesConn2A.containsKey(depNodeType)) {
+ // This is the dependent node type that vertex A is using
+ depNodeVtxId4A = nodeTypesConn2A.get(depNodeType);
+ }
+ if (nodeTypesConn2B.containsKey(depNodeType)) {
+ // This is the dependent node type that vertex B is using
+ depNodeVtxId4B = nodeTypesConn2B.get(depNodeType);
+ }
+ }
+ if (depNodeVtxId4A.equals("")
+ || (!depNodeVtxId4A.equals(depNodeVtxId4B))) {
+ // Either they're not really dupes or there's some bad data - so
+ // don't pick either one
+ return nullVtx;
+ }
+ }
+
+ if (vtxIdsConn2A.size() == vtxIdsConn2B.size()) {
+ // 2 - If they both have edges to all the same vertices, then return
+ // the one with the lower vertexId.
+
+ // OR (2b)-- if this is the SPECIAL case -- of
+ // "tenant|vserver vs. tenant|service-subscription"
+ // then we pick/prefer the one that's connected to
+ // the service-subscription. AAI-8172
+ boolean allTheSame = true;
+ Iterator<String> iter = vtxIdsConn2A.iterator();
+ while (iter.hasNext()) {
+ String vtxIdConn2A = iter.next();
+ if (!vtxIdsConn2B.contains(vtxIdConn2A)) {
+ allTheSame = false;
+ break;
+ }
+ }
+
+ if (allTheSame) {
+ if (vidA < vidB) {
+ preferredVtx = vtxA;
+ } else {
+ preferredVtx = vtxB;
+ }
+ }
+ else if ( specialTenantRule ){
+ // They asked us to apply a special rule if it applies
+ if(vtxIdsConn2A.size() == 2 && vtxANodeType.equals("tenant") ){
+ // We're dealing with two tenant nodes which each just have
+ // two connections. One must be the parent (cloud-region)
+ // which we check in step 1 above. If one connects to
+ // a vserver and the other connects to a service-subscription,
+ // our special rule is to keep the one connected
+ // to the service-subscription.
+ if( nodeTypesConn2A.containsKey("vserver") && nodeTypesConn2B.containsKey("service-subscription") ){
+ String infMsg = " WARNING >>> we are using the special tenant rule to choose to " +
+ " delete tenant vtxId = " + vidA + ", and keep tenant vtxId = " + vidB ;
+ System.out.println(infMsg);
+ logger.info( infMsg );
+ preferredVtx = vtxB;
+ }
+ else if( nodeTypesConn2B.containsKey("vserver") && nodeTypesConn2A.containsKey("service-subscription") ){
+ String infMsg = " WARNING >>> we are using the special tenant rule to choose to " +
+ " delete tenant vtxId = " + vidB + ", and keep tenant vtxId = " + vidA ;
+ System.out.println(infMsg);
+ logger.info( infMsg );
+ preferredVtx = vtxA;
+ }
+ }
+ }
+ } else if (vtxIdsConn2A.size() > vtxIdsConn2B.size()) {
+ // 3 - VertexA is connected to more things than vtxB.
+ // We'll pick VtxA if its edges are a superset of vtxB's edges.
+ boolean missingOne = false;
+ Iterator<String> iter = vtxIdsConn2B.iterator();
+ while (iter.hasNext()) {
+ String vtxIdConn2B = iter.next();
+ if (!vtxIdsConn2A.contains(vtxIdConn2B)) {
+ missingOne = true;
+ break;
+ }
+ }
+ if (!missingOne) {
+ preferredVtx = vtxA;
+ }
+ } else if (vtxIdsConn2B.size() > vtxIdsConn2A.size()) {
+ // 4 - VertexB is connected to more things than vtxA.
+ // We'll pick VtxB if its edges are a superset of vtxA's edges.
+ boolean missingOne = false;
+ Iterator<String> iter = vtxIdsConn2A.iterator();
+ while (iter.hasNext()) {
+ String vtxIdConn2A = iter.next();
+ if (!vtxIdsConn2B.contains(vtxIdConn2A)) {
+ missingOne = true;
+ break;
+ }
+ }
+ if (!missingOne) {
+ preferredVtx = vtxB;
+ }
+ } else {
+ preferredVtx = nullVtx;
+ }
+
+ return (preferredVtx);
+
+ } // end of pickOneOfTwoDupes()
+
+
+ /**
+ * Group verts by dep nodes.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param g the g
+ * @param version the version
+ * @param nType the n type
+ * @param passedVertList the passed vert list
+ * @param loader the loader
+ * @return the hash map
+ * @throws AAIException the AAI exception
+ */
+ private static HashMap<String, ArrayList<Vertex>> groupVertsByDepNodes(
+ String transId, String fromAppId, Graph g, String version,
+ String nType, ArrayList<Vertex> passedVertList, Loader loader)
+ throws AAIException {
+
+ // Given a list of Titan Vertices, group them together by dependent
+ // nodes. Ie. if given a list of ip address nodes (assumed to all
+ // have the same key info) they might sit under several different
+ // parent vertices.
+ // Under Normal conditions, there would only be one per parent -- but
+ // we're trying to find duplicates - so we allow for the case
+ // where more than one is under the same parent node.
+
+ HashMap<String, ArrayList<Vertex>> retHash = new HashMap<String, ArrayList<Vertex>>();
+
+ // Find out what types of nodes the passed in nodes can depend on
+ ArrayList<String> depNodeTypeL = new ArrayList<String>();
+ Collection<String> depNTColl = loader.introspectorFromName(nType).getDependentOn();
+ Iterator<String> ntItr = depNTColl.iterator();
+ while (ntItr.hasNext()) {
+ depNodeTypeL.add(ntItr.next());
+ }
+ // For each vertex they passed us, we want to find the vertex it
+ // is dependent on so we can keep track of who-all is connected
+ // to that parent.
+ if (passedVertList != null) {
+ Iterator<Vertex> iter = passedVertList.iterator();
+ while (iter.hasNext()) {
+ Vertex thisVert = iter.next();
+ Iterator <String> depNtItr = depNTColl.iterator();
+ while (depNtItr.hasNext()) {
+ GraphTraversal<Vertex, Vertex> modPipe = null;
+ // NOTE -- if we change the direction of parent/child edges, we will need
+ // the "in" below to become "out"
+ modPipe = g.traversal().V(thisVert).in().has("aai-node-type", depNtItr.next() );
+ if( modPipe == null || !modPipe.hasNext() ){
+ //System.out.println("DEBUG - didn't find any [" + targetStep + "] connected to this guy (which is ok)");
+ }
+ else {
+ while( modPipe.hasNext() ){
+ Vertex depVert = modPipe.next();
+ String parentVid = depVert.id().toString();
+ if (retHash.containsKey(parentVid)) {
+ // add this vert to the list for this parent key
+ retHash.get(parentVid).add(thisVert);
+ } else {
+ // This is the first one we found on this parent
+ ArrayList<Vertex> vList = new ArrayList<Vertex>();
+ vList.add(thisVert);
+ retHash.put(parentVid, vList);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return retHash;
+
+ }// end of groupVertsByDepNodes()
+
+
+ /**
+ * Delete non keepers if appropriate.
+ *
+ * @param g the g
+ * @param dupeInfoList the list of dupe info strings
+ * @param logger the EELFLogger
+ * @return the boolean
+ */
+ private static Boolean deleteNonKeepers(Graph g,
+ ArrayList<String> dupeInfoList, EELFLogger logger ) {
+
+ // This assumes that each dupeInfoString is in the format of
+ // pipe-delimited vid's followed by either "KeepVid=xyz" or "KeepVid=UNDETERMINED"
+ // ie. "3456|9880|KeepVid=3456"
+
+ boolean didADelFlag = false;
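+ // Process each dupe set independently and OR the results together, so we report
+ // whether anything at all was deleted across all the sets.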
+ for( int n = 0; n < dupeInfoList.size(); n++ ){
+ String dupeInfoString = dupeInfoList.get(n);
+ boolean tmpFlag = deleteNonKeeperForOneSet( g, dupeInfoString, logger );
+ didADelFlag = tmpFlag | didADelFlag;
+ }
+
+ return didADelFlag;
+
+ }// end of deleteNonKeepers()
+
+
+ /**
+ * Delete non keepers if appropriate.
+ *
+ * @param g the g
+ * @param dupeInfoString the dupe info string
+ * @param logger the EELFLogger
+ * @return the boolean
+ */
+ private static Boolean deleteNonKeeperForOneSet(Graph g,
+ String dupeInfoString, EELFLogger logger ) {
+
+ Boolean deletedSomething = false;
+ // This assumes that each dupeInfoString is in the format of
+ // pipe-delimited vid's followed by either "KeepVid=xyz" or "KeepVid=UNDETERMINED"
+ // ie. "3456|9880|KeepVid=3456"
+
+
+ String[] dupeArr = dupeInfoString.split("\\|");
+ ArrayList<String> idArr = new ArrayList<String>();
+ int lastIndex = dupeArr.length - 1;
+ for (int i = 0; i <= lastIndex; i++) {
+ if (i < lastIndex) {
+ // This is not the last entry, it is one of the dupes,
+ String vidString = dupeArr[i];
+ idArr.add(vidString);
+ } else {
+ // This is the last entry which should tell us if we have a
+ // preferred keeper
+ String prefString = dupeArr[i];
+ if (prefString.equals("KeepVid=UNDETERMINED")) {
+ // No preferred keeper was determined for this set -- nothing should be
+ // deleted if no dupe could be tagged as preferred.
+ return false;
+ } else {
+ // If we know which to keep, then the prefString should look
+ // like, "KeepVid=12345"
+ String[] prefArr = prefString.split("=");
+ if (prefArr.length != 2 || (!prefArr[0].equals("KeepVid"))) {
+ String emsg = "Bad format. Expecting KeepVid=999999";
+ System.out.println(emsg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error(emsg);
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ LoggingContext.responseCode(LoggingContext.SUCCESS);
+ return false;
+ } else {
+ String keepVidStr = prefArr[1];
+ if (idArr.contains(keepVidStr)) {
+ idArr.remove(keepVidStr);
+ // So now, the idArr should just contain the vid's
+ // that we want to remove.
+ for (int x = 0; x < idArr.size(); x++) {
+ boolean okFlag = true;
+ String thisVid = idArr.get(x);
+ try {
+ long longVertId = Long.parseLong(thisVid);
+ Vertex vtx = g.traversal().V(longVertId).next();
+ String msg = "--->>> We will delete node with VID = " + thisVid + " <<<---";
+ System.out.println(msg);
+ logger.info(msg);
+ vtx.remove();
+ }
+ catch (Exception e) {
+ okFlag = false;
+ String emsg = "ERROR trying to delete VID = " + thisVid + ", [" + e + "]";
+ System.out.println(emsg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error(emsg);
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ LoggingContext.responseCode(LoggingContext.SUCCESS);
+ }
+ if (okFlag) {
+ String infMsg = " DELETED VID = " + thisVid;
+ logger.info(infMsg);
+ System.out.println(infMsg);
+ deletedSomething = true;
+ }
+ }
+ } else {
+ String emsg = "ERROR - Vertex Id to keep not found in list of dupes. dupeInfoString = ["
+ + dupeInfoString + "]";
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error(emsg);
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ LoggingContext.responseCode(LoggingContext.SUCCESS);
+ System.out.println(emsg);
+ return false;
+ }
+ }
+ }// else we know which one to keep
+ }// else last entry
+ }// for each vertex in a group
+
+ return deletedSomething;
+
+ }// end of deleteNonKeeperForOneSet()
+
+
+ /**
+ * Get values of the key properties for a node.
+ *
+ * @param tvx the vertex to pull the properties from
+ * @param keyPropNamesArr ArrayList (ordered) of key prop names
+ * @param logger the EELFLogger
+ * @return a hashMap of the propertyNames/values
+ */
+ private static HashMap <String,Object> getNodeKeyVals( Vertex tvx,
+ ArrayList <String> keyPropNamesArr, EELFLogger logger ) {
+
+ HashMap <String,Object> retHash = new HashMap <String,Object>();
+ Iterator <String> propItr = keyPropNamesArr.iterator();
+ while( propItr.hasNext() ){
+ String propName = propItr.next();
+ if( tvx != null ){
+ Object propValObj = tvx.property(propName).orElse(null);
+ retHash.put(propName, propValObj);
+ }
+ }
+ return retHash;
+
+ }// End of getNodeKeyVals()
+
+
+ /**
+ * Get values of the key properties for a node as a single string
+ *
+ * @param tvx the vertex to pull the properties from
+ * @param keyPropNamesArr ArrayList (ordered) of key prop names
+ * @param logger the EELFLogger
+ * @return a String of concatenated values
+ */
+ private static String getNodeKeyValString( Vertex tvx,
+ ArrayList <String> keyPropNamesArr, EELFLogger logger ) {
+
+ // -- NOTE -- for what we're using this for, we would need to
+ // guarantee that the properties are always in the same order
+
+ String retString = "";
+ Iterator <String> propItr = keyPropNamesArr.iterator();
+ while( propItr.hasNext() ){
+ String propName = propItr.next();
+ if( tvx != null ){
+ Object propValObj = tvx.property(propName).orElse(null);
+ retString = " " + retString + propValObj.toString();
+ }
+ }
+ return retString;
+
+ }// End of getNodeKeyValString()
+
+
+ /**
+ * Find duplicate sets from two dupe runs.
+ *
+ * @param firstPassDupeSets from the first pass
+ * @param secondPassDupeSets from the second pass
+ * @param logger the EELFLogger
+ * @return commonDupeSets that are common to both passes and have a determined keeper
+ */
+ private static ArrayList <String> figureWhichDupesStillNeedFixing( ArrayList <String>firstPassDupeSets,
+ ArrayList <String> secondPassDupeSets, EELFLogger logger ){
+
+ ArrayList <String> common2BothSet = new ArrayList <String> ();
+
+ // We just want to look for entries from the first set which have (almost) identical
+ // entries in the secondary set. I say "almost" because the vid's to delete may be
+ // listed in a different order, but we only want to use an entry if
+ // it has all the same values. Note also - we're just looking for
+ // the sets where we have a candidate to delete.
+
+ // The duplicate-set Strings are in this format:
+ // "1234|5678|keepVid=UNDETERMINED" (if there were 2 dupes, and we
+ // couldn't figure out which one to keep)
+ // or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
+ // thought the third one was the one that should survive)
+
+ if( firstPassDupeSets == null || firstPassDupeSets.isEmpty()
+ || secondPassDupeSets == null || secondPassDupeSets.isEmpty() ){
+ // If either set is empty, then our return list has to be empty too
+ return common2BothSet;
+ }
+
+ boolean needToParse = false;
+ for( int x = 0; x < secondPassDupeSets.size(); x++ ){
+ String secPassDupeSetStr = secondPassDupeSets.get(x);
+ if( secPassDupeSetStr.endsWith("UNDETERMINED") ){
+ // This is a set of dupes where we could not pick one
+ // to delete - so don't include it on our list for
+ // fixing.
+ }
+ else if( firstPassDupeSets.contains(secPassDupeSetStr) ){
+ // We have lucked out and do not even need to parse this since
+ // it was in the other array with any dupes listed in the same order
+ // This is actually the most common scenario since there is
+ // usually only one dupe, so order doesn't matter.
+ common2BothSet.add(secPassDupeSetStr);
+ }
+ else {
+ // We'll need to do some parsing to check this one
+ needToParse = true;
+ }
+ }
+
+ if( needToParse ){
+ // Make a hash from the first and second Pass data
+ // where the key is the vid to KEEP and the value is an
+ // array of (String) vids that would get deleted.
+ HashMap <String,ArrayList<String>> firstPassHash = makeKeeperHashOfDupeStrings( firstPassDupeSets, common2BothSet, logger );
+
+ HashMap <String,ArrayList<String>> secPassHash = makeKeeperHashOfDupeStrings( secondPassDupeSets, common2BothSet, logger );
+
+ // Loop through the secondPass data and keep the ones
+ // that check out against the firstPass set.
+ for( Map.Entry<String, ArrayList<String>> entry : secPassHash.entrySet() ){
+ boolean skipThisOne = false;
+ String secKey = entry.getKey();
+ ArrayList <String> secList = entry.getValue();
+ if( !firstPassHash.containsKey(secKey) ){
+ // The second pass found this delete candidate, but not the first pass
+ skipThisOne = true;
+ }
+ else {
+ // They both think they should keep this VID, check the associated deletes for it.
+ ArrayList <String> firstList = firstPassHash.get(secKey);
+ for( int z = 0; z < secList.size(); z++ ){
+ if( !firstList.contains(secList.get(z)) ){
+ // The first pass did not think this needed to be deleted
+ skipThisOne = true;
+ }
+ }
+ }
+ if( !skipThisOne ){
+ // Put the string back together and pass it back
+ // Not beautiful, but no time to make it nice right now...
+ // Put it back in the format: "3456|9880|KeepVid=3456"
+ String thisDelSetStr = "";
+ for( int z = 0; z < secList.size(); z++ ){
+ if( z == 0 ){
+ thisDelSetStr = secList.get(z);
+ }
+ else {
+ thisDelSetStr = thisDelSetStr + "|" + secList.get(z);
+ }
+ }
+ thisDelSetStr = thisDelSetStr + "|keepVid=" + secKey;
+ common2BothSet.add(thisDelSetStr);
+ }
+ }
+
+ }
+ return common2BothSet;
+
+ }// figureWhichDupesStillNeedFixing
+
+
+ private static HashMap <String, ArrayList <String>> makeKeeperHashOfDupeStrings( ArrayList <String> dupeSets,
+ ArrayList <String> excludeSets, EELFLogger logger ){
+
+ HashMap <String,ArrayList<String>> keeperHash = new HashMap <String, ArrayList<String>>();
+
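+ // For example, the dupe string "100|200|KeepVid=200" maps the keeper vid "200"
+ // to the list of vids ["100", "200"] taken from that dupe set.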
+ for( int x = 0; x < dupeSets.size(); x++ ){
+ String tmpSetStr = dupeSets.get(x);
+ if( excludeSets.contains(tmpSetStr) ){
+ // This isn't one of the ones we needed to parse.
+ continue;
+ }
+
+ String[] dupeArr = tmpSetStr.split("\\|");
+ ArrayList<String> delIdArr = new ArrayList<String>();
+ int lastIndex = dupeArr.length - 1;
+ for (int i = 0; i <= lastIndex; i++) {
+ if (i < lastIndex) {
+ // This is not the last entry, it is one of the dupes
+ delIdArr.add(dupeArr[i]);
+ }
+ else {
+ // This is the last entry which should tell us if we
+ // have a preferred keeper and how many dupes we had
+ String prefString = dupeArr[i];
+ if( i == 1 ){
+ // There was only one dupe, so if we were gonna find
+ // it, we would have found it above with no parsing.
+ }
+ else if (prefString.equals("KeepVid=UNDETERMINED")) {
+ // This one had no determined keeper, so we don't
+ // want it.
+ }
+ else {
+ // If we know which to keep, then the prefString
+ // should look like, "KeepVid=12345"
+ String[] prefArr = prefString.split("=");
+ if( prefArr.length != 2
+ || (!prefArr[0].equals("KeepVid")) ) {
+ String infMsg = "Bad format in figureWhichDupesStillNeedFixing(). Expecting " +
+ " KeepVid=999999 but string looks like: [" + tmpSetStr + "]";
+ System.out.println(infMsg);
+ logger.info(infMsg);
+ }
+ else {
+ // Key the hash by the vid we intend to keep (prefArr[1]), not the literal "KeepVid" label.
+ keeperHash.put(prefArr[1], delIdArr);
+ }
+ }
+ }
+ }
+ }
+
+ return keeperHash;
+
+ }// End of makeKeeperHashOfDupeStrings()
+
+
+ /**
+ * Show the details of each node in a set of duplicates, and which one (if any) would be kept.
+ *
+ * @param g the g
+ * @param dupeInfoString the dupe info string
+ * @param logger the EELFLogger
+ */
+ static private void showNodeDetailsForADupeSet(Graph g, String dupeInfoString, EELFLogger logger) {
+
+ // dang... parsing this string once again...
+
+ String[] dupeArr = dupeInfoString.split("\\|");
+ int lastIndex = dupeArr.length - 1;
+ for (int i = 0; i <= lastIndex; i++) {
+ if (i < lastIndex) {
+ // This is not the last entry, it is one of the dupes,
+ String vidString = dupeArr[i];
+ long longVertId = Long.parseLong(vidString);
+ Vertex vtx = g.traversal().V(longVertId).next();
+ showNodeInfo(logger, vtx, false);
+ } else {
+ // This is the last entry which should tell us if we have a
+ // preferred keeper
+ String prefString = dupeArr[i];
+ if (prefString.equals("KeepVid=UNDETERMINED")) {
+ String msg = " Our algorithm cannot choose from among these, so they will all be kept. -------\n";
+ System.out.println(msg);
+ logger.info(msg);
+ } else {
+ // If we know which to keep, then the prefString should look
+ // like, "KeepVid=12345"
+ String[] prefArr = prefString.split("=");
+ if (prefArr.length != 2 || (!prefArr[0].equals("KeepVid"))) {
+ String emsg = "Bad format. Expecting KeepVid=999999";
+ System.out.println(emsg);
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ logger.error(emsg);
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ LoggingContext.responseCode(LoggingContext.SUCCESS);
+ } else {
+ String keepVidStr = prefArr[1];
+ String msg = " vid = " + keepVidStr + " is the one that we would KEEP. ------\n";
+ System.out.println(msg);
+ logger.info(msg);
+ }
+ }
+ }
+ }
+
+ }// End of showNodeDetailsForADupeSet()
+
+ private static int graphIndex = 1;
+
+ public static TitanGraph setupGraph(EELFLogger logger){
+
+ TitanGraph titanGraph = null;
+
+
+ try (InputStream inputStream = new FileInputStream(AAIConstants.REALTIME_DB_CONFIG);){
+
+ Properties properties = new Properties();
+ properties.load(inputStream);
+
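+ // An "inmemory" storage backend (typically used for local testing) reuses the shared
+ // AAIGraph instance; otherwise a dedicated realtime Titan connection is opened, with
+ // graphIndex keeping each connection's graph type name unique.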
+ if("inmemory".equals(properties.get("storage.backend"))){
+ titanGraph = AAIGraph.getInstance().getGraph();
+ graphType = "inmemory";
+ } else {
+ titanGraph = TitanFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(DupeTool.class.getSimpleName()).withGraphType("realtime" + graphIndex).buildConfiguration());
+ graphIndex++;
+ }
+ } catch (Exception e) {
+ logger.error("Unable to open the graph", LogFormatTools.getStackTop(e));
+ }
+
+ return titanGraph;
+ }
+
+ public static void closeGraph(TitanGraph graph, EELFLogger logger){
+
+ try {
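+ // The shared in-memory graph is left open (other code may still be using it);
+ // only a dedicated realtime connection is closed here.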
+ if("inmemory".equals(graphType)) {
+ return;
+ }
+ if( graph != null && graph.isOpen() ){
+ graph.tx().close();
+ graph.close();
+ }
+ } catch (Exception ex) {
+ // Don't throw anything because Titan sometimes is just saying that the graph is already closed.
+ logger.warn("WARNING from final graph.close()", ex);
+ }
+ }
+}
+
+
diff --git a/aai-resources/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java b/aai-resources/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java
index 9a7fc39..7dc43d7 100644
--- a/aai-resources/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java
+++ b/aai-resources/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java
@@ -20,12 +20,16 @@
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*/
package org.onap.aai.dbgen;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Properties;
import java.util.Scanner;
import java.util.UUID;
+import org.apache.commons.configuration.ConfigurationException;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.structure.Direction;
@@ -33,7 +37,9 @@ import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.VertexProperty;
import org.onap.aai.dbmap.AAIGraphConfig;
+import org.onap.aai.dbmap.AAIGraph;
import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.LogFormatTools;
import org.onap.aai.logging.LoggingContext;
import org.onap.aai.logging.LoggingContext.StatusCode;
import org.onap.aai.serialization.db.AAIDirection;
@@ -53,6 +59,20 @@ import com.thinkaurelius.titan.core.TitanGraph;
public class ForceDeleteTool {
private static final String FROMAPPID = "AAI-DB";
private static final String TRANSID = UUID.randomUUID().toString();
+
+ private static String graphType = "realdb";
+
+ public static boolean SHOULD_EXIT_VM = true;
+
+ public static int EXIT_VM_STATUS_CODE = -1;
+
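+ // Presumably a test hook: when SHOULD_EXIT_VM is false the JVM is not killed and the
+ // would-be status code is captured in EXIT_VM_STATUS_CODE instead.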
+ public static void exit(int statusCode){
+ if(SHOULD_EXIT_VM){
+ System.exit(1);
+ }
+ EXIT_VM_STATUS_CODE = statusCode;
+ }
+
/*
* The main method.
*
@@ -101,7 +121,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(" No value passed with -action option. ");
- System.exit(0);
+ exit(0);
}
actionVal = args[i];
argStr4Msg = argStr4Msg + " " + actionVal;
@@ -112,7 +132,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(" No value passed with -userId option. ");
- System.exit(0);
+ exit(0);
}
userIdVal = args[i];
argStr4Msg = argStr4Msg + " " + userIdVal;
@@ -129,7 +149,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(" No value passed with -vertexId option. ");
- System.exit(0);
+ exit(0);
}
String nextArg = args[i];
argStr4Msg = argStr4Msg + " " + nextArg;
@@ -140,7 +160,7 @@ public class ForceDeleteTool {
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error("Bad value passed with -vertexId option: ["
+ nextArg + "]");
- System.exit(0);
+ exit(0);
}
}
else if (thisArg.equals("-params4Collect")) {
@@ -149,7 +169,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(" No value passed with -params4Collect option. ");
- System.exit(0);
+ exit(0);
}
dataString = args[i];
argStr4Msg = argStr4Msg + " " + dataString;
@@ -160,7 +180,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(" No value passed with -edgeId option. ");
- System.exit(0);
+ exit(0);
}
String nextArg = args[i];
argStr4Msg = argStr4Msg + " " + nextArg;
@@ -172,7 +192,7 @@ public class ForceDeleteTool {
logger.error(" Unrecognized argument passed to ForceDeleteTool: ["
+ thisArg + "]. ");
logger.error(" Valid values are: -action -userId -vertexId -edgeId -overRideProtection -params4Collect -DISPLAY_ALL_VIDS");
- System.exit(0);
+ exit(0);
}
}
}
@@ -183,7 +203,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(emsg);
- System.exit(0);
+ exit(0);
}
if( actionVal.equals("DELETE_NODE") && vertexIdLong == 0 ){
@@ -192,7 +212,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(emsg);
- System.exit(0);
+ exit(0);
}
else if( actionVal.equals("DELETE_EDGE") && edgeIdStr.equals("")){
String emsg = "ERROR: No edge ID passed on DELETE_EDGE request. \n";
@@ -200,7 +220,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(emsg);
- System.exit(0);
+ exit(0);
}
@@ -211,7 +231,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(emsg);
- System.exit(0);
+ exit(0);
}
String msg = "";
@@ -219,14 +239,14 @@ public class ForceDeleteTool {
try {
AAIConfig.init();
System.out.println(" ---- NOTE --- about to open graph (takes a little while)--------\n");
- graph = TitanFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(ForceDelete.class.getSimpleName()).withGraphType("realtime1").buildConfiguration());
+ graph = setupGraph(logger);
if( graph == null ){
String emsg = "could not get graph object in ForceDeleteTool() \n";
System.out.println(emsg);
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
logger.error(emsg);
- System.exit(0);
+ exit(0);
}
}
catch (AAIException e1) {
@@ -235,7 +255,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
logger.error(msg);
- System.exit(0);
+ exit(0);
}
catch (Exception e2) {
msg = e2.toString();
@@ -243,7 +263,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
logger.error(msg);
- System.exit(0);
+ exit(0);
}
msg = "ForceDelete called by: userId [" + userIdVal + "] with these params: [" + argStr4Msg + "]";
@@ -265,7 +285,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(msg);
- System.exit(0);
+ exit(0);
}
GraphTraversal<Vertex, Vertex> g = graph.traversal().V();
String qStringForMsg = " graph.traversal().V()";
@@ -279,7 +299,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logger.error(msg);
- System.exit(0);
+ exit(0);
}
else {
String propName = paramArr[i].substring(0,pipeLoc);
@@ -306,7 +326,7 @@ public class ForceDeleteTool {
LoggingContext.statusCode(StatusCode.ERROR);
LoggingContext.responseCode(LoggingContext.DATA_ERROR);
logger.error(msg);
- System.exit(0);
+ exit(0);
}
String infMsg = "\n\n Found: " + resCount + " nodes for this query: [" + qStringForMsg + "]\n";
@@ -360,7 +380,7 @@ public class ForceDeleteTool {
String infMsg = ">>>>>>>>>> Edge with edgeId = " + edgeIdStr + " not found.";
logger.info( infMsg );
System.out.println(infMsg);
- System.exit(0);
+ exit(0);
}
if( fd.getEdgeDelConfirmation(logger, userIdVal, thisEdge, overRideProtection) ){
@@ -375,16 +395,17 @@ public class ForceDeleteTool {
System.out.println(infMsg);
logger.info( infMsg );
}
- System.exit(0);
+ exit(0);
}
else {
String emsg = "Unknown action parameter [" + actionVal + "] passed to ForceDeleteTool(). Valid values = COLLECT_DATA, DELETE_NODE or DELETE_EDGE \n";
System.out.println(emsg);
logger.info( emsg );
- System.exit(0);
+ exit(0);
}
- System.exit(0);
+ closeGraph(graph, logger);
+ exit(0);
}// end of main()
@@ -792,7 +813,49 @@ public class ForceDeleteTool {
} // End of getNodeDelConfirmation()
}
-
+
+ public static TitanGraph setupGraph(EELFLogger logger){
+
+ TitanGraph titanGraph = null;
+
+ try (InputStream inputStream = new FileInputStream(AAIConstants.REALTIME_DB_CONFIG);){
+
+ Properties properties = new Properties();
+ properties.load(inputStream);
+
+ if("inmemory".equals(properties.get("storage.backend"))){
+ titanGraph = AAIGraph.getInstance().getGraph();
+ graphType = "inmemory";
+ } else {
+ titanGraph = TitanFactory.open(
+ new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG)
+ .forService(ForceDeleteTool.class.getSimpleName())
+ .withGraphType("realtime1")
+ .buildConfiguration()
+ );
+ }
+ } catch (Exception e) {
+ logger.error("Unable to open the graph", LogFormatTools.getStackTop(e));
+ }
+
+ return titanGraph;
+ }
+
+ public static void closeGraph(TitanGraph graph, EELFLogger logger){
+
+ try {
+ if("inmemory".equals(graphType)) {
+ return;
+ }
+ if( graph != null && graph.isOpen() ){
+ graph.tx().close();
+ graph.close();
+ }
+ } catch (Exception ex) {
+ // Don't throw anything because Titan sometimes is just saying that the graph is already closed.
+ logger.warn("WARNING from final graph.close()", ex);
+ }
+ }
}
diff --git a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/util/ServicePropertiesMapBean.java b/aai-resources/src/main/java/org/onap/aai/interceptors/AAIContainerFilter.java
index 71c290b..fdd3edb 100644
--- a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/util/ServicePropertiesMapBean.java
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/AAIContainerFilter.java
@@ -19,20 +19,25 @@
*
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*/
-package org.onap.aai.ajsc_aai.util;
+package org.onap.aai.interceptors;
-import org.onap.aai.ajsc_aai.filemonitor.ServicePropertiesMap;
+import java.util.UUID;
-public class ServicePropertiesMapBean {
+import org.onap.aai.util.FormatDate;
- /**
- * Gets the property.
- *
- * @param propFileName the prop file name
- * @param propertyKey the property key
- * @return the property
- */
- public static String getProperty(String propFileName, String propertyKey) {
- return ServicePropertiesMap.getProperty(propFileName, propertyKey);
+public abstract class AAIContainerFilter {
+
+ protected String genDate() {
+ FormatDate fd = new FormatDate("YYMMdd-HH:mm:ss:SSS");
+ return fd.getDateTime();
+ }
+
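+ // Returns true only when the passed transaction id parses as a UUID; presumably used by
+ // the concrete interceptor filters to validate the X-TransactionId header.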
+ protected boolean isValidUUID(String transId) {
+ try {
+ UUID.fromString(transId);
+ } catch (IllegalArgumentException e) {
+ return false;
+ }
+ return true;
}
}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java b/aai-resources/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java
index 733383a..8eca9b6 100644
--- a/aai-resources/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java
@@ -21,7 +21,21 @@
*/
package org.onap.aai.interceptors;
-public class AAIHeaderProperties {
-
+public final class AAIHeaderProperties {
+
+ private AAIHeaderProperties(){}
+
public static final String REQUEST_CONTEXT = "aai-request-context";
+
+ public static final String HTTP_METHOD_OVERRIDE = "X-HTTP-Method-Override";
+
+ public static final String TRANSACTION_ID = "X-TransactionId";
+
+ public static final String FROM_APP_ID = "X-FromAppId";
+
+ public static final String AAI_TX_ID = "X-AAI-TXID";
+
+ public static final String AAI_REQUEST = "X-REQUEST";
+
+ public static final String AAI_REQUEST_TS = "X-REQUEST-TS";
}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/AAILogJAXRSInInterceptor.java b/aai-resources/src/main/java/org/onap/aai/interceptors/AAILogJAXRSInInterceptor.java
deleted file mode 100644
index 7d8112d..0000000
--- a/aai-resources/src/main/java/org/onap/aai/interceptors/AAILogJAXRSInInterceptor.java
+++ /dev/null
@@ -1,285 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.onap.aai.interceptors;
-
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.ws.rs.core.MediaType;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.cxf.helpers.CastUtils;
-import org.apache.cxf.interceptor.LoggingMessage;
-import org.apache.cxf.jaxrs.interceptor.JAXRSInInterceptor;
-import org.apache.cxf.message.Message;
-import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.logging.ErrorLogHelper;
-import org.onap.aai.rest.util.EchoResponse;
-import org.onap.aai.util.AAIConfig;
-import org.onap.aai.util.AAIConstants;
-import org.onap.aai.util.FormatDate;
-import org.onap.aai.util.HbaseSaltPrefixer;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.slf4j.MDC;
-
-public class AAILogJAXRSInInterceptor extends JAXRSInInterceptor {
-
- protected final String COMPONENT = "aairest";
- protected final String CAMEL_REQUEST ="CamelHttpUrl";
- private static final Pattern uuidPattern = Pattern.compile("^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$");
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(AAILogJAXRSInInterceptor.class);
-
- /**
- * {@inheritDoc}
- */
- public void handleMessage(Message message) {
-
- boolean go = false;
- String uri = null;
- String query = null;
- try {
-
- uri = (String)message.get(CAMEL_REQUEST);
- if (uri != null) {
- query = (String)message.get(Message.QUERY_STRING);
- }
-
- if (AAIConfig.get(AAIConstants.AAI_LOGGING_HBASE_INTERCEPTOR).equalsIgnoreCase("true") &&
- AAIConfig.get(AAIConstants.AAI_LOGGING_HBASE_ENABLED).equalsIgnoreCase("true")) {
- go = true;
- message.getExchange().put("AAI_LOGGING_HBASE_ENABLED", 1);
- if (AAIConfig.get(AAIConstants.AAI_LOGGING_HBASE_LOGREQUEST).equalsIgnoreCase("true") ) {
- message.getExchange().put("AAI_LOGGING_HBASE_LOGREQUEST", 1);
- }
- if (AAIConfig.get(AAIConstants.AAI_LOGGING_HBASE_LOGRESPONSE).equalsIgnoreCase("true") ) {
- message.getExchange().put("AAI_LOGGING_HBASE_LOGRESPONSE", 1);
- }
- }
- if (AAIConfig.get(AAIConstants.AAI_LOGGING_TRACE_ENABLED).equalsIgnoreCase("true") ) {
- go = true;
- message.getExchange().put("AAI_LOGGING_TRACE_ENABLED", 1);
- if (AAIConfig.get(AAIConstants.AAI_LOGGING_TRACE_LOGREQUEST).equalsIgnoreCase("true") ) {
- message.getExchange().put("AAI_LOGGING_TRACE_LOGREQUEST", 1);
- }
- if (AAIConfig.get(AAIConstants.AAI_LOGGING_TRACE_LOGRESPONSE).equalsIgnoreCase("true") ) {
- message.getExchange().put("AAI_LOGGING_TRACE_LOGRESPONSE", 1);
- }
- }
- } catch (AAIException e1) {
- ErrorLogHelper.logException(e1);
- }
-
- if (uri.contains(EchoResponse.echoPath)) {
- // if it's a health check, we don't want to log ANYTHING if it's a lightweight one
- if (query == null) {
- if (message.getExchange().containsKey("AAI_LOGGING_HBASE_ENABLED")) {
- message.getExchange().remove("AAI_LOGGING_HBASE_ENABLED");
- }
- if (message.getExchange().containsKey("AAI_LOGGING_TRACE_ENABLED")) {
- message.getExchange().remove("AAI_LOGGING_TRACE_ENABLED");
- }
- go = false;
- }
- }
- else if (uri.contains("/translog/")) {
- // if it's a translog query, we don't want to log the responses
- if (message.getExchange().containsKey("AAI_LOGGING_HBASE_LOGRESPONSE")) {
- message.getExchange().remove("AAI_LOGGING_HBASE_LOGRESPONSE");
- }
- if (message.getExchange().containsKey("AAI_LOGGING_TRACE_LOGRESPONSE")) {
- message.getExchange().remove("AAI_LOGGING_TRACE_LOGRESPONSE");
- }
- }
-
- if (go == false) { // there's nothing to do
- return;
- }
-
- // DONE: get a TXID based on hostname, time (YYYYMMDDHHMMSSMILLIS, and LoggingMessage.nextId(); 20150326145301-1
- String now = genDate();
-
- message.getExchange().put("AAI_RQST_TM", now);
-
- String id = (String)message.getExchange().get(LoggingMessage.ID_KEY);
-
- String fullId = null;
- try {
- if (id == null) {
- id = LoggingMessage.nextId();
- }
- fullId = AAIConfig.get(AAIConstants.AAI_NODENAME) + "-" + now + "-" + id;
- fullId = HbaseSaltPrefixer.getInstance().prependSalt(fullId);
- message.getExchange().put(LoggingMessage.ID_KEY, fullId);
- } catch (AAIException e1) {
- LOGGER.debug("config problem", e1);
- }
-
- if (fullId == null) {
- fullId = now + "-" + id;
- fullId = HbaseSaltPrefixer.getInstance().prependSalt(fullId);
- }
- message.put(LoggingMessage.ID_KEY, fullId);
- final LoggingMessage buffer = new LoggingMessage("Message", fullId);
-
- Integer responseCode = (Integer)message.get(Message.RESPONSE_CODE);
- if (responseCode != null) {
- buffer.getResponseCode().append(responseCode);
- }
-
- String encoding = (String)message.get(Message.ENCODING);
-
- if (encoding != null) {
- buffer.getEncoding().append(encoding);
- }
- String httpMethod = (String)message.get(Message.HTTP_REQUEST_METHOD);
- if (httpMethod != null) {
- buffer.getHttpMethod().append(httpMethod);
- }
-
- String ct = (String)message.get(Message.CONTENT_TYPE);
- if (ct != null) {
- if ("*/*".equals(ct)) {
- message.put(Message.CONTENT_TYPE, MediaType.APPLICATION_JSON);
- ct = MediaType.APPLICATION_JSON;
- }
- buffer.getContentType().append(ct);
-
- }
- Object headers = message.get(Message.PROTOCOL_HEADERS);
- if (headers != null) {
- buffer.getHeader().append(headers);
-
- Map<String, List<String>> headersList = CastUtils.cast((Map<?, ?>)message.get(Message.PROTOCOL_HEADERS));
- String transId = "";
- List<String> xt = headersList.get("X-TransactionId");
- String newTransId = transId;
- boolean missingTransId = false;
- boolean replacedTransId = false;
- String logMsg = null;
- if (xt != null) {
- for (String transIdValue : xt) {
- transId = transIdValue;
- }
- Matcher matcher = uuidPattern.matcher(transId);
- if (!matcher.find()) {
- replacedTransId = true;
- // check if there's a colon, and check the first group?
- if (transId.contains(":")) {
- String[] uuidParts = transId.split(":");
- Matcher matcher2 = uuidPattern.matcher(uuidParts[0]);
- if (matcher2.find()) {
- newTransId = uuidParts[0];
- } else {
- // punt, we tried to find it, it has a colon but no UUID-1
- newTransId = UUID.randomUUID().toString();
- }
- } else {
- newTransId = UUID.randomUUID().toString();
- }
- }
- } else {
- newTransId = UUID.randomUUID().toString();
- missingTransId = true;
- }
-
- if (missingTransId || replacedTransId) {
- List<String> txList = new ArrayList<String>();
- txList.add(newTransId);
- headersList.put("X-TransactionId", txList);
- if (missingTransId) {
- logMsg = "Missing requestID. Assigned " + newTransId;
- } else if (replacedTransId) {
- logMsg = "Replaced invalid requestID of " + transId + " Assigned " + newTransId;
- }
- MDC.put("RequestId",newTransId);
- }
- else {
- MDC.put("RequestId",transId);
- }
-
- List<String> fromAppIdList = headersList.get("X-FromAppId");
- if (fromAppIdList != null) {
- String fromAppId = null;
- for (String fromAppIdValue : fromAppIdList) {
- fromAppId = fromAppIdValue;
- }
- MDC.put("PartnerName",fromAppId);
- }
-
- List<String> contentType = headersList.get("Content-Type");
- if (contentType == null) {
- ct = (String)message.get(Message.CONTENT_TYPE);
- headersList.put(Message.CONTENT_TYPE, Collections.singletonList(ct));
- }
-
- LOGGER.auditEvent("REST " + httpMethod + " " + ((query != null)? uri+"?"+query : uri) + " HbaseTxId=" + fullId);
- LOGGER.info(logMsg);
- }
-
-
- if (uri != null) {
- buffer.getAddress().append(uri);
- if (query != null) {
- buffer.getAddress().append("?").append(query);
- }
- }
-
- InputStream is = message.getContent(InputStream.class);
- if (is != null) {
- try {
- String currentPayload = IOUtils.toString(is, "UTF-8");
- IOUtils.closeQuietly(is);
- buffer.getPayload().append(currentPayload);
- is = IOUtils.toInputStream(currentPayload, "UTF-8");
- message.setContent(InputStream.class, is);
- IOUtils.closeQuietly(is);
- } catch (Exception e) {
- // It's ok to not have request input content
- // throw new Fault(e);
- }
- }
-
- // this will be saved in the message exchange, and can be pulled out later...
- message.getExchange().put(fullId + "_REQUEST", buffer.toString());
- }
-
- /**
- * Gen date.
- *
- * @param aaiLogger the aai logger
- * @param logline the logline
- * @return the string
- */
- protected String genDate() {
- FormatDate fd = new FormatDate( "YYMMdd-HH:mm:ss:SSS");
- return fd.getDateTime();
- }
-
-}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/AAILogJAXRSOutInterceptor.java b/aai-resources/src/main/java/org/onap/aai/interceptors/AAILogJAXRSOutInterceptor.java
deleted file mode 100644
index 3b1f50c..0000000
--- a/aai-resources/src/main/java/org/onap/aai/interceptors/AAILogJAXRSOutInterceptor.java
+++ /dev/null
@@ -1,303 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.onap.aai.interceptors;
-
-import java.io.OutputStream;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.cxf.helpers.CastUtils;
-import org.apache.cxf.interceptor.LoggingMessage;
-import org.apache.cxf.io.CacheAndWriteOutputStream;
-import org.apache.cxf.io.CachedOutputStream;
-import org.apache.cxf.io.CachedOutputStreamCallback;
-import org.apache.cxf.jaxrs.interceptor.JAXRSOutInterceptor;
-import org.apache.cxf.message.Message;
-import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.logging.ErrorLogHelper;
-import org.onap.aai.util.AAIConfig;
-import org.onap.aai.util.AAIConstants;
-import org.onap.aai.util.FormatDate;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-// right after the request is complete, there may be content
-public class AAILogJAXRSOutInterceptor extends JAXRSOutInterceptor {
-
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(AAILogJAXRSOutInterceptor.class);
-
- protected final String COMPONENT = "aairest";
- protected final String CAMEL_REQUEST = "CamelHttpUrl";
-
- /**
- * {@inheritDoc}
- */
- public void handleMessage(Message message) {
-
- String fullId = (String) message.getExchange().get(LoggingMessage.ID_KEY);
-
- Map<String, List<String>> headers = CastUtils.cast((Map<?, ?>) message.get(Message.PROTOCOL_HEADERS));
- if (headers == null) {
- headers = new HashMap<String, List<String>>();
- }
-
- headers.put("X-AAI-TXID", Collections.singletonList(fullId));
- message.put(Message.PROTOCOL_HEADERS, headers);
-
- Message outMessage = message.getExchange().getOutMessage();
- final OutputStream os = outMessage.getContent(OutputStream.class);
- if (os == null) {
- return;
- }
-
- // we only want to register the callback if there is good reason for it.
- if (message.getExchange().containsKey("AAI_LOGGING_HBASE_ENABLED") || message.getExchange().containsKey("AAI_LOGGING_TRACE_ENABLED")) {
-
- final CacheAndWriteOutputStream newOut = new CacheAndWriteOutputStream(os);
- message.setContent(OutputStream.class, newOut);
- newOut.registerCallback(new LoggingCallback(message, os));
- }
-
- }
-
- class LoggingCallback implements CachedOutputStreamCallback {
-
- private final Message message;
- private final OutputStream origStream;
-
- public LoggingCallback(final Message msg, final OutputStream os) {
- this.message = msg;
- this.origStream = os;
- }
-
- public void onFlush(CachedOutputStream cos) {
-
- }
-
- public void onClose(CachedOutputStream cos) {
-
- String getValue = "";
- String postValue = "";
- String logValue = "";
-
- try {
- logValue = AAIConfig.get("aai.transaction.logging");
- getValue = AAIConfig.get("aai.transaction.logging.get");
- postValue = AAIConfig.get("aai.transaction.logging.post");
- } catch (AAIException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- if (!message.getExchange().containsKey("AAI_LOGGING_HBASE_ENABLED") && !message.getExchange().containsKey("AAI_LOGGING_TRACE_ENABLED")) {
- return;
- }
-
- String fullId = (String) message.getExchange().get(LoggingMessage.ID_KEY);
-
- Message inMessage = message.getExchange().getInMessage();
- String transId = null;
- String fromAppId = null;
-
- Map<String, List<String>> headersList = CastUtils.cast((Map<?, ?>) inMessage.get(Message.PROTOCOL_HEADERS));
- if (headersList != null) {
- List<String> xt = headersList.get("X-TransactionId");
- if (xt != null) {
- for (String transIdValue : xt) {
- transId = transIdValue;
- }
- }
- List<String> fa = headersList.get("X-FromAppId");
- if (fa != null) {
- for (String fromAppIdValue : fa) {
-
- fromAppId = fromAppIdValue;
- }
- }
- }
-
- String httpMethod = (String) inMessage.get(Message.HTTP_REQUEST_METHOD);
-
- String uri = (String) inMessage.get(CAMEL_REQUEST);
- String fullUri = uri;
- if (uri != null) {
- String query = (String) message.get(Message.QUERY_STRING);
- if (query != null) {
- fullUri = uri + "?" + query;
- }
- }
-
- String request = (String) message.getExchange().get(fullId + "_REQUEST");
-
- Message outMessage = message.getExchange().getOutMessage();
-
- final LoggingMessage buffer = new LoggingMessage("OUTMessage", fullId);
-
- // should we check this, and make sure it's not an error?
- Integer responseCode = (Integer) outMessage.get(Message.RESPONSE_CODE);
- if (responseCode == null) {
- responseCode = 200; // this should never happen, but just in
- // case we don't get one
- }
- buffer.getResponseCode().append(responseCode);
-
- String encoding = (String) outMessage.get(Message.ENCODING);
-
- if (encoding != null) {
- buffer.getEncoding().append(encoding);
- }
-
- String ct = (String) outMessage.get(Message.CONTENT_TYPE);
- if (ct != null) {
- buffer.getContentType().append(ct);
- }
-
- Object headers = outMessage.get(Message.PROTOCOL_HEADERS);
- if (headers != null) {
- buffer.getHeader().append(headers);
- }
-
- Boolean ss = false;
- if (responseCode >= 200 && responseCode <= 299) {
- ss = true;
- }
- String response = buffer.toString();
-
- // this should have been set by the in interceptor
- String rqstTm = (String) message.getExchange().get("AAI_RQST_TM");
-
- // just in case it wasn't, we'll put this here. not great, but it'll
- // have a value.
- if (rqstTm == null) {
- rqstTm = genDate();
- }
-
-
- String respTm = genDate();
-
- try {
- String actualRequest = request;
- StringBuilder builder = new StringBuilder();
- cos.writeCacheTo(builder, 100000);
- // the cached response payload (XML or JSON):
- String payload = builder.toString();
-
- String actualResponse = response;
- if (payload == null) {
-
- } else {
- actualResponse = response + payload;
- }
-
- // we only log to the AAI log if it's enabled in the config props
- // file
- if (message.getExchange().containsKey("AAI_LOGGING_TRACE_ENABLED")) {
-
- if (message.getExchange().containsKey("AAI_LOGGING_TRACE_LOGREQUEST")) {
-
- // strip newlines from request
- String traceRequest = actualRequest;
- traceRequest = traceRequest.replace("\n", " ");
- traceRequest = traceRequest.replace("\r", "");
- traceRequest = traceRequest.replace("\t", "");
- LOGGER.debug(traceRequest);
- }
- if (message.getExchange().containsKey("AAI_LOGGING_TRACE_LOGRESPONSE")) {
- // strip newlines from response
- String traceResponse = actualResponse;
- traceResponse = traceResponse.replace("\n", " ");
- traceResponse = traceResponse.replace("\r", "");
- traceResponse = traceResponse.replace("\t", "");
-
- LOGGER.debug(traceResponse);
- }
- }
-
- // we only log to HBASE if it's enabled in the config props file
- // TODO: pretty print XML/JSON. we might need to get the payload
- // and envelope separately
- if (message.getExchange().containsKey("AAI_LOGGING_HBASE_ENABLED")) {
- if (!message.getExchange().containsKey("AAI_LOGGING_HBASE_LOGREQUEST")) {
- actualRequest = "loggingDisabled";
- }
- if (!message.getExchange().containsKey("AAI_LOGGING_HBASE_LOGRESPONSE")) {
- actualResponse = "loggingDisabled";
- }
-
- LOGGER.debug("action={}, urlin={}, HbTransId={}", httpMethod, fullUri, fullId);
-
- if (logValue.equals("false")) {
- } else if (getValue.equals("false") && httpMethod.equals("GET")) {
- } else if (postValue.equals("false") && httpMethod.equals("POST")) {
- } else {
- putTransaction(transId, responseCode.toString(), rqstTm, respTm, fromAppId + ":" + transId, fullUri, httpMethod, request, response, actualResponse);
-
- }
- }
- } catch (Exception ex) {
- // ignore
- }
-
- message.setContent(OutputStream.class, origStream);
-
- LOGGER.auditEvent("HTTP Response Code: {}", responseCode.toString());
- }
-
- }
-
- protected String genDate() {
- FormatDate fd = new FormatDate( "YYMMdd-HH:mm:ss:SSS");
- return fd.getDateTime();
- }
-
- public String putTransaction(String tid, String status, String rqstTm, String respTm, String srcId, String rsrcId, String rsrcType, String rqstBuf, String respBuf, String actualResponse) {
- String tm = null;
-
- if (tid == null || "".equals(tid)) {
- tm = this.genDate();
- tid = tm + "-";
- }
-
- String htid = tid;
-
- if (rqstTm == null || "".equals(rqstTm)) {
- rqstTm = tm;
- }
-
- if (respTm == null || "".equals(respTm)) {
- respTm = tm;
- }
-
- try {
- LOGGER.debug(" transactionId:" + tid + " status: " + status + " rqstDate: " + rqstTm + " respDate: " + respTm + " sourceId: " + srcId + " resourceId: "
- + rsrcId + " resourceType: " + rsrcType + " payload rqstBuf: " + rqstBuf + " payload respBuf: " + respBuf + " Payload Error Messages: " + actualResponse);
- return htid;
- } catch (Exception e) {
- ErrorLogHelper.logError("AAI_4000", "Exception updating HBase:");
- return htid;
- }
-
- }
-}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/PreAaiAjscInterceptor.java b/aai-resources/src/main/java/org/onap/aai/interceptors/PreAaiAjscInterceptor.java
deleted file mode 100644
index 360ebe4..0000000
--- a/aai-resources/src/main/java/org/onap/aai/interceptors/PreAaiAjscInterceptor.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.onap.aai.interceptors;
-
-import java.util.Map;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.onap.aai.logging.LoggingContext;
-
-import ajsc.beans.interceptors.AjscInterceptor;
-
-public class PreAaiAjscInterceptor implements AjscInterceptor {
- private final static String TARGET_ENTITY = "aai-resources";
- private static class LazyAaiAjscInterceptor {
- public static final PreAaiAjscInterceptor INSTANCE = new PreAaiAjscInterceptor();
- }
-
- public static PreAaiAjscInterceptor getInstance() {
- return LazyAaiAjscInterceptor.INSTANCE;
- }
-
- @Override
- public boolean allowOrReject(HttpServletRequest req, HttpServletResponse resp, Map<?, ?> paramMap)
- throws Exception {
-
- LoggingContext.init();
- String serviceName = req.getMethod() + " " + req.getRequestURI().toString();
- String queryStr = req.getQueryString();
- if ( queryStr != null ) {
- serviceName = serviceName + "?" + queryStr;
- }
- LoggingContext.partnerName(req.getHeader("X-FromAppId"));
- LoggingContext.serviceName(serviceName);
- LoggingContext.targetEntity(TARGET_ENTITY);
- LoggingContext.targetServiceName(serviceName);
- LoggingContext.requestId(req.getHeader("X-TransactionId"));
-
- return true;
- }
-}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/package-info.java b/aai-resources/src/main/java/org/onap/aai/interceptors/package-info.java
new file mode 100644
index 0000000..0af4afd
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/package-info.java
@@ -0,0 +1,38 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+/**
+ * The <b>interceptors</b> package is subdivided into pre and post interceptors.
+ * To add an additional interceptor, add its priority level to
+ * AAIRequestFilterPriority or AAIResponseFilterPriority; the value indicates
+ * the order in which the interceptor will be triggered. Then reference that
+ * value on the new interceptor class, as shown below:
+ *
+ * <pre>
+ * <code>
+ * @Priority(AAIRequestFilterPriority.YOUR_PRIORITY)
+ * public class YourInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+ *
+ * }
+ * </code>
+ * </pre>
+ */
+package org.onap.aai.interceptors;
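For illustration only, and not part of this change set: a minimal sketch of what a new pre-request interceptor following the convention described in the package-info above might look like. The class name ExampleRequestFilter is hypothetical, and reusing the existing HEADER_MANIPULATION constant is an assumption; a real interceptor would normally get its own value in AAIRequestFilterPriority.

package org.onap.aai.interceptors.pre;

import java.io.IOException;

import javax.annotation.Priority;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;

import org.onap.aai.interceptors.AAIContainerFilter;

// Hypothetical example; the priority constant is reused from AAIRequestFilterPriority for brevity.
@PreMatching
@Priority(AAIRequestFilterPriority.HEADER_MANIPULATION)
public class ExampleRequestFilter extends AAIContainerFilter implements ContainerRequestFilter {

    @Override
    public void filter(ContainerRequestContext requestContext) throws IOException {
        // Inspect or adjust the incoming request here, e.g. via requestContext.getHeaders().
    }
}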
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java b/aai-resources/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java
new file mode 100644
index 0000000..db05b30
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java
@@ -0,0 +1,34 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors.post;
+
+public final class AAIResponseFilterPriority {
+
+ private AAIResponseFilterPriority() {}
+
+ public static final int HEADER_MANIPULATION = 1000;
+
+ public static final int RESPONSE_TRANS_LOGGING = 2000;
+
+ public static final int RESET_LOGGING_CONTEXT = 3000;
+
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/PostAaiAjscInterceptor.java b/aai-resources/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java
index 2a34774..592c3fc 100644
--- a/aai-resources/src/main/java/org/onap/aai/interceptors/PostAaiAjscInterceptor.java
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java
@@ -19,47 +19,53 @@
*
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*/
-package org.onap.aai.interceptors;
+package org.onap.aai.interceptors.post;
-import java.util.Map;
+import java.io.IOException;
+import javax.annotation.Priority;
import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import org.onap.aai.interceptors.AAIContainerFilter;
import org.onap.aai.logging.LoggingContext;
import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.springframework.beans.factory.annotation.Autowired;
+
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import ajsc.beans.interceptors.AjscInterceptor;
+@Priority(AAIResponseFilterPriority.RESET_LOGGING_CONTEXT)
+public class ResetLoggingContext extends AAIContainerFilter implements ContainerResponseFilter {
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(ResetLoggingContext.class);
-public class PostAaiAjscInterceptor implements AjscInterceptor {
+ @Autowired
+ private HttpServletRequest httpServletRequest;
+
+ @Override
+ public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
+ throws IOException {
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(PostAaiAjscInterceptor.class);
+ this.cleanLoggingContext();
- private static class LazyAaiAjscInterceptor {
- public static final PostAaiAjscInterceptor INSTANCE = new PostAaiAjscInterceptor();
}
- public static PostAaiAjscInterceptor getInstance() {
- return LazyAaiAjscInterceptor.INSTANCE;
- }
+ private void cleanLoggingContext() {
+ final String responseCode = LoggingContext.responseCode();
+ String url = httpServletRequest.getRequestURL().toString();
- @Override
- public boolean allowOrReject(HttpServletRequest req, HttpServletResponse resp, Map<?, ?> paramMap)
- throws Exception {
-
- final int httpStatusCode = resp.getStatus();
- LoggingContext.responseCode(Integer.toString(httpStatusCode));
- if ( httpStatusCode < 200 || httpStatusCode > 299 ) {
+ if (responseCode != null && responseCode.startsWith("ERR.")) {
LoggingContext.statusCode(StatusCode.ERROR);
- LOGGER.error(req.getRequestURL() + " call failed with responseCode=" + httpStatusCode);
- }
- else {
+ LOGGER.error(url + " call failed with responseCode=" + responseCode);
+ } else {
LoggingContext.statusCode(StatusCode.COMPLETE);
- LOGGER.info(req.getRequestURL() + " call succeeded");
+ LOGGER.info(url + " call succeeded");
}
+
LoggingContext.clear();
- return true;
}
+
}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java b/aai-resources/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java
new file mode 100644
index 0000000..e3cb35b
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java
@@ -0,0 +1,51 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors.post;
+
+import java.io.IOException;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+
+@Priority(AAIResponseFilterPriority.HEADER_MANIPULATION)
+public class ResponseHeaderManipulation extends AAIContainerFilter implements ContainerResponseFilter {
+
+
+ @Override
+ public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
+ throws IOException {
+
+ updateResponseHeaders(requestContext, responseContext);
+
+ }
+
+ private void updateResponseHeaders(ContainerRequestContext requestContext,
+ ContainerResponseContext responseContext) {
+ responseContext.getHeaders().add(AAIHeaderProperties.AAI_TX_ID, requestContext.getProperty(AAIHeaderProperties.AAI_TX_ID));
+ }
+
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java b/aai-resources/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java
new file mode 100644
index 0000000..964c436
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java
@@ -0,0 +1,127 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors.post;
+
+import java.io.IOException;
+import java.util.Objects;
+import java.util.Optional;
+
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.util.AAIConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.google.gson.JsonObject;
+
+@Priority(AAIResponseFilterPriority.RESPONSE_TRANS_LOGGING)
+public class ResponseTransactionLogging extends AAIContainerFilter implements ContainerResponseFilter {
+
+ private static final EELFLogger TRANSACTION_LOGGER = EELFManager.getInstance().getLogger(ResponseTransactionLogging.class);
+
+ @Autowired
+ private HttpServletResponse httpServletResponse;
+
+ @Override
+ public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
+ throws IOException {
+
+ this.transLogging(requestContext, responseContext);
+
+ }
+
+ private void transLogging(ContainerRequestContext requestContext, ContainerResponseContext responseContext) {
+
+ String logValue;
+ String getValue;
+ String postValue;
+
+ try {
+ logValue = AAIConfig.get("aai.transaction.logging");
+ getValue = AAIConfig.get("aai.transaction.logging.get");
+ postValue = AAIConfig.get("aai.transaction.logging.post");
+ } catch (AAIException e) {
+ return;
+ }
+
+ String transId = requestContext.getHeaderString(AAIHeaderProperties.TRANSACTION_ID);
+ String fromAppId = requestContext.getHeaderString(AAIHeaderProperties.FROM_APP_ID);
+ String fullUri = requestContext.getUriInfo().getRequestUri().toString();
+ String requestTs = (String)requestContext.getProperty(AAIHeaderProperties.AAI_REQUEST_TS);
+
+ String httpMethod = requestContext.getMethod();
+
+ String status = Integer.toString(responseContext.getStatus());
+
+ String request = (String)requestContext.getProperty(AAIHeaderProperties.AAI_REQUEST);
+ String response = this.getResponseString(responseContext);
+
+ if (!Boolean.parseBoolean(logValue)) {
+ } else if (!Boolean.parseBoolean(getValue) && "GET".equals(httpMethod)) {
+ } else if (!Boolean.parseBoolean(postValue) && "POST".equals(httpMethod)) {
+ } else {
+
+ JsonObject logEntry = new JsonObject();
+ logEntry.addProperty("transactionId", transId);
+ logEntry.addProperty("status", status);
+ logEntry.addProperty("rqstDate", requestTs);
+ logEntry.addProperty("respDate", this.genDate());
+ logEntry.addProperty("sourceId", fromAppId + ":" + transId);
+ logEntry.addProperty("resourceId", fullUri);
+ logEntry.addProperty("resourceType", httpMethod);
+ logEntry.addProperty("rqstBuf", Objects.toString(request, ""));
+ logEntry.addProperty("respBuf", Objects.toString(response, ""));
+
+ try {
+ TRANSACTION_LOGGER.debug(logEntry.toString());
+ } catch (Exception e) {
+ ErrorLogHelper.logError("AAI_4000", "Exception writing transaction log.");
+ }
+ }
+
+ }
+
+ private String getResponseString(ContainerResponseContext responseContext) {
+ JsonObject response = new JsonObject();
+ response.addProperty("ID", responseContext.getHeaderString(AAIHeaderProperties.AAI_TX_ID));
+ response.addProperty("Content-Type", this.httpServletResponse.getContentType());
+ response.addProperty("Response-Code", responseContext.getStatus());
+ response.addProperty("Headers", responseContext.getHeaders().toString());
+ Optional<Object> entityOptional = Optional.ofNullable(responseContext.getEntity());
+ if(entityOptional.isPresent()){
+ response.addProperty("Entity", entityOptional.get().toString());
+ } else {
+ response.addProperty("Entity", "");
+ }
+ return response.toString();
+ }
+
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/JaxrsUserService.java b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java
index a1cc2ca..823a5e6 100644
--- a/aai-resources/src/main/java/org/onap/aai/ajsc_aai/JaxrsUserService.java
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java
@@ -19,37 +19,21 @@
*
* ECOMP is a trademark and service mark of AT&T Intellectual Property.
*/
-package org.onap.aai.ajsc_aai;
+package org.onap.aai.interceptors.pre;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import java.util.Map;
-import java.util.HashMap;
-
-@Path("/user")
-public class JaxrsUserService {
+public final class AAIRequestFilterPriority {
+
+ private AAIRequestFilterPriority() {}
- private static final Map<String,String> userIdToNameMap;
- static {
- userIdToNameMap = new HashMap<String,String>();
- userIdToNameMap.put("userID1","Name1");
- userIdToNameMap.put("userID2","Name2");
- }
+ public static final int REQUEST_TRANS_LOGGING = 1000;
- /**
- * Lookup user.
- *
- * @param userId the user id
- * @return the string
- */
- @GET
- @Path("/{userId}")
- @Produces("text/plain")
- public String lookupUser(@PathParam("userId") String userId) {
- String name = userIdToNameMap.get(userId);
- return name != null ? name : "unknown id";
- }
-
+ public static final int HEADER_VALIDATION = 2000;
+
+ public static final int SET_LOGGING_CONTEXT = 3000;
+
+ public static final int AUTHORIZATION = 4000;
+
+ public static final int HEADER_MANIPULATION = 5000;
+
+ public static final int REQUEST_MODIFICATION = 6000;
}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java
new file mode 100644
index 0000000..4a7e10a
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java
@@ -0,0 +1,89 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors.pre;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.UUID;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.ErrorLogHelper;
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.HEADER_VALIDATION)
+public class HeaderValidation extends AAIContainerFilter implements ContainerRequestFilter {
+
+ @Override
+ public void filter(ContainerRequestContext requestContext) throws IOException {
+
+ Optional<Response> oResp;
+
+ String transId = requestContext.getHeaderString(AAIHeaderProperties.TRANSACTION_ID);
+ String fromAppId = requestContext.getHeaderString(AAIHeaderProperties.FROM_APP_ID);
+
+ List<MediaType> acceptHeaderValues = requestContext.getAcceptableMediaTypes();
+
+ oResp = this.validateHeaderValuePresence(fromAppId, "AAI_4009", acceptHeaderValues);
+ if (oResp.isPresent()) {
+ requestContext.abortWith(oResp.get());
+ return;
+ }
+ oResp = this.validateHeaderValuePresence(transId, "AAI_4010", acceptHeaderValues);
+ if (oResp.isPresent()) {
+ requestContext.abortWith(oResp.get());
+ return;
+ }
+
+ if (!this.isValidUUID(transId)) {
+ transId = UUID.randomUUID().toString();
+ requestContext.getHeaders().get(AAIHeaderProperties.TRANSACTION_ID).clear();
+ requestContext.getHeaders().add(AAIHeaderProperties.TRANSACTION_ID, transId);
+ }
+
+ }
+
+ private Optional<Response> validateHeaderValuePresence(String value, String errorCode,
+ List<MediaType> acceptHeaderValues) {
+ Response response = null;
+ AAIException aaie;
+ if (value == null) {
+ aaie = new AAIException(errorCode);
+ return Optional.of(Response.status(aaie.getErrorObject().getHTTPResponseCode())
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(acceptHeaderValues, aaie, new ArrayList<>()))
+ .build());
+ }
+
+ return Optional.ofNullable(response);
+ }
+
+}
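For reference, a hedged usage sketch (again not part of this change set) of a JAX-RS client call that passes the HeaderValidation filter above: both X-FromAppId and X-TransactionId must be present (otherwise AAI_4009 or AAI_4010 is returned), and a non-UUID transaction id is replaced with a generated one. The base URL, API version, and resource path below are placeholders.

import java.util.UUID;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class ExampleAaiClient {

    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        Response response = client
                .target("https://localhost:8443")                        // placeholder base URL
                .path("aai/v11/example-resource")                        // placeholder version and resource path
                .request(MediaType.APPLICATION_JSON)
                .header("X-FromAppId", "example-app")                    // required; AAI_4009 if missing
                .header("X-TransactionId", UUID.randomUUID().toString()) // required; AAI_4010 if missing
                .get();
        System.out.println("HTTP " + response.getStatus());
        client.close();
    }
}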
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java
new file mode 100644
index 0000000..3d3e6e0
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java
@@ -0,0 +1,72 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors.pre;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MultivaluedMap;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.springframework.beans.factory.annotation.Autowired;
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.HEADER_MANIPULATION)
+public class RequestHeaderManipulation extends AAIContainerFilter implements ContainerRequestFilter {
+
+ @Autowired
+ private HttpServletRequest httpServletRequest;
+
+ private static final Pattern versionedEndpoint = Pattern.compile("^/aai/(v\\d+)");
+
+ @Override
+ public void filter(ContainerRequestContext requestContext) throws IOException {
+
+ String uri = httpServletRequest.getRequestURI();
+ this.addRequestContext(uri, requestContext.getHeaders());
+
+ }
+
+ private void addRequestContext(String uri, MultivaluedMap<String, String> requestHeaders) {
+
+ String rc = "";
+
+ Matcher match = versionedEndpoint.matcher(uri);
+ if (match.find()) {
+ rc = match.group(1);
+ }
+
+ if (requestHeaders.containsKey(AAIHeaderProperties.REQUEST_CONTEXT)) {
+ requestHeaders.remove(AAIHeaderProperties.REQUEST_CONTEXT);
+ }
+ requestHeaders.put(AAIHeaderProperties.REQUEST_CONTEXT, Collections.singletonList(rc));
+ }
+
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java
new file mode 100644
index 0000000..812bf1b
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java
@@ -0,0 +1,78 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors.pre;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.UriBuilder;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.HEADER_VALIDATION)
+public class RequestModification extends AAIContainerFilter implements ContainerRequestFilter {
+
+ @Override
+ public void filter(ContainerRequestContext requestContext) throws IOException {
+
+ this.cleanDME2QueryParams(requestContext);
+
+ }
+
+ private void cleanDME2QueryParams(ContainerRequestContext request) {
+ UriBuilder builder = request.getUriInfo().getRequestUriBuilder();
+ MultivaluedMap<String, String> queries = request.getUriInfo().getQueryParameters();
+
+ String[] blacklist = { "version", "envContext", "routeOffer" };
+ Set<String> blacklistSet = Arrays.stream(blacklist).collect(Collectors.toSet());
+
+ boolean remove = true;
+
+ for (String param : blacklistSet) {
+ if (!queries.containsKey(param)) {
+ remove = false;
+ break;
+ }
+ }
+
+ if (remove) {
+ for (Map.Entry<String, List<String>> query : queries.entrySet()) {
+ String key = query.getKey();
+ if (blacklistSet.contains(key)) {
+ builder.replaceQueryParam(key);
+ }
+ }
+ }
+ request.setRequestUri(builder.build());
+ }
+
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java
new file mode 100644
index 0000000..75103f5
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors.pre;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Random;
+import java.util.UUID;
+
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+
+import org.glassfish.jersey.message.internal.ReaderWriter;
+import org.glassfish.jersey.server.ContainerException;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.HbaseSaltPrefixer;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import com.google.gson.JsonObject;
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.REQUEST_TRANS_LOGGING)
+public class RequestTransactionLogging extends AAIContainerFilter implements ContainerRequestFilter {
+
+ @Autowired
+ private HttpServletRequest httpServletRequest;
+
+ @Override
+ public void filter(ContainerRequestContext requestContext) throws IOException {
+
+ String currentTimeStamp = genDate();
+ String fullId = this.getAAITxIdToHeader(currentTimeStamp);
+ this.addToRequestContext(requestContext, AAIHeaderProperties.AAI_TX_ID, fullId);
+ this.addToRequestContext(requestContext, AAIHeaderProperties.AAI_REQUEST, this.getRequest(requestContext, fullId));
+ this.addToRequestContext(requestContext, AAIHeaderProperties.AAI_REQUEST_TS, currentTimeStamp);
+ }
+
+ private void addToRequestContext(ContainerRequestContext requestContext, String name, String aaiTxIdToHeader) {
+ requestContext.setProperty(name, aaiTxIdToHeader);
+ }
+
+ private String getAAITxIdToHeader(String currentTimeStamp) {
+ String txId = UUID.randomUUID().toString();
+ try {
+ txId = HbaseSaltPrefixer.getInstance().prependSalt(AAIConfig.get(AAIConstants.AAI_NODENAME) + "-"
+ + currentTimeStamp + "-" + new Random(System.currentTimeMillis()).nextInt(99999));
+ } catch (AAIException e) {
+ }
+
+ return txId;
+ }
+
+ private String getRequest(ContainerRequestContext requestContext, String fullId) {
+
+ JsonObject request = new JsonObject();
+ request.addProperty("ID", fullId);
+ request.addProperty("Http-Method", requestContext.getMethod());
+ request.addProperty("Content-Type", httpServletRequest.getContentType());
+ request.addProperty("Headers", requestContext.getHeaders().toString());
+
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ InputStream in = requestContext.getEntityStream();
+
+ try {
+ if (in.available() > 0) {
+ ReaderWriter.writeTo(in, out);
+ byte[] requestEntity = out.toByteArray();
+ request.addProperty("Payload", new String(requestEntity, "UTF-8"));
+ requestContext.setEntityStream(new ByteArrayInputStream(requestEntity));
+ }
+ } catch (IOException ex) {
+ throw new ContainerException(ex);
+ }
+
+ return request.toString();
+ }
+
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java
new file mode 100644
index 0000000..5c6a5e0
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java
@@ -0,0 +1,71 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors.pre;
+
+import java.io.IOException;
+
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.LoggingContext;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.env.Environment;
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.SET_LOGGING_CONTEXT)
+public class SetLoggingContext extends AAIContainerFilter implements ContainerRequestFilter {
+
+ @Autowired
+ private Environment environment;
+
+ @Autowired
+ private HttpServletRequest httpServletRequest;
+
+ @Override
+ public void filter(ContainerRequestContext requestContext) throws IOException {
+
+ String uri = httpServletRequest.getRequestURI();
+ String queryString = httpServletRequest.getQueryString();
+
+ if(queryString != null && !queryString.isEmpty()){
+ uri = uri + "?" + queryString;
+ }
+
+ String httpMethod = requestContext.getMethod();
+ String transId = requestContext.getHeaderString(AAIHeaderProperties.TRANSACTION_ID);
+ String fromAppId = requestContext.getHeaderString(AAIHeaderProperties.FROM_APP_ID);
+
+ LoggingContext.init();
+ LoggingContext.requestId(transId);
+ LoggingContext.partnerName(fromAppId);
+ LoggingContext.targetEntity(environment.getProperty("spring.application.name"));
+ LoggingContext.component(fromAppId);
+ LoggingContext.serviceName(httpMethod + " " + uri);
+ LoggingContext.targetServiceName(httpMethod + " " + uri);
+ }
+
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/rest/LegacyMoxyConsumer.java b/aai-resources/src/main/java/org/onap/aai/rest/LegacyMoxyConsumer.java
index 0c2ef9c..fbfa59e 100644
--- a/aai-resources/src/main/java/org/onap/aai/rest/LegacyMoxyConsumer.java
+++ b/aai-resources/src/main/java/org/onap/aai/rest/LegacyMoxyConsumer.java
@@ -24,9 +24,12 @@ package org.onap.aai.rest;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
+import java.util.Map.Entry;
import java.util.Set;
+import java.util.stream.Collectors;
import java.util.concurrent.Callable;
import javax.servlet.http.HttpServletRequest;
@@ -43,12 +46,13 @@ import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
-import org.apache.cxf.jaxrs.ext.PATCH;
+import io.swagger.jaxrs.PATCH;
import org.javatuples.Pair;
import org.onap.aai.dbmap.DBConnectionType;
import org.onap.aai.exceptions.AAIException;
@@ -240,7 +244,7 @@ public class LegacyMoxyConsumer extends RESTAPI {
@Path("/{uri: .+}")
@Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- public Response getLegacy (String content, @PathParam("version")String versionParam, @PathParam("uri") @Encoded String uri, @DefaultValue("all") @QueryParam("depth") String depthParam, @DefaultValue("false") @QueryParam("cleanup") String cleanUp, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
+ public Response getLegacy (String content, @DefaultValue("-1") @QueryParam("resultIndex") String resultIndex, @DefaultValue("-1") @QueryParam("resultSize") String resultSize, @PathParam("version")String versionParam, @PathParam("uri") @Encoded String uri, @DefaultValue("all") @QueryParam("depth") String depthParam, @DefaultValue("false") @QueryParam("cleanup") String cleanUp, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
return runner(AAIConstants.AAI_CRUD_TIMEOUT_ENABLED,
AAIConstants.AAI_CRUD_TIMEOUT_APP,
AAIConstants.AAI_CRUD_TIMEOUT_LIMIT,
@@ -250,7 +254,7 @@ public class LegacyMoxyConsumer extends RESTAPI {
new Callable<Response>() {
@Override
public Response call() {
- return getLegacy(content, versionParam, uri, depthParam, cleanUp, headers, info, req, new HashSet<String>());
+ return getLegacy(content, versionParam, uri, depthParam, cleanUp, headers, info, req, new HashSet<String>(), resultIndex, resultSize);
}
}
);
@@ -270,7 +274,7 @@ public class LegacyMoxyConsumer extends RESTAPI {
* @param removeQueryParams
* @return
*/
- public Response getLegacy(String content, String versionParam, String uri, String depthParam, String cleanUp, HttpHeaders headers, UriInfo info, HttpServletRequest req, Set<String> removeQueryParams) {
+ public Response getLegacy(String content, String versionParam, String uri, String depthParam, String cleanUp, HttpHeaders headers, UriInfo info, HttpServletRequest req, Set<String> removeQueryParams, String resultIndex, String resultSize) {
String sourceOfTruth = headers.getRequestHeaders().getFirst("X-FromAppId");
String transId = headers.getRequestHeaders().getFirst("X-TransactionId");
String realTime = headers.getRequestHeaders().getFirst("Real-Time");
@@ -288,7 +292,7 @@ public class LegacyMoxyConsumer extends RESTAPI {
LoggingContext.serviceName(serviceName);
LoggingContext.targetEntity(TARGET_ENTITY);
LoggingContext.targetServiceName(serviceName);
-
+
try {
validateRequest(info);
Version version = Version.valueOf(versionParam);
@@ -298,37 +302,27 @@ public class LegacyMoxyConsumer extends RESTAPI {
loader = httpEntry.getLoader();
MultivaluedMap<String, String> params = info.getQueryParameters();
- RemoveDME2QueryParams dme2Workaround = new RemoveDME2QueryParams();
- //clear out all params not used for filtering
- params.remove("depth");
- params.remove("cleanup");
- params.remove("nodes-only");
- for (String queryParam : removeQueryParams) {
- params.remove(queryParam);
- }
- if (dme2Workaround.shouldRemoveQueryParams(params)) {
- dme2Workaround.removeQueryParams(params);
- }
+ params = removeNonFilterableParams(params);
uri = uri.split("\\?")[0];
-
+
URI uriObject = UriBuilder.fromPath(uri).build();
QueryParser uriQuery = dbEngine.getQueryBuilder().createQueryFromURI(uriObject, params);
String objType = "";
- if (!uriQuery.getContainerType().equals("")) {
- objType = uriQuery.getContainerType();
- } else {
- objType = uriQuery.getResultType();
- }
- Introspector obj = loader.introspectorFromName(objType);
- DBRequest request =
+ if (!uriQuery.getContainerType().equals("")) {
+ objType = uriQuery.getContainerType();
+ } else {
+ objType = uriQuery.getResultType();
+ }
+ Introspector obj = loader.introspectorFromName(objType);
+ DBRequest request =
new DBRequest.Builder(HttpMethod.GET, uriObject, uriQuery, obj, headers, info, transId).build();
List<DBRequest> requests = new ArrayList<>();
requests.add(request);
- Pair<Boolean, List<Pair<URI, Response>>> responsesTuple = httpEntry.process(requests, sourceOfTruth);
-
+ Pair<Boolean, List<Pair<URI, Response>>> responsesTuple = httpEntry.process(requests, sourceOfTruth);
+
response = responsesTuple.getValue1().get(0).getValue1();
} catch (AAIException e) {
@@ -349,6 +343,21 @@ public class LegacyMoxyConsumer extends RESTAPI {
return response;
}
+
+ private MultivaluedMap<String, String> removeNonFilterableParams(MultivaluedMap<String, String> params) {
+
+ String[] toRemove = { "depth", "cleanup", "nodes-only", "format", "resultIndex", "resultSize"};
+ Set<String> toRemoveSet = Arrays.stream(toRemove).collect(Collectors.toSet());
+
+ MultivaluedMap<String, String> cleanedParams = new MultivaluedHashMap<>();
+ params.keySet().stream().forEach(k -> {
+ if (!toRemoveSet.contains(k)) {
+ cleanedParams.addAll(k, params.get(k));
+ }
+ });
+
+ return cleanedParams;
+ }
/**
* Delete.
*
@@ -584,7 +593,7 @@ public class LegacyMoxyConsumer extends RESTAPI {
LoggingContext.requestId(transId);
LoggingContext.partnerName(sourceOfTruth);
LoggingContext.targetEntity(TARGET_ENTITY);
-
+
try {
validateRequest(info);
diff --git a/aai-resources/src/main/java/org/onap/aai/rest/bulk/BulkUriInfo.java b/aai-resources/src/main/java/org/onap/aai/rest/bulk/BulkUriInfo.java
index 2bff084..3153a1c 100644
--- a/aai-resources/src/main/java/org/onap/aai/rest/bulk/BulkUriInfo.java
+++ b/aai-resources/src/main/java/org/onap/aai/rest/bulk/BulkUriInfo.java
@@ -3,16 +3,12 @@ package org.onap.aai.rest.bulk;
import java.net.URI;
import java.util.List;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.PathSegment;
-import javax.ws.rs.core.UriBuilder;
-import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.*;
-import com.sun.jersey.core.util.MultivaluedMapImpl;
public class BulkUriInfo implements UriInfo {
- private MultivaluedMap<String, String> queryParams= new MultivaluedMapImpl();
+ private MultivaluedMap<String, String> queryParams= new MultivaluedHashMap<>();
@Override
public String getPath() {
@@ -98,7 +94,17 @@ public class BulkUriInfo implements UriInfo {
public List<Object> getMatchedResources() {
return null;
}
-
+
+ @Override
+ public URI resolve(URI uri) {
+ return null;
+ }
+
+ @Override
+ public URI relativize(URI uri) {
+ return null;
+ }
+
public void addParams(String key, List<String> list) {
this.queryParams.put(key, list);
}
diff --git a/aai-resources/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java b/aai-resources/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java
index 0188142..6a3b0a5 100644
--- a/aai-resources/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java
+++ b/aai-resources/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java
@@ -35,8 +35,7 @@ import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
-import org.apache.cxf.jaxrs.ext.PATCH;
-
+import io.swagger.jaxrs.PATCH;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.restcore.RESTAPI;
diff --git a/aai-resources/src/main/java/org/onap/aai/rest/util/EchoResponse.java b/aai-resources/src/main/java/org/onap/aai/rest/util/EchoResponse.java
index 55a07e4..b1e156c 100644
--- a/aai-resources/src/main/java/org/onap/aai/rest/util/EchoResponse.java
+++ b/aai-resources/src/main/java/org/onap/aai/rest/util/EchoResponse.java
@@ -21,8 +21,9 @@
*/
package org.onap.aai.rest.util;
-import java.util.ArrayList;
-import java.util.HashMap;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.restcore.RESTAPI;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
@@ -34,14 +35,13 @@ import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
-
-import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.logging.ErrorLogHelper;
-import org.onap.aai.restcore.RESTAPI;
+import java.util.ArrayList;
+import java.util.HashMap;
/**
* The Class EchoResponse.
*/
+@Path("/util")
public class EchoResponse extends RESTAPI {
protected static String authPolicyFunctionName = "util";
@@ -60,7 +60,7 @@ public class EchoResponse extends RESTAPI {
*/
@GET
@Produces( { MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
- @Path(echoPath)
+ @Path("/echo")
public Response echoResult(@Context HttpHeaders headers, @Context HttpServletRequest req,
@QueryParam("action") String myAction) {
Response response = null;
diff --git a/aai-resources/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java b/aai-resources/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java
deleted file mode 100644
index 0fcce0b..0000000
--- a/aai-resources/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
-package org.onap.aai.util;
-
-import java.io.IOException;
-import java.util.UUID;
-
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-
-import org.onap.aai.dbmap.AAIGraph;
-import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.introspection.ModelInjestor;
-import org.onap.aai.logging.ErrorLogHelper;
-import org.onap.aai.logging.LogFormatTools;
-import org.onap.aai.logging.LoggingContext;
-import org.onap.aai.logging.LoggingContext.StatusCode;
-import org.onap.aai.migration.MigrationControllerInternal;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-public class AAIAppServletContextListener implements ServletContextListener {
-
- private static final String ACTIVEMQ_TCP_URL = "tcp://localhost:61447";
-
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(AAIAppServletContextListener.class.getName());
-
- /**
- * Destroys Context
- *
- * @param arg0 the ServletContextEvent
- */
- public void contextDestroyed(ServletContextEvent arg0) {
- }
-
- /**
- * Initializes Context
- *
- * @param arg0 the ServletContextEvent
- */
- public void contextInitialized(ServletContextEvent arg0) {
- System.setProperty("org.onap.aai.serverStarted", "false");
- System.setProperty("aai.service.name", "resources");
-
- LoggingContext.save();
- LoggingContext.component("init");
- LoggingContext.partnerName("NA");
- LoggingContext.targetEntity("aai-resources");
- LoggingContext.requestId(UUID.randomUUID().toString());
- LoggingContext.serviceName("aai-resources");
- LoggingContext.targetServiceName("contextInitialized");
- LoggingContext.statusCode(StatusCode.COMPLETE);
-
- LOGGER.info("AAI Server initialization started...");
- try {
- LOGGER.info("Loading aaiconfig.properties");
- AAIConfig.init();
-
- LOGGER.info("Loading error.properties");
- ErrorLogHelper.loadProperties();
-
- LOGGER.info("Loading graph database");
-
- AAIGraph.getInstance();
- ModelInjestor.getInstance();
-
- // Jsm internal broker for aai events
-
- LOGGER.info("A&AI Server initialization succcessful.");
- System.setProperty("activemq.tcp.url", ACTIVEMQ_TCP_URL);
- System.setProperty("org.onap.aai.serverStarted", "true");
- if ("true".equals(AAIConfig.get("aai.run.migrations", "false"))) {
- MigrationControllerInternal migrations = new MigrationControllerInternal();
- migrations.run(new String[]{"--commit"});
- }
-
- Runtime.getRuntime().addShutdownHook(new Thread() {
- public void run() {
- LOGGER.info("AAIGraph shutting down");
- AAIGraph.getInstance().graphShutdown();
- LOGGER.info("AAIGraph shutdown");
- System.out.println("Shutdown hook triggered.");
- }
- });
-
- } catch (AAIException e) {
- ErrorLogHelper.logException(e);
- throw new RuntimeException("AAIException caught while initializing A&AI server", e);
- } catch (IOException e) {
- ErrorLogHelper.logError("AAI_4000", e.getMessage());
- throw new RuntimeException("IOException caught while initializing A&AI server", e);
- } catch (Exception e) {
- LOGGER.error("Unknown failure while initializing A&AI Server " + LogFormatTools.getStackTop(e));
- throw new RuntimeException("Unknown failure while initializing A&AI server", e);
- }
-
- LOGGER.info("Resources MicroService Started");
- LOGGER.debug("Resources MicroService Started");
- LoggingContext.restore();
- }
-}
diff --git a/aai-resources/src/main/java/org/onap/aai/web/JerseyConfiguration.java b/aai-resources/src/main/java/org/onap/aai/web/JerseyConfiguration.java
new file mode 100644
index 0000000..8863d79
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/web/JerseyConfiguration.java
@@ -0,0 +1,151 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.web;
+
+import org.glassfish.jersey.filter.LoggingFilter;
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.servlet.ServletProperties;
+import org.onap.aai.rest.*;
+import org.onap.aai.rest.retired.V7V8Models;
+import org.onap.aai.rest.retired.V7V8NamedQueries;
+import org.onap.aai.rest.tools.ModelVersionTransformer;
+import org.onap.aai.rest.util.EchoResponse;
+import org.reflections.Reflections;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Profile;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Priority;
+import javax.ws.rs.ApplicationPath;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.ContainerResponseFilter;
+import java.util.List;
+import java.util.Set;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+
+@Component
+@ApplicationPath("/aai")
+public class JerseyConfiguration extends ResourceConfig {
+
+ private static final Logger log = Logger.getLogger(JerseyConfiguration.class.getName());
+
+ private Environment env;
+
+ @Autowired
+ public JerseyConfiguration(Environment env) {
+
+ this.env = env;
+
+ register(EchoResponse.class);
+ register(VertexIdConsumer.class);
+ register(ExampleConsumer.class);
+ register(BulkAddConsumer.class);
+ register(BulkProcessConsumer.class);
+ register(LegacyMoxyConsumer.class);
+ register(URLFromVertexIdConsumer.class);
+ register(V7V8Models.class);
+ register(V7V8NamedQueries.class);
+ register(ModelVersionTransformer.class);
+
+ //Request Filters
+ registerFiltersForRequests();
+ // Response Filters
+ registerFiltersForResponses();
+
+ property(ServletProperties.FILTER_FORWARD_ON_404, true);
+
+ // The following registers a LoggingFilter that logs the request and response headers
+ // If the LoggingFilter's second argument is set to true, it will print the response entity as well
+ if ("true".equalsIgnoreCase(env.getProperty("aai.request.logging.enabled"))) {
+ register(new LoggingFilter(log, false));
+ }
+ }
+
+ public void registerFiltersForRequests() {
+
+ // Find all the classes within the interceptors package
+ Reflections reflections = new Reflections("org.onap.aai.interceptors");
+ // Keep only the classes that implement ContainerRequestFilter
+ Set<Class<? extends ContainerRequestFilter>> filters = reflections.getSubTypesOf(ContainerRequestFilter.class);
+
+
+ // Ensure that each of the filters has the @Priority annotation and throw an exception if not
+ for (Class filterClass : filters) {
+ if (filterClass.getAnnotation(Priority.class) == null) {
+ throw new RuntimeException("Container filter " + filterClass.getName() + " does not have @Priority annotation");
+ }
+ }
+
+ // Drop any filter whose @Profile does not match an active profile and collect the rest into a list
+ List<Class<? extends ContainerRequestFilter>> filtersList = filters
+ .stream()
+ .filter(f -> {
+ if (f.isAnnotationPresent(Profile.class)
+ && !env.acceptsProfiles(f.getAnnotation(Profile.class).value())) {
+ return false;
+ }
+ return true;
+ })
+ .collect(Collectors.toList());
+
+ // Sort them in ascending order of their @Priority values
+ filtersList.sort((c1, c2) -> Integer.valueOf(c1.getAnnotation(Priority.class).value()).compareTo(c2.getAnnotation(Priority.class).value()));
+
+ // Then register this to the jersey application
+ filtersList.forEach(this::register);
+ }
+
+ public void registerFiltersForResponses() {
+
+ // Find all the classes within the interceptors package
+ Reflections reflections = new Reflections("org.onap.aai.interceptors");
+ // Keep only the classes that implement ContainerResponseFilter
+ Set<Class<? extends ContainerResponseFilter>> filters = reflections.getSubTypesOf(ContainerResponseFilter.class);
+
+
+ // Ensure that each of the filters has the @Priority annotation and throw an exception if not
+ for (Class filterClass : filters) {
+ if (filterClass.getAnnotation(Priority.class) == null) {
+ throw new RuntimeException("Container filter " + filterClass.getName() + " does not have @Priority annotation");
+ }
+ }
+
+ // Drop any filter whose @Profile does not match an active profile and collect the rest into a list
+ List<Class<? extends ContainerResponseFilter>> filtersList = filters.stream()
+ .filter(f -> {
+ if (f.isAnnotationPresent(Profile.class)
+ && !env.acceptsProfiles(f.getAnnotation(Profile.class).value())) {
+ return false;
+ }
+ return true;
+ })
+ .collect(Collectors.toList());
+
+ // Sort them in ascending order of their @Priority values
+ filtersList.sort((c1, c2) -> Integer.valueOf(c1.getAnnotation(Priority.class).value()).compareTo(c2.getAnnotation(Priority.class).value()));
+
+ // Then register this to the jersey application
+ filtersList.forEach(this::register);
+ }
+}
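For illustration, a request filter that this reflective registration would pick up looks roughly like the sketch below. The class name and header value are hypothetical; what JerseyConfiguration actually requires is that the class live under org.onap.aai.interceptors, implement ContainerRequestFilter (or ContainerResponseFilter), carry @Priority, and optionally restrict itself to a Spring profile with @Profile:

package org.onap.aai.interceptors;

import javax.annotation.Priority;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;

import org.springframework.context.annotation.Profile;

@Priority(1)
@Profile("production")
public class ExampleHeaderFilter implements ContainerRequestFilter {

    @Override
    public void filter(ContainerRequestContext requestContext) {
        // Runs before the resource method; the low @Priority value places it early in the chain.
        requestContext.getHeaders().putSingle("X-Example-Filter", "applied");
    }
}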
diff --git a/aai-resources/src/main/java/org/onap/aai/web/LocalHostAccessLog.java b/aai-resources/src/main/java/org/onap/aai/web/LocalHostAccessLog.java
new file mode 100644
index 0000000..4201a79
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/web/LocalHostAccessLog.java
@@ -0,0 +1,60 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.web;
+
+import ch.qos.logback.access.jetty.RequestLogImpl;
+import org.eclipse.jetty.server.handler.HandlerCollection;
+import org.eclipse.jetty.server.handler.RequestLogHandler;
+import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory;
+import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory;
+import org.springframework.boot.context.embedded.jetty.JettyServerCustomizer;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.Arrays;
+
+@Configuration
+public class LocalHostAccessLog {
+
+ @Bean
+ public EmbeddedServletContainerFactory jettyConfigBean(){
+ JettyEmbeddedServletContainerFactory jef = new JettyEmbeddedServletContainerFactory();
+ jef.addServerCustomizers((JettyServerCustomizer) server -> {
+
+ HandlerCollection handlers = new HandlerCollection();
+
+ Arrays.stream(server.getHandlers()).forEach(handlers::addHandler);
+
+ RequestLogHandler requestLogHandler = new RequestLogHandler();
+ requestLogHandler.setServer(server);
+
+ RequestLogImpl requestLogImpl = new RequestLogImpl();
+ requestLogImpl.setResource("/localhost-access-logback.xml");
+ requestLogImpl.start();
+
+ requestLogHandler.setRequestLog(requestLogImpl);
+ handlers.addHandler(requestLogHandler);
+ server.setHandler(handlers);
+ });
+ return jef;
+ }
+}
diff --git a/aai-resources/src/main/java/org/onap/aai/web/WebConfiguration.java b/aai-resources/src/main/java/org/onap/aai/web/WebConfiguration.java
new file mode 100644
index 0000000..aaa3998
--- /dev/null
+++ b/aai-resources/src/main/java/org/onap/aai/web/WebConfiguration.java
@@ -0,0 +1,48 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.web;
+
+//import org.springframework.context.annotation.Bean;
+//import org.springframework.context.annotation.Configuration;
+//import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
+//import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
+//
+//@Configuration
+//public class WebConfiguration {
+//
+// @Bean
+// public WebMvcConfigurerAdapter forwardToIndex() {
+// return new WebMvcConfigurerAdapter() {
+// @Override
+// public void addViewControllers(ViewControllerRegistry registry) {
+// registry.addViewController("/swagger").setViewName(
+// "redirect:/swagger/index.html");
+// registry.addViewController("/swagger/").setViewName(
+// "redirect:/swagger/index.html");
+// registry.addViewController("/docs").setViewName(
+// "redirect:/docs/html/index.html");
+// registry.addViewController("/docs/").setViewName(
+// "redirect:/docs/html/index.html");
+// }
+// };
+// }
+//}
diff --git a/aai-resources/src/main/jenkins/Jenkinsfile b/aai-resources/src/main/jenkins/Jenkinsfile
new file mode 100644
index 0000000..c74d439
--- /dev/null
+++ b/aai-resources/src/main/jenkins/Jenkinsfile
@@ -0,0 +1,31 @@
+node ("${BUILD_SLAVE}") {
+ // get the jenkinsfile root directory
+ def rootDir = pwd()
+
+ def JAVA_HOME = tool 'jdk180'
+ env.PATH = "${JAVA_HOME}/bin:${env.PATH}"
+ sh 'which java'
+ sh 'java -version'
+
+ env.DOCKER_HOST="tcp://localhost:4243"
+
+ // load external groovy scripts
+ def build
+ def checkout
+ def deploy
+ dir('tmp') {
+ git url: "${GIT_URL}", branch: "${GIT_BRANCH}"
+ checkout = load 'src/main/jenkins/checkout.groovy'
+ build = load 'src/main/jenkins/build.groovy'
+ deploy = load 'src/main/jenkins/deploy.groovy'
+ }
+
+ // check out code from git
+ checkout.gitCheckout()
+
+ // build the git project
+ build.buildProject()
+
+ deploy.deployService()
+
+} \ No newline at end of file
diff --git a/aai-resources/src/main/jenkins/build.groovy b/aai-resources/src/main/jenkins/build.groovy
new file mode 100644
index 0000000..3170510
--- /dev/null
+++ b/aai-resources/src/main/jenkins/build.groovy
@@ -0,0 +1,14 @@
+
+
+def buildProject() {
+ stage 'Build Git Project'
+ wrap([$class: 'ConfigFileBuildWrapper', managedFiles: [[fileId: 'eb0c7cc1-e851-4bc2-9401-2680c225f88c', targetLocation: '', variable: 'MAVEN_SETTINGS']]]) {
+ mvn '-s $MAVEN_SETTINGS -f pom.xml'
+}
+}
+
+def mvn(args) {
+ sh "${tool 'maven3'}/bin/mvn ${args} ${MAVEN_GOALS}"
+}
+
+return this \ No newline at end of file
diff --git a/aai-resources/src/main/jenkins/checkout.groovy b/aai-resources/src/main/jenkins/checkout.groovy
new file mode 100644
index 0000000..ed439ec
--- /dev/null
+++ b/aai-resources/src/main/jenkins/checkout.groovy
@@ -0,0 +1,14 @@
+
+def gitCheckout() {
+ stage 'Checkout GIT'
+ //different ways to checkout
+ //checkout from master
+ //git "url: ${GIT_URL}, branch: ${GIT_BRANCH}"
+ //checkout from branch hardcoding"
+ //git branch: 'jenkins_deploy_test', credentialsId: 'b9bbafe5-53ce-4d2c-8b84-09137f75c592', url: 'https://codecloud.web.att.com/scm/st_ocnp/sdk-java-starter.git'
+ //checkout from branch parameters with credentials
+ //git branch: "${GIT_BRANCH}", credentialsId: 'b9bbafe5-53ce-4d2c-8b84-09137f75c592', url: "${GIT_URL}"
+ //checkout from branch parameters with no credentials
+ git branch: "${GIT_BRANCH}", url: "${GIT_URL}"
+}
+return this \ No newline at end of file
diff --git a/aai-resources/src/main/jenkins/deploy.groovy b/aai-resources/src/main/jenkins/deploy.groovy
new file mode 100644
index 0000000..1a000e3
--- /dev/null
+++ b/aai-resources/src/main/jenkins/deploy.groovy
@@ -0,0 +1,15 @@
+def deployService(){
+ stage 'Deploying Service'
+
+ // get the jenkinsfile root directory
+ def ROOT_DIR = pwd()
+ ROOT_DIR = "${ROOT_DIR}"+'/src/main/kubernetes'
+ echo "ROOTDIR : ${ROOT_DIR}"
+ sh "/opt/app/kubernetes/v1.3.4/bin/kubectl --kubeconfig=${ROOT_DIR}/kubectl.conf replace --force --cascade -f ${ROOT_DIR}/${artifactId}-svc.yaml"
+ sh "/opt/app/kubernetes/v1.3.4/bin/kubectl --kubeconfig=${ROOT_DIR}/kubectl.conf replace --force --cascade -f ${ROOT_DIR}/${artifactId}-rc.yaml"
+}
+return this
+
+
+
+
diff --git a/aai-resources/src/main/kubernetes/ajsc6configdemo-rc.yaml b/aai-resources/src/main/kubernetes/ajsc6configdemo-rc.yaml
new file mode 100644
index 0000000..9addfe3
--- /dev/null
+++ b/aai-resources/src/main/kubernetes/ajsc6configdemo-rc.yaml
@@ -0,0 +1,28 @@
+apiVersion: v1
+kind: ReplicationController
+metadata:
+ name: ajsc6configdemo
+ namespace: org-onap-aai
+spec:
+ replicas: 5
+ selector:
+ app: ajsc6configdemo
+ template:
+ metadata:
+ labels:
+ app: ajsc6configdemo
+ spec:
+ serviceAccount: ajsc-svc-account
+ containers:
+ - name: ajsc6configdemo
+ volumeMounts:
+ - name: myservice-properties-volume
+ mountPath: /etc/config/dynamic
+ image: zlp11313.vci.att.com:5100/com.att.ajsc/ajsc6configdemo:latest
+ imagePullPolicy: Always
+ ports:
+ - containerPort: 8080
+ volumes:
+ - name: myservice-properties-volume
+ configMap:
+ name: ajsc6configdemo-dynamic-properties \ No newline at end of file
diff --git a/aai-resources/src/main/kubernetes/ajsc6configdemo-svc.yaml b/aai-resources/src/main/kubernetes/ajsc6configdemo-svc.yaml
new file mode 100644
index 0000000..f20e5b9
--- /dev/null
+++ b/aai-resources/src/main/kubernetes/ajsc6configdemo-svc.yaml
@@ -0,0 +1,21 @@
+{
+ "kind": "Service",
+ "apiVersion": "v1",
+ "metadata": {
+ "name": "ajsc6configdemo",
+ "namespace": "org-onap-aai"
+ },
+ "spec": {
+ "selector": {
+ "app": "ajsc6configdemo"
+ },
+ "ports": [
+ {
+ "protocol": "TCP",
+ "port": 80,
+ "targetPort": 8080
+ }
+ ],
+ "type": "NodePort"
+ }
+}
diff --git a/aai-resources/src/main/kubernetes/ajsc6configdemo.yaml b/aai-resources/src/main/kubernetes/ajsc6configdemo.yaml
new file mode 100644
index 0000000..545d7f6
--- /dev/null
+++ b/aai-resources/src/main/kubernetes/ajsc6configdemo.yaml
@@ -0,0 +1,8 @@
+apiVersion: v1
+data:
+ dynamic.properties: |
+ my.property.name=prod
+kind: ConfigMap
+metadata:
+ name: ajsc6configdemo-dynamic-properties
+ namespace: org-onap-aai \ No newline at end of file
diff --git a/aai-resources/src/main/kubernetes/kubectl.conf b/aai-resources/src/main/kubernetes/kubectl.conf
new file mode 100644
index 0000000..da4da58
--- /dev/null
+++ b/aai-resources/src/main/kubernetes/kubectl.conf
@@ -0,0 +1,22 @@
+current-context: baremetal-ajsc-01
+apiVersion: v1
+clusters:
+- cluster:
+ api-version: v1
+ server: https://hlxkvm022.vtil.att.com
+ insecure-skip-tls-verify: true
+ name: baremetal-ajsc-01
+contexts:
+- context:
+ cluster: baremetal-ajsc-01
+ namespace: org-onap-aai
+ user: m93659@ajsc.att.com
+ name: baremetal-ajsc-01
+kind: Config
+preferences:
+ colors: true
+users:
+- name: m93659@ajsc.att.com
+ user:
+ username: kube
+ password: F75I2d9bb16s
diff --git a/aai-resources/src/main/resources/application.properties b/aai-resources/src/main/resources/application.properties
new file mode 100644
index 0000000..05611c2
--- /dev/null
+++ b/aai-resources/src/main/resources/application.properties
@@ -0,0 +1,70 @@
+# The following info parameters are being referenced by ajsc6
+info.build.artifact=aai-resources
+info.build.name=resources
+info.build.description=Resources Microservice
+info.build.version=1.1.0
+
+spring.application.name=aai-resources
+spring.jersey.type=filter
+
+server.contextPath=/
+spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration,org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration
+
+spring.profiles.active=production
+#The max number of active threads in this pool
+server.tomcat.max-threads=200
+#The minimum number of threads always kept alive
+server.tomcat.min-Spare-Threads=25
+#The number of milliseconds before an idle thread shuts down, unless the number of active threads is less than or equal to minSpareThreads
+server.tomcat.max-idle-time=60000
+
+
+#Add these properties only if you want to change the URLs that the AJSC Framework interceptors will intercept
+#com.att.ajsc.common.interceptors.PreInterceptor.url=/**
+#com.att.ajsc.common.interceptors.PostInterceptor.url=/**
+
+#Servlet context parameters
+server.context_parameters.p-name=value #context parameter with p-name as key and value as value.
+kubernetes.namespace=org-onap-aai
+
+# If you get an application startup failure because the port is already taken, change server.port below
+# If that's not it, please check whether the key-store file path makes sense
+server.local.startpath=aai-resources/src/main/resources/
+server.basic.auth.location=${server.local.startpath}etc/auth/realm.properties
+
+server.port=8447
+server.ssl.enabled-protocols=TLSv1.1,TLSv1.2
+server.ssl.key-store=${server.local.startpath}etc/auth/aai_keystore
+server.ssl.key-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
+server.ssl.trust-store=${server.local.startpath}etc/auth/aai_keystore
+server.ssl.trust-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
+server.ssl.client-auth=want
+server.ssl.key-store-type=JKS
+
+# JMS bind address host port
+jms.bind.address=tcp://localhost:61647
+dmaap.ribbon.eureka.enabled=false
+dmaap.ribbon.listOfServers=localhost:3904
+# Number of milliseconds to wait before making ping requests again
+dmaap.ribbon.ServerListRefreshInterval=75000
+dmaap.ribbon.NFLoadBalancerPingInterval=75000
+dmaap.ribbon.NFLoadBalancerRuleClassName=com.netflix.loadbalancer.AvailabilityFilteringRule
+dmaap.ribbon.NFLoadBalancerPingClassName=org.onap.aai.config.HttpPingImpl
+dmaap.ribbon.EnableMarkingServerDownOnReachingFailureLimit=true
+dmaap.ribbon.ServerDownFailureLimit=1
+# This needs to be verified, but it seems that adding this property should automatically
+# make the dmaap client change the url from http to https depending on the server
+dmaap.ribbon.securePorts=3905
+
+# Custom Dmaap Specific Configuration
+dmaap.ribbon.health.endpoint=/topics/AAI-EVENT
+# Number of seconds to wait for the ping to work; this might need to be increased if the pings are all failing
+dmaap.ribbon.pingport.timeout=3
+
+niws.loadbalancer.dmaap.filterCircuitTripped=true
+niws.loadbalancer.dmaap.connectionFailureCountThreshold=3
+niws.loadbalancer.dmaap.circuitTripMaxTimeoutSeconds=180
+#dmaap.ribbon.retryableStatusCodes=404,503
+#dmaap.ribbon.retryableStatusCodes.MaxAutoRetriesNextServer=2
+#dmaap.ribbon.retryableStatusCodes.MaxAutoRetries=2
+#dmaap.ribbon.retryableStatusCodes.OkToRetryOnAllOperations=true
diff --git a/aai-resources/src/main/resources/bootstrap.properties b/aai-resources/src/main/resources/bootstrap.properties
new file mode 100644
index 0000000..65a28ac
--- /dev/null
+++ b/aai-resources/src/main/resources/bootstrap.properties
@@ -0,0 +1,16 @@
+# Activate Spring Cloud Config Server functionality; these should remain false while using the file-based implementation to prevent conflicts with properties pulled from a remote server.
+spring.cloud.config.discovery.enabled=false
+spring.cloud.config.enabled=false
+
+# Allow Spring Cloud properties configuration to override properties set from System Properties and ENV Properties
+spring.cloud.config.overrideSystemProperties=true
+
+# Path to the file containing the dynamic properties
+com.att.ajsc.dynamic.properties.path=/etc/config/dynamic/dynamic.properties
+
+
+# Path to the file containing the dynamic logger levels
+com.att.ajsc.dynamic.logging.path=/etc/config/logging/logging.properties
+
+# File watcher polling frequency in milliseconds
+com.att.ajsc.dynamic.watcher.delay=5000 \ No newline at end of file
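bootstrap.properties points the AJSC dynamic-properties watcher at /etc/config/dynamic/dynamic.properties (the path the Kubernetes ConfigMap elsewhere in this change set mounts) and polls it every 5000 ms. A rough sketch of that polling pattern, under the assumption that a simple lastModified check is enough; this is not the AJSC implementation and the class name is illustrative:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class DynamicPropertiesWatcher {
    public static void main(String[] args) throws IOException, InterruptedException {
        String path = "/etc/config/dynamic/dynamic.properties"; // com.att.ajsc.dynamic.properties.path
        long delayMs = 5000;                                     // com.att.ajsc.dynamic.watcher.delay
        long lastModified = 0;
        while (true) {
            File file = new File(path);
            if (file.exists() && file.lastModified() != lastModified) {
                lastModified = file.lastModified();
                Properties props = new Properties();
                try (FileInputStream in = new FileInputStream(file)) {
                    props.load(in);
                }
                System.out.println("Reloaded dynamic properties: " + props);
            }
            Thread.sleep(delayMs);
        }
    }
}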
diff --git a/aai-resources/src/main/resources/docker/Dockerfile.ext b/aai-resources/src/main/resources/docker/Dockerfile.ext
deleted file mode 100644
index 6beaf58..0000000
--- a/aai-resources/src/main/resources/docker/Dockerfile.ext
+++ /dev/null
@@ -1,76 +0,0 @@
-FROM ubuntu:14.04
-
-ENV DEBIAN_FRONTEND noninteractive
-
-RUN apt-get update && apt-get install -y software-properties-common
-
-# sudo -E is required to preserve the environment
-# If you remove that line, it will most like freeze at this step
-
-RUN sudo -E add-apt-repository ppa:openjdk-r/ppa && \
- apt-get update && \
- apt-get -qq install -y openjdk-8-jre-headless git curl ksh
-
-# Setup JAVA_HOME, this is useful for docker commandline
-ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
-
-# Install Chef
-RUN curl -LO https://packages.chef.io/stable/ubuntu/14.04/chefdk_0.17.17-1_amd64.deb
-RUN dpkg -i chefdk_0.17.17-1_amd64.deb
-RUN rm chefdk_0.17.17-1_amd64.deb
-
-# Add the application folder and common libs to /opt inside container
-# Add the chef script and startup script to docker container
-# Change the permissions to enable execute access
-
-ADD ./opt/app /opt/app
-ADD ./commonLibs/ /opt/app/commonLibs/
-
-ADD init-chef.sh /init-chef.sh
-ADD startup.sh /startup.sh
-ADD aai.sh /etc/profile.d/aai.sh
-
-RUN chmod 755 /init-chef.sh /startup.sh
-RUN chmod 644 /etc/profile.d/aai.sh
-
-# When the container is started this is the entrypoint script
-# that docker will run. Make sure this script doesn't end abruptly
-# If you want the container running even if the main application stops
-# You can run a ever lasting process like tail -f /dev/null
-# Or something like that at the end of the startup script
-# So if the main application you are planning on running fails
-# the docker container keeps on running forever
-
-ENTRYPOINT ./startup.sh
-
-# Expose the ports for outside linux to use
-# 8443 is the important one to be used
-
-EXPOSE 8443
-EXPOSE 8080
-
-# Create the directory structure of aai application resembling the development server
-# hard-coding path to match ajsc version
-
-RUN mkdir /opt/aaihome && \
- useradd -ms /bin/bash -d /opt/aaihome/aaiadmin aaiadmin && \
- ln -s /opt/app/${project.artifactId}/${project.version} /opt/app/aai && \
- chown aaiadmin:aaiadmin /opt/app/aai && \
- chown -R aaiadmin:aaiadmin /opt/app/${project.artifactId}/${project.version} && \
- mkdir -p /opt/aai/logroot && \
- chown -R aaiadmin:aaiadmin /opt/aai/logroot && \
- ln -s /opt/app/aai/bin scripts && \
- mkdir /opt/app/aai/extApps && chown -R aaiadmin:aaiadmin /opt/app/aai/extApps && \
- find /opt/app/aai/bin -name "*.sh" -exec chmod 755 {} +
-
-WORKDIR /var/chef
-
-RUN chown aaiadmin:aaiadmin /startup.sh && \
- chown -R aaiadmin:aaiadmin /var/chef
-
-RUN mkdir /opt/aai/logroot/AAI && chown aaiadmin:aaiadmin /opt/aai/logroot/AAI
-VOLUME /opt/aai/logroot/AAI
-
-WORKDIR /
-
-USER aaiadmin
diff --git a/aai-resources/src/main/resources/docker/commonLibs/README b/aai-resources/src/main/resources/docker/commonLibs/README
deleted file mode 100644
index 00e36c0..0000000
--- a/aai-resources/src/main/resources/docker/commonLibs/README
+++ /dev/null
@@ -1 +0,0 @@
-// this file's presence ensures commonLibs folder is present when image is created \ No newline at end of file
diff --git a/aai-resources/src/main/resources/docker/docker-entrypoint.sh b/aai-resources/src/main/resources/docker/docker-entrypoint.sh
deleted file mode 100644
index 695d515..0000000
--- a/aai-resources/src/main/resources/docker/docker-entrypoint.sh
+++ /dev/null
@@ -1,104 +0,0 @@
-#
-# ============LICENSE_START=======================================================
-# org.onap.aai
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-# Set the current path to be the application home and common libs home
-APP_HOME=$(pwd);
-COMMONLIBS_HOME="/opt/app/commonLibs";
-
-export CHEF_CONFIG_REPO=${CHEF_CONFIG_REPO:-aai-config};
-export CHEF_GIT_URL=${CHEF_GIT_URL:-http://gerrit.onap.org/r/aai};
-export CHEF_CONFIG_GIT_URL=${CHEF_CONFIG_GIT_URL:-$CHEF_GIT_URL};
-export CHEF_DATA_GIT_URL=${CHEF_DATA_GIT_URL:-$CHEF_GIT_URL};
-
-USER_ID=${LOCAL_USER_ID:-9001}
-
-if [ $(cat /etc/passwd | grep aaiadmin | wc -l) -eq 0 ]; then
- useradd --shell=/bin/bash -u ${USER_ID} -o -c "" -m aaiadmin || {
- echo "Unable to create the user id for ${USER_ID}";
- exit 1;
- }
-fi;
-
-chown -R aaiadmin:aaiadmin /opt/app /var/chef /opt/aai/logroot
-
-gosu aaiadmin ./init-chef.sh || {
- echo "Unable to run init chef script, please check logs";
- exit 1;
-}
-
-httpPort=8087;
-httpsPort=8447;
-
-AAI_CORE_JAR=$(ls ${APP_HOME}/extJars/aai-core*.jar);
-
-# Get the basename of the aai-core-X.Y.Z-SNAPSHOT.jar
-# Then extract the X.Y.Z-SNAPSHOT part
-AAI_CORE_VERSION=$(basename ${AAI_CORE_JAR} | grep -o '[0-9]*\.[0-9]*\.[0-9]*[^.]*');
-
-cd ${APP_HOME};
-
-CP=${COMMONLIBS_HOME}/*;
-CP="$CP":${APP_HOME}/etc;
-CP="$CP":${APP_HOME}/lib/*;
-CP="$CP":${APP_HOME}/extJars/logback-access-1.1.7.jar;
-CP="$CP":${APP_HOME}/extJars/logback-core-1.1.7.jar;
-CP="$CP":${AAI_CORE_JAR};
-
-# You can add additional jvm options by adding environment variable JVM_PRE_OPTS
-# If you need to add more jvm options at the end then you can use JVM_POST_OPTS
-JVM_OPTS="${JVM_PRE_OPTS} ${JVM_OPTS}";
-JVM_OPTS="${JVM_OPTS} -server -XX:NewSize=512m -XX:MaxNewSize=512m";
-JVM_OPTS="${JVM_OPTS} -XX:SurvivorRatio=8";
-JVM_OPTS="${JVM_OPTS} -XX:+DisableExplicitGC -verbose:gc -XX:+UseParNewGC";
-JVM_OPTS="${JVM_OPTS} -XX:+CMSParallelRemarkEnabled -XX:+CMSClassUnloadingEnabled";
-JVM_OPTS="${JVM_OPTS} -XX:+UseConcMarkSweepGC -XX:-UseBiasedLocking";
-JVM_OPTS="${JVM_OPTS} -XX:ParallelGCThreads=4";
-JVM_OPTS="${JVM_OPTS} -XX:LargePageSizeInBytes=128m ";
-JVM_OPTS="${JVM_OPTS} -XX:+PrintGCDetails -XX:+PrintGCTimeStamps";
-JVM_OPTS="${JVM_OPTS} -Xloggc:${APP_HOME}/logs/gc/graph-query_gc.log";
-JVM_OPTS="${JVM_OPTS} -XX:+HeapDumpOnOutOfMemoryError";
-JVM_OPTS="${JVM_OPTS} ${JVM_POST_OPTS}";
-
-# You can add additional java options by adding environment variable JAVA_PRE_OPTS
-# If you need to add more jvm options at the end then you can use JAVA_POST_OPTS
-JAVA_OPTS="${JAVA_PRE_OPTS} ${JAVA_OPTS}";
-JAVA_OPTS="${JAVA_OPTS} -Dsun.net.inetaddr.ttl=180";
-JAVA_OPTS="${JAVA_OPTS} -Dhttps.protocols=TLSv1.1,TLSv1.2";
-JAVA_OPTS="${JAVA_OPTS} -DSOACLOUD_SERVICE_VERSION=1.0.1";
-JAVA_OPTS="${JAVA_OPTS} -DAJSC_HOME=${APP_HOME}";
-JAVA_OPTS="${JAVA_OPTS} -DAJSC_CONF_HOME=${APP_HOME}/bundleconfig";
-JAVA_OPTS="${JAVA_OPTS} -DAJSC_SHARED_CONFIG=${APP_HOME}/bundleconfig";
-JAVA_OPTS="${JAVA_OPTS} -DAFT_HOME=${APP_HOME}";
-JAVA_OPTS="${JAVA_OPTS} -DAAI_CORE_VERSION=${AAI_CORE_VERSION}";
-JAVA_OPTS="${JAVA_OPTS} -Daai-core.version=${AAI_CORE_VERSION}";
-JAVA_OPTS="${JAVA_OPTS} -Dlogback.configurationFile=${APP_HOME}/bundleconfig/etc/logback.xml";
-JAVA_OPTS="${JAVA_OPTS} ${JAVA_POST_OPTS}";
-
-JAVA_ARGS="${JAVA_PRE_ARGS} ${JAVA_ARGS}";
-JAVA_ARGS="${JAVA_ARGS} context=/";
-JAVA_ARGS="${JAVA_ARGS} port=$httpPort";
-JAVA_ARGS="${JAVA_ARGS} sslport=$httpsPort";
-JAVA_ARGS="${JAVA_ARGS} ${JAVA_POST_ARGS}";
-
-JAVA_CMD="exec gosu aaiadmin java";
-# Run the following command as aai-admin using gosu and make that process main
-${JAVA_CMD} -cp ${CLASSPATH}:${CP} ${JVM_OPTS} ${JAVA_OPTS} com.att.ajsc.runner.Runner ${JAVA_ARGS} "$@"
diff --git a/aai-resources/src/main/resources/docker/init-chef.sh b/aai-resources/src/main/resources/docker/init-chef.sh
deleted file mode 100644
index 416c25f..0000000
--- a/aai-resources/src/main/resources/docker/init-chef.sh
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START=======================================================
-# org.onap.aai
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-##############################################################################
-# Script to initialize the chef-repo branch and.chef
-#
-##############################################################################
-
-cd /var/chef;
-
-if [ ! -d "aai-config" ]; then
-
- git clone --depth 1 -b ${CHEF_BRANCH} --single-branch ${CHEF_CONFIG_GIT_URL}/${CHEF_CONFIG_REPO}.git aai-config || {
- echo "Error: Unable to clone the aai-config repo with url: ${CHEF_GIT_URL}/${CHEF_CONFIG_REPO}.git";
- exit 1;
- }
-
-fi
-
-if [ -d "aai-config/cookbooks/aai-resources" ]; then
-
- (cd aai-config/cookbooks/aai-resources/ && \
- for f in $(ls); do mv $f ../; done && \
- cd ../ && rmdir aai-resources);
-
-fi;
-
-if [ ! -d "aai-data" ]; then
-
- git clone --depth 1 -b ${CHEF_BRANCH} --single-branch ${CHEF_DATA_GIT_URL}/aai-data.git aai-data || {
- echo "Error: Unable to clone the aai-data repo with url: ${CHEF_GIT_URL}";
- exit 1;
- }
-
-fi
-
-chef-solo \
- -c /var/chef/aai-data/chef-config/dev/.knife/solo.rb \
- -j /var/chef/aai-config/cookbooks/runlist-aai-resources.json \
- -E ${AAI_CHEF_ENV};
-
-TITAN_REALTIME="/opt/app/aai-resources/bundleconfig/etc/appprops/titan-realtime.properties";
-
-if [ ! -f ${TITAN_REALTIME} ]; then
- echo "Unable to find the titan realtime file";
- exit 1;
-fi
-
-HBASE_HOSTNAME=$(grep "storage.hostname" ${TITAN_REALTIME} | cut -d"=" -f2-);
-HBASE_PORT="${HBASE_PORT:-2181}";
-NUM_OF_RETRIES=${NUM_OF_RETRIES:-200};
-retry=0;
-
-# Tries to connect to hbase on port 2181 until success
-# or until the number of retries by default 500 times
-# with each time giving a sleep of 5 seconds between
-# each time it pings the port on that host
-
-while ! nc -z ${HBASE_HOSTNAME} ${HBASE_PORT} ;
-do
- if [ $retry -eq $NUM_OF_RETRIES ]; then
- echo "Unable to connect to hbase after $NUM_OF_RETRIES retries, please check if hbase server is properly configured and be able to connect";
- exit 1;
- fi;
-
- echo "Waiting for hbase to be up";
- sleep 5;
-
- retry=$((retry + 1));
-done
-
-HBASE_STARTUP_ARTIFICIAL_DELAY=${HBASE_STARTUP_ARTIFICIAL_DELAY:-50};
-
-# By default the artificial delay will be introduced
-# the user can override it by set DISABLE_HBASE_STARTUP_ARTIFICIAL_DELAY to some string
-
-if [ -z "${DISABLE_HBASE_STARTUP_ARTIFICIAL_DELAY}" ]; then
- sleep ${HBASE_STARTUP_ARTIFICIAL_DELAY};
-fi;
-
-/opt/app/aai-resources/bin/createDBSchema.sh || {
- echo "Error: Unable to create the db schema, please check if the hbase host is configured and up";
- exit 1;
-}
diff --git a/aai-resources/src/main/resources/etc/VNT-migration-data/VNT-migration-input.csv b/aai-resources/src/main/resources/etc/VNT-migration-data/VNT-migration-input.csv
new file mode 100644
index 0000000..916dced
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/VNT-migration-data/VNT-migration-input.csv
@@ -0,0 +1,2 @@
+"entitlement-pool-uuid vendor-allowed-max-bandwidth (VNT)"
+"some-uuid 1000"
diff --git a/aai-resources/src/main/resources/etc/appprops/Introscope.properties b/aai-resources/src/main/resources/etc/appprops/Introscope.properties
new file mode 100644
index 0000000..319381e
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/Introscope.properties
@@ -0,0 +1,8 @@
+#The CSI environment uses the Introscope java agent for monitoring services. The AJSC has provided an implementation class that
+#provides basic information to the Introscope Enterprise Manager for each http request/response.
+
+introscopeEventClass=com.att.ajsc.introscope.IntroscopeEventNotifierImpl
+serviceName=N/A
+conversationId=N/A
+uniqueID=N/A
+userID=N/A
diff --git a/aai-resources/src/main/resources/etc/appprops/PostProcessorInterceptors.properties b/aai-resources/src/main/resources/etc/appprops/PostProcessorInterceptors.properties
new file mode 100644
index 0000000..ca31a26
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/PostProcessorInterceptors.properties
@@ -0,0 +1,3 @@
+#This properties file is for defining any PostProcessorInterceptors that have been created for your AJSC service.
+
+/**=org.onap.aai.interceptors.PostAaiAjscInterceptor
diff --git a/aai-resources/src/main/resources/etc/appprops/PreProcessorInterceptors.properties b/aai-resources/src/main/resources/etc/appprops/PreProcessorInterceptors.properties
new file mode 100644
index 0000000..1620bea
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/PreProcessorInterceptors.properties
@@ -0,0 +1,3 @@
+#This properties file is for defining any PreProcessorInterceptors that have been created for your AJSC service.
+
+/**=com.att.ajsc.csi.restmethodmap.RestMethodMapInterceptor,org.onap.aai.interceptors.PreAaiAjscInterceptor
diff --git a/aai-resources/src/main/resources/etc/appprops/aaiconfig.properties b/aai-resources/src/main/resources/etc/appprops/aaiconfig.properties
new file mode 100644
index 0000000..84072f9
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/aaiconfig.properties
@@ -0,0 +1,165 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+
+####################################################################
+# REMEMBER TO THINK ABOUT ENVIRONMENTAL DIFFERENCES AND CHANGE THE
+# TEMPLATE AND *ALL* DATAFILES
+####################################################################
+
+aai.config.checktime=1000
+
+# this could come from siteconfig.pl?
+aai.config.nodename=AutomaticallyOverwritten
+
+aai.logging.hbase.interceptor=true
+aai.logging.hbase.enabled=true
+aai.logging.hbase.logrequest=true
+aai.logging.hbase.logresponse=true
+
+aai.logging.trace.enabled=true
+aai.logging.trace.logrequest=false
+aai.logging.trace.logresponse=false
+
+aai.transaction.logging=true
+aai.transaction.logging.get=true
+aai.transaction.logging.post=true
+
+aai.tools.enableBasicAuth=true
+aai.tools.username=AAI
+aai.tools.password=AAI
+
+ecm.openstack.tenantid=b0a529aba48440a39e0caf1aea9b27e3
+
+aai.avpn.interim.laginterface=ae0
+
+aai.server.url.base=https://localhost:8443/aai/
+aai.server.url=https://localhost:8443/aai/v12/
+aai.global.callback.url=https://localhost:8443/aai/
+
+aai.auth.cspcookies_on=false
+aai.dbmodel.filename=ex5.json
+aai.truststore.filename=aai_keystore
+aai.truststore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
+aai.keystore.filename=aai-client-cert.p12
+aai.keystore.passwd.x=
+
+# for transaction log
+hbase.table.name=aailogging-dev1.dev
+hbase.notificationTable.name=aainotification-dev1.dev
+hbase.table.timestamp.format=YYYYMMdd-HH:mm:ss:SSS
+hbase.zookeeper.quorum=ONAPserverTBD
+hbase.zookeeper.property.clientPort=2181
+hbase.zookeeper.znode.parent=/hbase
+
+# pin up a skeleton if it's not found
+
+aai.precheck.cloudinfrastructure.complex.defaults.physicalLocationType=AAIDefault
+aai.precheck.cloudinfrastructure.complex.defaults.street1=AAIDefault
+aai.precheck.cloudinfrastructure.complex.defaults.city=AAIDefault
+aai.precheck.cloudinfrastructure.complex.defaults.state=NJ
+aai.precheck.cloudinfrastructure.complex.defaults.postalCode=07748
+aai.precheck.cloudinfrastructure.complex.defaults.country=USA
+aai.precheck.cloudinfrastructure.complex.defaults.region=US
+
+# single primary server
+aai.primary.filetransfer.serverlist=ONAPserverTBD
+aai.primary.filetransfer.primarycheck=echo:8443/aai/util/echo
+aai.primary.filetransfer.pingtimeout=5000
+aai.primary.filetransfer.pingcount=5
+
+#rsync properties
+aai.rsync.command=rsync
+aai.rsync.options.list=-v|-t
+aai.rsync.remote.user=aaiadmin
+aai.rsync.enabled=y
+
+#Service Specific Data Values
+aai.servicedescription.hostedcomm=HOSTED COMMUNICATIONS
+aai.servicedescription.mobility=MOBILITY
+aai.servicedescription.vusp=VIRTUAL USP
+aai.servicedescription.vvig=vVIGaaS
+aai.servicedescription.lrsi=LRSI-OSPF
+
+aai.notification.current.version=v12
+aai.notificationEvent.default.status=UNPROCESSED
+aai.notificationEvent.default.eventType=AAI-EVENT
+aai.notificationEvent.default.domain=devINT1
+aai.notificationEvent.default.sourceName=aai
+aai.notificationEvent.default.sequenceNumber=0
+aai.notificationEvent.default.severity=NORMAL
+aai.notificationEvent.default.version=v12
+# This one lets us enable/disable resource-version checking on updates/deletes
+aai.resourceversion.enableflag=true
+aai.logging.maxStackTraceEntries=10
+aai.default.api.version=v12
+
+# Used by Model-processing code
+aai.model.delete.sleep.per.vtx.msec=500
+aai.model.query.resultset.maxcount=50
+aai.model.query.timeout.sec=90
+
+# Used by Data Grooming
+aai.grooming.default.max.fix=150
+aai.grooming.default.sleep.minutes=7
+
+# Used by DupeTool
+aai.dupeTool.default.max.fix=25
+aai.dupeTool.default.sleep.minutes=7
+
+aai.model.proc.max.levels=50
+aai.edgeTag.proc.max.levels=50
+
+# Used by the ForceDelete tool
+aai.forceDel.protected.nt.list=cloud-region
+aai.forceDel.protected.edge.count=10
+aai.forceDel.protected.descendant.count=10
+
+# Used for CTAG-Pool generation
+aai.ctagPool.rangeString.vplsPe1=2001-2500
+aai.ctagPool.rangeString.vplsPe2=2501-3000
+aai.ctagPool.rangeString.vpe=3001-3500
+
+aai.dmaap.workload.enableEventProcessing=true
+
+aai.realtime.clients=RO,SDNC,MSO
+
+aai.server.rebind=g
+
+# This is a fake one just for test so please ignore
+ecm.auth.password.x=OBF:1igd1i9a1jnb1yte1vv11vu91yt81jk71i6o1idt
+aai.run.migrations=false
+aai.jms.enable=false
+
+#timeout for crud enabled flag
+aai.crud.timeoutenabled=true
+
+#app-specific timeouts: -1 to bypass the timeout for that app id, or a whole number to override the timeout with that value (in ms)
+aai.crud.timeout.appspecific=JUNITTESTAPP1,1|JUNITTESTAPP2,-1|DCAE-CCS,-1|DCAES,-1|AAIRctFeed,-1|NewvceCreator,-1|IANewvceCreator,-1|AAI-CSIOVALS,-1
+
+#default timeout limit added for crud if not overridden (in ms)
+aai.crud.timeoutlimit=100000
+#limit set for bulk consumer APIS
+aai.bulkconsumer.payloadlimit=30
+
+#uncomment and use header X-OverrideLimit with the value to override the bulk api limit
+#aai.bulkconsumer.payloadoverride=E6F04B93462CB5B0EDF41C05A9DDF5C3FE59748F
+aai.bulkconsumer.payloadoverride=false
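As a reading aid for the aai.crud.timeout.appspecific format above: entries are '|'-separated, each entry is "<app-id>,<timeout>", and -1 bypasses the timeout for that app. A minimal parsing sketch, not the actual A&AI implementation; the class and method names are illustrative:

import java.util.HashMap;
import java.util.Map;

public class CrudTimeoutOverrides {

    // Parses e.g. "JUNITTESTAPP1,1|DCAE-CCS,-1" into a map of app id to timeout override.
    public static Map<String, Integer> parse(String appSpecific) {
        Map<String, Integer> overrides = new HashMap<>();
        if (appSpecific == null || appSpecific.isEmpty()) {
            return overrides;
        }
        for (String entry : appSpecific.split("\\|")) {
            String[] parts = entry.split(",");
            if (parts.length == 2) {
                overrides.put(parts[0].trim(), Integer.valueOf(parts[1].trim()));
            }
        }
        return overrides;
    }

    public static void main(String[] args) {
        System.out.println(parse("JUNITTESTAPP1,1|JUNITTESTAPP2,-1|DCAE-CCS,-1"));
    }
}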
diff --git a/aai-resources/src/main/resources/etc/appprops/app-intercepts.properties b/aai-resources/src/main/resources/etc/appprops/app-intercepts.properties
new file mode 100644
index 0000000..3230921
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/app-intercepts.properties
@@ -0,0 +1,6 @@
+#This is where all your application intercept strategies must be configured. AJSC reads this property file and adds
+#the list of intercepts specified here to the camel context. This can be useful for accessing every exchange object transferred from/to
+#each endpoint in the request/response flow and can allow for more precise debugging and/or processing of the exchange.
+
+#e.g.
+#intercepts=org.openecomp.aai.ajsc_aai.JaxrsEchoService,packagename.class1name,packagename.class2name
diff --git a/aai-resources/src/main/resources/etc/appprops/caet.properties b/aai-resources/src/main/resources/etc/appprops/caet.properties
new file mode 100644
index 0000000..ac4960c
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/caet.properties
@@ -0,0 +1,4 @@
+#caet_service=http://DME2RESOLVE/service=com.att.csid.CAET/version=3/envContext=TEST/routeOffer=TEST_CAET
+#caet_service=http://DME2RESOLVE/service=com.att.csid.CAET/version=3/envContext=TEST/routeOffer=D3A_CAET
+#caet_service=dme2://DME2RESOLVE/service=com.att.csid.CAET/version=4.0/envContext=TEST/routeOffer=TEST_CAET
+caet_service=http://DME2RESOLVE/service=com.att.csid.CAET/version=4.0/envContext=TEST/routeOffer=TEST_CAET \ No newline at end of file
diff --git a/aai-resources/src/main/resources/etc/appprops/createDBSchema-logback.xml b/aai-resources/src/main/resources/etc/appprops/createDBSchema-logback.xml
new file mode 100644
index 0000000..5fcdf9b
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/createDBSchema-logback.xml
@@ -0,0 +1,131 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+ <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+ <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+
+ <appender name="createDBSchema" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
+ </filter>
+ <File>${logDirectory}/createDBSchema/error.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/createDBSchema/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="createDBSchemadebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/createDBSchema/debug.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/createDBSchema/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="createDBSchemametric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/createDBSchema/metrics.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/createDBSchema/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfMetricLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>%-4relative [%thread] %-5level %logger{35} - %msg %n</encoder>
+ </appender>
+
+ <logger name="org.reflections" level="WARN" additivity="false">
+ <appender-ref ref="createDBSchema"/>
+ <appender-ref ref="createDBSchemadebug"/>
+ <appender-ref ref="createDBSchemametric"/>
+ <appender-ref ref="STDOUT"/>
+ </logger>
+ <logger name="org.apache.zookeeper" level="WARN" additivity="false">
+ <appender-ref ref="createDBSchema"/>
+ <appender-ref ref="createDBSchemadebug"/>
+ <appender-ref ref="createDBSchemametric"/>
+ <appender-ref ref="STDOUT"/>
+ </logger>
+ <logger name="org.apache.hadoop" level="WARN" additivity="false">
+ <appender-ref ref="createDBSchema"/>
+ <appender-ref ref="createDBSchemadebug"/>
+ <appender-ref ref="createDBSchemametric"/>
+ <appender-ref ref="STDOUT"/>
+ </logger>
+ <logger name="com.thinkaurelius" level="WARN" additivity="false">
+ <appender-ref ref="createDBSchema"/>
+ <appender-ref ref="createDBSchemadebug"/>
+ <appender-ref ref="createDBSchemametric"/>
+ <appender-ref ref="STDOUT"/>
+ </logger>
+ <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+ <appender-ref ref="createDBSchema"/>
+ <appender-ref ref="createDBSchemadebug"/>
+ <appender-ref ref="createDBSchemametric"/>
+ <appender-ref ref="STDOUT"/>
+ </logger>
+ <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+ <appender-ref ref="createDBSchema"/>
+ <appender-ref ref="createDBSchemadebug"/>
+ <appender-ref ref="createDBSchemametric"/>
+ <appender-ref ref="STDOUT"/>
+ </logger>
+ <logger name="com.att.eelf" level="WARN" additivity="false">
+ <appender-ref ref="createDBSchema"/>
+ <appender-ref ref="createDBSchemadebug"/>
+ <appender-ref ref="createDBSchemametric"/>
+ <appender-ref ref="STDOUT"/>
+ </logger>
+ <logger name="org.onap.aai" level="DEBUG" additivity="false">
+ <appender-ref ref="createDBSchema"/>
+ <appender-ref ref="createDBSchemadebug"/>
+ <appender-ref ref="createDBSchemametric"/>
+ <appender-ref ref="STDOUT"/>
+ </logger>
+
+ <root level="DEBUG">
+ <appender-ref ref="createDBSchema"/>
+ <appender-ref ref="createDBSchemadebug"/>
+ <appender-ref ref="createDBSchemametric"/>
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/resources/etc/appprops/dataGrooming-logback.xml b/aai-resources/src/main/resources/etc/appprops/dataGrooming-logback.xml
new file mode 100644
index 0000000..d899d90
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/dataGrooming-logback.xml
@@ -0,0 +1,138 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+ <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+ <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+
+ <appender name="dataGrooming" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
+ </filter>
+ <File>${logDirectory}/dataGrooming/error.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dataGrooming/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="dataGroomingdebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/dataGrooming/debug.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dataGrooming/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="dataGroomingmetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/dataGrooming/metrics.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dataGrooming/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfMetricLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder><pattern>%-4relative [%thread] %-5level %logger{35} - %msg %n</pattern></encoder>
+ </appender>
+
+ <logger name="org.reflections" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="org.apache.zookeeper" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="org.apache.hadoop" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="com.thinkaurelius" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="com.att.eelf" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="org.onap.aai" level="INFO" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="com.sun.jersey.api.client" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="org.eclipse.jetty.util.log" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+ <logger name="java.net" level="WARN" additivity="false">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ </logger>
+
+ <root level="INFO">
+ <appender-ref ref="dataGrooming"/>
+ <appender-ref ref="dataGroomingdebug"/>
+ <appender-ref ref="dataGroomingmetric"/>
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
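The rolling-file appenders in this file illustrate the routing convention used throughout these tool configurations: a ThresholdFilter sends WARN and above to error.log, while LevelFilters send exactly DEBUG to debug.log and exactly INFO to metrics.log. As a hedged sketch only (the class and logger name below are hypothetical and not part of this change), log statements from a class under org.onap.aai would be routed by this file as follows:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class DataGroomingLoggingExample {
        public static void main(String[] args) {
            // Hypothetical logger under the org.onap.aai hierarchy (level INFO in this file)
            Logger log = LoggerFactory.getLogger("org.onap.aai.example");
            log.warn("passes the ThresholdFilter (>= WARN) -> dataGrooming/error.log");
            log.info("matches the INFO LevelFilter exactly -> dataGrooming/metrics.log");
            log.debug("suppressed entirely: the org.onap.aai logger level is INFO here");
        }
    }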
diff --git a/aai-resources/src/main/resources/etc/appprops/dataSnapshot-logback.xml b/aai-resources/src/main/resources/etc/appprops/dataSnapshot-logback.xml
new file mode 100644
index 0000000..9633971
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/dataSnapshot-logback.xml
@@ -0,0 +1,139 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+ <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+ <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+
+ <appender name="dataSnapshot" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
+ </filter>
+ <File>${logDirectory}/dataSnapshot/error.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dataSnapshot/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="dataSnapshotdebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/dataSnapshot/debug.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dataSnapshot/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="dataSnapshotmetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/dataSnapshot/metrics.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dataSnapshot/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfMetricLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder><pattern>%-4relative [%thread] %-5level %logger{35} - %msg %n</pattern></encoder>
+ </appender>
+
+ <logger name="org.reflections" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+ <logger name="org.apache.zookeeper" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+ <logger name="org.apache.hadoop" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ <appender-ref ref="STDOUT"/>
+ </logger>
+ <logger name="com.thinkaurelius" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+ <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+ <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+ <logger name="com.att.eelf" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+ <logger name="org.onap.aai" level="INFO" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+ <logger name="com.sun.jersey.api.client" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+ <logger name="org.eclipse.jetty.util.log" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+ <logger name="java.net" level="WARN" additivity="false">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ </logger>
+
+ <root level="INFO">
+ <appender-ref ref="dataSnapshot"/>
+ <appender-ref ref="dataSnapshotdebug"/>
+ <appender-ref ref="dataSnapshotmetric"/>
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/resources/etc/appprops/default-logback.xml b/aai-resources/src/main/resources/etc/appprops/default-logback.xml
new file mode 100644
index 0000000..655157b
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/default-logback.xml
@@ -0,0 +1,43 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration debug="false">
+ <property name="defaultPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%logger|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{ServiceName}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ServerFQDN}|%X{RemoteHost}|%X{Timer}|%msg%n" />
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>${defaultPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <logger name="org.reflections" level="WARN"/>
+ <logger name="org.apache.zookeeper" level="WARN"/>
+ <logger name="org.apache.hadoop" level="WARN"/>
+ <logger name="com.thinkaurelius" level="WARN"/>
+ <logger name="ch.qos.logback.classic" level="WARN" />
+ <logger name="ch.qos.logback.core" level="WARN" />
+
+ <root level="INFO">
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/resources/etc/appprops/dupeTool-logback.xml b/aai-resources/src/main/resources/etc/appprops/dupeTool-logback.xml
new file mode 100644
index 0000000..8c3e977
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/dupeTool-logback.xml
@@ -0,0 +1,62 @@
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+ <appender name="dupeToollog" class="ch.qos.logback.classic.sift.SiftingAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+        <!-- This is an MDC value -->
+        <!-- A value is assigned to 'logFilenameAppender' via Java code at runtime -->
+ <discriminator>
+ <key>logFilenameAppender</key>
+ <defaultValue>undefined</defaultValue>
+ </discriminator>
+ <sift>
+            <!-- A standard RollingFileAppender; the log file name is derived from the
+                'logFilenameAppender' MDC value at runtime -->
+ <appender name="FILE-${logFilenameAppender}"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/dupeTool/${logFilenameAppender}.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dupeTool/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+ </encoder>
+ </appender>
+ </sift>
+ </appender>
+
+ <logger name="org.reflections" level="ERROR" additivity="false">
+ <appender-ref ref="dupeToollog" />
+ </logger>
+ <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+ <appender-ref ref="dupeToollog" />
+ </logger>
+ <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+ <appender-ref ref="dupeToollog" />
+ </logger>
+ <logger name="com.thinkaurelius" level="ERROR" additivity="false">
+ <appender-ref ref="dupeToollog" />
+ </logger>
+ <logger name="ch.qos.logback.classic" level="ERROR" additivity="false">
+ <appender-ref ref="dupeToollog" />
+ </logger>
+ <logger name="ch.qos.logback.core" level="ERROR" additivity="false">
+ <appender-ref ref="dupeToollog" />
+ </logger>
+ <logger name="com.att.eelf" level="ERROR" additivity="false">
+ <appender-ref ref="dupeToollog" />
+ </logger>
+ <logger name="org.onap.aai" level="ERROR" additivity="false">
+ <appender-ref ref="dupeToollog" />
+ </logger>
+
+
+ <root level="INFO">
+ <appender-ref ref="dupeToollog" />
+ </root>
+</configuration> \ No newline at end of file
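The SiftingAppender above writes to a per-run file whose name comes from the 'logFilenameAppender' MDC key; as the comments note, that value is assigned from Java code. A minimal, hypothetical sketch of seeding the key before logging (the class name and file-name scheme below are illustrative only, not part of this change):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.MDC;

    public class DupeToolLogSetupExample {
        public static void main(String[] args) {
            // Assumed naming scheme: one sifted file per tool run
            MDC.put("logFilenameAppender", "dupeTool-" + System.currentTimeMillis());

            Logger logger = LoggerFactory.getLogger(DupeToolLogSetupExample.class);
            // INFO passes the LevelFilter, so this lands in ${AJSC_HOME}/logs/dupeTool/<logFilenameAppender>.log
            logger.info("dupeTool started");
        }
    }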
diff --git a/aai-resources/src/main/resources/etc/appprops/dynamic.properties b/aai-resources/src/main/resources/etc/appprops/dynamic.properties
new file mode 100644
index 0000000..e8f9a90
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/dynamic.properties
@@ -0,0 +1,35 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+query.fast-property=true
+# the following parameters are not reloaded automatically and require a manual bounce
+storage.backend=inmemory
+
+# Kept the lines below in case we need to switch from in-memory storage to a dynamic instance
+#storage.hostname=mtanjv9aads01.nvp.cip.att.com,mtanjv9aads02.nvp.cip.att.com,mtanjv9aads03.nvp.cip.att.com
+#storage.hbase.table=tenant-isol-temp
+
+#caching on
+cache.db-cache = true
+cache.db-cache-clean-wait = 20
+cache.db-cache-time = 180000
+cache.db-cache-size = 0.3
+load.snapshot.file=true
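dynamic.properties configures the graph store used for tenant isolation: an in-memory backend with database-level caching enabled, where the storage settings require a restart to take effect. A minimal sketch of opening a graph from such a file with the Titan API referenced elsewhere in this module (the path and class name are assumptions for illustration):

    import com.thinkaurelius.titan.core.TitanFactory;
    import com.thinkaurelius.titan.core.TitanGraph;

    public class DynamicGraphExample {
        public static void main(String[] args) throws Exception {
            // Assumed path; in the deployed service the file lives under etc/appprops
            TitanGraph graph = TitanFactory.open("etc/appprops/dynamic.properties");
            // storage.backend=inmemory means the graph starts empty and is not persisted
            long vertexCount = graph.traversal().V().count().next();
            System.out.println("vertices: " + vertexCount);
            graph.close();
        }
    }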
diff --git a/aai-resources/src/main/resources/etc/appprops/dynamicPayloadGenerator-logback.xml b/aai-resources/src/main/resources/etc/appprops/dynamicPayloadGenerator-logback.xml
new file mode 100644
index 0000000..025b49b
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/dynamicPayloadGenerator-logback.xml
@@ -0,0 +1,85 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+ <appender name="dynamicPayloadGeneratorlog" class="ch.qos.logback.classic.sift.SiftingAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+    <!-- This is an MDC value -->
+    <!-- A value is assigned to 'logFilenameAppender' via Java code at runtime -->
+ <discriminator>
+ <key>logFilenameAppender</key>
+ <defaultValue>undefined</defaultValue>
+ </discriminator>
+ <sift>
+      <!-- A standard RollingFileAppender; the log file name is derived from the
+        'logFilenameAppender' MDC value at runtime -->
+ <appender name="FILE-${logFilenameAppender}"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/dynamicPayloadGeneratorlog/${logFilenameAppender}.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dynamicPayloadGeneratorlog/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+ </encoder>
+ </appender>
+ </sift>
+ </appender>
+
+ <logger name="org.reflections" level="ERROR" additivity="false">
+ <appender-ref ref="dynamicPayloadGeneratorlog" />
+ </logger>
+ <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+ <appender-ref ref="dynamicPayloadGeneratorlog" />
+ </logger>
+ <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+ <appender-ref ref="dynamicPayloadGeneratorlog" />
+ </logger>
+ <logger name="com.thinkaurelius" level="ERROR" additivity="false">
+ <appender-ref ref="dynamicPayloadGeneratorlog" />
+ </logger>
+ <logger name="ch.qos.logback.classic" level="ERROR" additivity="false">
+ <appender-ref ref="dynamicPayloadGeneratorlog" />
+ </logger>
+ <logger name="ch.qos.logback.core" level="ERROR" additivity="false">
+ <appender-ref ref="dynamicPayloadGeneratorlog" />
+ </logger>
+ <logger name="com.att.eelf" level="ERROR" additivity="false">
+ <appender-ref ref="dynamicPayloadGeneratorlog" />
+ </logger>
+ <logger name="org.onap.aai" level="INFO" additivity="false">
+ <appender-ref ref="dynamicPayloadGeneratorlog" />
+ </logger>
+
+
+ <root level="INFO">
+ <appender-ref ref="dynamicPayloadGeneratorlog" />
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/resources/etc/appprops/error.properties b/aai-resources/src/main/resources/etc/appprops/error.properties
new file mode 100644
index 0000000..d3dbf9f
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/error.properties
@@ -0,0 +1,173 @@
+# Adding comment trying to trigger a build
+#-------------------------------------------------------------------------------------------
+#Key=Disposition:Category:Severity:Error Code:HTTP Response Code:REST Error Code:Error Message
+#-------------------------------------------------------------------------------------------
+# testing code, please don't change unless error utility source code changes
+AAI_TESTING=5:2:WARN:0000:400:0001:Error code for testing
+
+# General success
+AAI_0000=0:0:INFO:0000:200:0000:Success
+
+# health check success
+AAI_0001=0:0:INFO:0001:200:0001:Success X-FromAppId=%1 X-TransactionId=%2
+AAI_0002=0:0:INFO:0002:200:0001:Successful health check
+
+# Success with additional info
+AAI_0003=0:3:INFO:0003:202:0003:Success with additional info performing %1 on %2. Added %3 with key %4
+AAI_0004=0:3:INFO:0004:202:0003:Added prerequisite object to db
+
+#--- aairest: 3000-3299
+# svc errors
+AAI_3000=5:2:INFO:3000:400:3000:Invalid input performing %1 on %2
+AAI_3001=5:6:INFO:3001:404:3001:Resource not found for %1 using id %2
+AAI_3002=5:1:WARN:3002:400:3002:Error writing output performing %1 on %2
+AAI_3003=5:1:WARN:3003:400:3003:Failed to make edge to missing target node of type %3 with keys %4 performing %1 on %2
+AAI_3005=5:6:WARN:3005:404:3001:Node cannot be directly accessed for read, must be accessed via ancestor(s)
+AAI_3006=5:6:WARN:3006:404:3001:Node cannot be directly accessed for write, must be accessed via ancestor(s)
+AAI_3007=5:6:INFO:3007:410:3007:This version (%1) of the API is retired, please migrate to %2
+AAI_3008=5:6:WARN:3008:400:3008:URI is not encoded in UTF-8
+AAI_3009=5:6:WARN:3009:400:3002:Malformed URL
+AAI_3010=5:6:WARN:3010:400:3002:Cannot write via this URL
+AAI_3011=5:6:WARN:3011:400:3000:Unknown XML namespace used in payload
+AAI_3012=5:6:WARN:3012:400:3012:Unrecognized AAI function
+AAI_3013=5:6:WARN:3013:400:3013:Query payload missing required parameters %1
+AAI_3014=5:6:WARN:3014:400:3014:Query payload is invalid %1
+# pol errors
+AAI_3100=5:1:WARN:3100:400:3100:Unsupported operation %1
+AAI_3101=5:1:WARN:3101:403:3101:Attempt by client %1 to execute API %2
+AAI_3102=5:1:WARN:3102:400:3102:Error parsing input performing %1 on %2
+AAI_3300=5:1:WARN:3300:403:3300:Unauthorized
+AAI_3301=5:1:WARN:3301:401:3301:Stale credentials
+AAI_3302=5:1:WARN:3302:401:3301:Not authenticated
+AAI_3303=5:1:WARN:3303:403:3300:Too many objects would be returned by this request, please refine your request and retry
+
+#--- aaigen: 4000-4099
+AAI_4000=5:4:ERROR:4000:500:3002:Internal Error
+AAI_4001=5:4:FATAL:4001:500:3002:Configuration file not found
+AAI_4002=5:4:FATAL:4002:500:3002:Error reading Configuration file
+AAI_4003=5:4:ERROR:4003:500:3002:Error writing to log file
+AAI_4004=5:4:FATAL:4004:500:3002:Error reading/parsing the error properties file
+AAI_4005=5:4:FATAL:4005:500:3002:Missing or invalid configuration parameter
+AAI_4006=5:4:FATAL:4006:500:3002:Unexpected error in service
+AAI_4007=5:4:WARN:4007:500:3102:Input parsing error
+AAI_4008=5:4:ERROR:4008:500:3002:Output parsing error
+AAI_4009=4:0:WARN:4009:400:3000:Invalid X-FromAppId in header
+AAI_4010=4:0:WARN:4010:400:3000:Invalid X-TransactionId in header
+AAI_4011=5:4:ERROR:4011:500:3002:Missing data for REST error response
+AAI_4014=4:0:WARN:4014:400:3000:Invalid Accept header
+AAI_4015=4:0:WARN:4015:400:3000:You must provide at least one indexed property
+AAI_4016=4:0:WARN:4016:400:3000:The depth parameter must be a number or the string "all"
+AAI_4017=5:2:INFO:4017:400:3000:Could not set property
+AAI_4018=5:2:WARN:4018:400:3000:Unable to convert the string to integer
+#--- aaidbmap: 5102-5199
+AAI_5102=5:4:FATAL:5102:500:3002:Graph database is null after open
+AAI_5105=5:4:ERROR:5105:500:3002:Unexpected error reading/updating database
+AAI_5106=5:4:WARN:5106:404:3001:Node not found
+AAI_5107=5:2:WARN:5107:400:3000:Required information missing
+AAI_5108=5:2:WARN:5108:200:0:Unexpected information in request being ignored
+
+#--- aaidbgen: 6101-6199
+AAI_6101=5:4:ERROR:6101:500:3002:null TitanGraph object passed
+AAI_6102=5:4:WARN:6102:400:3000:Passed-in property is not valid for this nodeType
+AAI_6103=5:4:WARN:6103:400:3000:Required Node-property not found in input data
+AAI_6104=5:4:WARN:6104:400:3000:Required Node-property was passed with no data
+AAI_6105=5:4:WARN:6105:400:3000:Node-Key-Property not defined in DbMaps
+AAI_6106=5:4:WARN:6106:400:3000:Passed-in property is not valid for this edgeType
+AAI_6107=5:4:WARN:6107:400:3000:Required Edge-property not found in input data
+AAI_6108=5:4:WARN:6108:400:3000:Required Edge-property was passed with no data
+AAI_6109=5:4:WARN:6109:400:3000:Bad dependent Node value
+AAI_6110=5:4:ERROR:6110:400:3100:Node cannot be deleted
+AAI_6111=5:4:WARN:6111:400:3000:JSON processing error
+AAI_6112=5:4:ERROR:6112:400:3000:More than one node found by getUniqueNode()
+AAI_6114=5:4:INFO:6114:404:3001:Node Not Found
+AAI_6115=5:4:ERROR:6115:400:3000:Unrecognized NodeType
+AAI_6116=5:4:ERROR:6116:400:3000:Unrecognized Property
+AAI_6117=5:4:ERROR:6117:400:3000:Uniqueness constraint violated
+AAI_6118=5:4:WARN:6118:400:3000:Required Field not passed.
+AAI_6120=5:4:WARN:6120:400:3000:Bad Parameter Passed
+AAI_6121=5:4:ERROR:6121:400:3000:Problem with internal AAI reference data
+AAI_6122=5:4:ERROR:6122:400:3000:Data Set not complete in DB for this request
+AAI_6123=5:4:ERROR:6123:500:3000:Bad Data found by DataGrooming Tool - Investigate
+AAI_6124=5:4:ERROR:6124:500:3000:File read/write error
+AAI_6125=5:4:WARN:6125:500:3000:Problem Pulling Data Set
+AAI_6126=5:4:ERROR:6126:400:3000:Edge cannot be deleted
+AAI_6127=5:4:INFO:6127:404:3001:Edge Not Found
+AAI_6128=5:4:INFO:6128:500:3000:Unexpected error
+AAI_6129=5:4:INFO:6129:404:3003:Error making edge to target node
+AAI_6130=5:4:WARN:6130:412:3000:Precondition Required
+AAI_6131=5:4:WARN:6131:412:3000:Precondition Failed
+AAI_6132=5:4:WARN:6132:400:3000:Bad Model Definition
+AAI_6133=5:4:WARN:6133:400:3000:Bad Named Query Definition
+AAI_6134=5:4:ERROR:6134:500:6134:Could not persist transaction to storage back end. Exhausted retry amount
+AAI_6135=5:4:WARN:6135:412:3000:Resource version specified on create
+AAI_6136=5:4:ERROR:6136:400:3000:Object cannot hold multiple items
+AAI_6137=5:4:ERROR:6137:400:3000:Cannot perform writes on multiple vertices
+AAI_6138=5:4:ERROR:6138:400:3000:Cannot delete multiple vertices
+AAI_6139=5:4:ERROR:6139:404:3000:Attempted to add edge to vertex that does not exist
+AAI_6140=5:4:ERROR:6140:400:3000:Edge multiplicity violated
+AAI_6141=5:4:WARN:6141:400:3000:Please Refine Query
+AAI_6142=5:4:INFO:6142:400:3000:Retrying transaction
+AAI_6143=5:4:INFO:6143:400:3000:Ghost vertex found
+AAI_6144=5:4:WARN:6144:400:3000:Cycle found in graph
+AAI_6145=5:4:ERROR:6145:400:3000:Cannot create a nested/containment edge via relationship
+AAI_6146=5:4:ERROR:6146:400:3000:Ambiguous identity map found, use a URI instead
+AAI_6147=5:4:ERROR:6147:400:3000:Payload Limit Reached, reduce payload
+
+#--- aaicsvp: 7101-7199
+AAI_7101=5:4:ERROR:7101:500:3002:Unexpected error in CSV file processing
+AAI_7102=5:4:ERROR:7102:500:3002:Error in cleanup temporary directory
+#AAI_7103=4:2:ERROR:7103:500:3002:Unsupported user
+AAI_7104=5:4:ERROR:7104:500:3002:Failed to create directory
+AAI_7105=5:4:ERROR:7105:500:3002:Temporary directory exists
+AAI_7106=5:4:ERROR:7106:500:3002:Cannot delete
+AAI_7107=5:4:ERROR:7107:500:3002:Input file does not exist
+AAI_7108=5:4:ERROR:7108:500:3002:Output file does not exist
+AAI_7109=5:4:ERROR:7109:500:3002:Error closing file
+AAI_7110=5:4:ERROR:7110:500:3002:Error loading/reading properties file
+AAI_7111=5:4:ERROR:7111:500:3002:Error executing shell script
+AAI_7112=5:4:ERROR:7112:500:3002:Error creating output file
+AAI_7113=5:4:ERROR:7113:500:3002:Trailer record error
+AAI_7114=5:4:ERROR:7114:500:3002:Input file error
+AAI_7115=5:4:ERROR:7115:500:3002:Unexpected error
+AAI_7116=5:4:ERROR:7116:500:3002:Request error
+AAI_7117=5:4:ERROR:7117:500:3002:Error getting HTTP client object
+AAI_7118=5:4:ERROR:7118:500:3002:Script Error
+AAI_7119=5:4:ERROR:7119:500:3002:Unknown host
+
+#--- aaisdnc: 7201-7299
+AAI_7202=5:4:ERROR:7202:500:3002:Error getting connection to odl
+AAI_7203=5:4:ERROR:7203:500:3002:Unexpected error calling DataChangeNotification API
+AAI_7204=5:4:ERROR:7204:500:3002:Error returned by DataChangeNotification API
+AAI_7205=5:4:ERROR:7205:500:3002:Unexpected error running notifySDNCOnUpdate
+#AAI_7206=5:4:ERROR:7206:500:3002:Invalid data returned from ODL
+
+#--- NotificationEvent, using UEB space
+AAI_7350=5:4:ERROR:7305:500:3002:Notification event creation failed
+
+#--- aairestctlr: 7401-7499
+AAI_7401=5:4:ERROR:7401:500:3002:Error connecting to AAI REST API
+AAI_7402=5:4:ERROR:7402:500:3002:Unexpected error
+AAI_7403=5:4:WARN:7403:400:3001:Request error
+AAI_7404=5:4:INFO:7404:404:3001:Node not found
+AAI_7405=5:4:WARN:7405:200:0:UUID not formatted correctly, generating UUID
+AAI_7406=5:4:ERROR:7406:400:7406:Request Timed Out
+
+#--- aaicsiovals: 7501-7599
+#AAI_7501=5:4:WARN:7501:500:3002:Error getting connection to CSI-OVALS
+AAI_7502=5:4:WARN:7502:500:3002:Bad parameter when trying to build request for CSI-OVALS
+AAI_7503=5:4:WARN:7503:500:3002:Error returned by CSI-OVALS
+
+#--- aaiauth: 9101-9199
+AAI_9101=5:0:WARN:9101:403:3300:User is not authorized to perform function
+#AAI_9102=5:0:WARN:9102:401:3301:Refresh credentials from source
+#AAI_9103=5:0:WARN:9103:403:3300:User not found
+#AAI_9104=5:0:WARN:9104:401:3302:Authentication error
+#AAI_9105=5:0:WARN:9105:403:3300:Authorization error
+#AAI_9106=5:0:WARN:9106:403:3300:Invalid AppId
+#AAI_9107=5:0:WARN:9107:403:3300:No Username in Request
+AAI_9107=5:0:WARN:9107:403:3300:SSL is not provided in request, please contact admin
+AAI_9108=5:0:WARN:9108:403:3300:Basic auth credentials are not provided in the request
+
+#--- aaiinstar: 9201-9299
+#AAI_9201=5:4:ERROR:9201:500:3002:Unable to send notification
+AAI_9202=5:4:ERROR:9202:500:3002:Unable to start a thread
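Every entry in error.properties follows the colon-delimited key format documented at the top of the file: Disposition:Category:Severity:Error Code:HTTP Response Code:REST Error Code:Error Message. Purely as a hedged illustration of that layout (this parser is hypothetical and is not the error-handling code the service actually uses), an entry can be split into its seven fields:

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class ErrorPropertiesExample {
        public static void main(String[] args) throws IOException {
            Properties props = new Properties();
            // Assumed path for illustration
            try (FileInputStream in = new FileInputStream("etc/appprops/error.properties")) {
                props.load(in);
            }
            // AAI_3001=5:6:INFO:3001:404:3001:Resource not found for %1 using id %2
            String[] f = props.getProperty("AAI_3001").split(":", 7);
            System.out.println("disposition=" + f[0] + " category=" + f[1] + " severity=" + f[2]);
            System.out.println("errorCode=" + f[3] + " httpStatus=" + f[4] + " restCode=" + f[5]);
            System.out.println("message=" + f[6]);
        }
    }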
diff --git a/aai-resources/src/main/resources/etc/appprops/forceDelete-logback.xml b/aai-resources/src/main/resources/etc/appprops/forceDelete-logback.xml
new file mode 100644
index 0000000..16ea13c
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/forceDelete-logback.xml
@@ -0,0 +1,85 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+ <appender name="forceDeletelog" class="ch.qos.logback.classic.sift.SiftingAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+        <!-- This is an MDC value -->
+        <!-- A value is assigned to 'logFilenameAppender' via Java code at runtime -->
+ <discriminator>
+ <key>logFilenameAppender</key>
+ <defaultValue>undefined</defaultValue>
+ </discriminator>
+ <sift>
+            <!-- A standard RollingFileAppender; the log file name is derived from the
+                'logFilenameAppender' MDC value at runtime -->
+ <appender name="FILE-${logFilenameAppender}"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/forceDelete/${logFilenameAppender}.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/forceDelete/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+ </encoder>
+ </appender>
+ </sift>
+ </appender>
+
+ <logger name="org.reflections" level="ERROR" additivity="false">
+ <appender-ref ref="forceDeletelog" />
+ </logger>
+ <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+ <appender-ref ref="forceDeletelog" />
+ </logger>
+ <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+ <appender-ref ref="forceDeletelog" />
+ </logger>
+ <logger name="com.thinkaurelius" level="ERROR" additivity="false">
+ <appender-ref ref="forceDeletelog" />
+ </logger>
+ <logger name="ch.qos.logback.classic" level="ERROR" additivity="false">
+ <appender-ref ref="forceDeletelog" />
+ </logger>
+ <logger name="ch.qos.logback.core" level="ERROR" additivity="false">
+ <appender-ref ref="forceDeletelog" />
+ </logger>
+ <logger name="com.att.eelf" level="ERROR" additivity="false">
+ <appender-ref ref="forceDeletelog" />
+ </logger>
+ <logger name="org.onap.aai" level="ERROR" additivity="false">
+ <appender-ref ref="forceDeletelog" />
+ </logger>
+
+
+ <root level="INFO">
+ <appender-ref ref="forceDeletelog" />
+ </root>
+</configuration> \ No newline at end of file
diff --git a/aai-resources/src/main/resources/etc/appprops/getres-logback.xml b/aai-resources/src/main/resources/etc/appprops/getres-logback.xml
new file mode 100644
index 0000000..2a71a22
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/getres-logback.xml
@@ -0,0 +1,123 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+ <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+ <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+
+ <appender name="getTool" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
+ </filter>
+ <File>${logDirectory}/getTool/error.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/getTool/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="getTooldebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/getTool/debug.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/getTool/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="getToolmetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/getTool/metrics.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/getTool/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfMetricLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder><pattern>%-4relative [%thread] %-5level %logger{35} - %msg %n</pattern></encoder>
+ </appender>
+
+ <logger name="org.reflections" level="WARN" additivity="false">
+ <appender-ref ref="getTool"/>
+ <appender-ref ref="getTooldebug"/>
+ <appender-ref ref="getToolmetric"/>
+ </logger>
+ <logger name="org.apache.zookeeper" level="WARN" additivity="false">
+ <appender-ref ref="getTool"/>
+ <appender-ref ref="getTooldebug"/>
+ <appender-ref ref="getToolmetric"/>
+ </logger>
+ <logger name="org.apache.hadoop" level="WARN" additivity="false">
+ <appender-ref ref="getTool"/>
+ <appender-ref ref="getTooldebug"/>
+ <appender-ref ref="getToolmetric"/>
+ </logger>
+ <logger name="com.thinkaurelius" level="WARN" additivity="false">
+ <appender-ref ref="getTool"/>
+ <appender-ref ref="getTooldebug"/>
+ <appender-ref ref="getToolmetric"/>
+ </logger>
+ <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+ <appender-ref ref="getTool"/>
+ <appender-ref ref="getTooldebug"/>
+ <appender-ref ref="getToolmetric"/>
+ </logger>
+ <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+ <appender-ref ref="getTool"/>
+ <appender-ref ref="getTooldebug"/>
+ <appender-ref ref="getToolmetric"/>
+ </logger>
+ <logger name="com.att.eelf" level="WARN" additivity="false">
+ <appender-ref ref="getTool"/>
+ <appender-ref ref="getTooldebug"/>
+ <appender-ref ref="getToolmetric"/>
+ </logger>
+ <logger name="org.onap.aai" level="INFO" additivity="false">
+ <appender-ref ref="getTool"/>
+ <appender-ref ref="getTooldebug"/>
+ <appender-ref ref="getToolmetric"/>
+ </logger>
+
+ <root level="INFO">
+ <appender-ref ref="getTool"/>
+ <appender-ref ref="getTooldebug"/>
+ <appender-ref ref="getToolmetric"/>
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/resources/etc/appprops/loadDataForDHV-logback.xml b/aai-resources/src/main/resources/etc/appprops/loadDataForDHV-logback.xml
new file mode 100644
index 0000000..cced385
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/loadDataForDHV-logback.xml
@@ -0,0 +1,116 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+ <appender name="loadDataForDHV" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
+ </filter>
+ <File>${logDirectory}/loadDataForDHV/error.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/loadDataForDHV/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%m%n</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="loadDataForDHVdebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/loadDataForDHV/debug.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/loadDataForDHV/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%m%n</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="loadDataForDHVmetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/loadDataForDHV/metrics.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/loadDataForDHV/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%m%n</pattern>
+ </encoder>
+ </appender>
+
+ <logger name="org.reflections" level="WARN" additivity="false">
+ <appender-ref ref="loadDataForDHV"/>
+ <appender-ref ref="loadDataForDHVdebug"/>
+ <appender-ref ref="loadDataForDHVmetric"/>
+ </logger>
+ <logger name="org.apache.zookeeper" level="WARN" additivity="false">
+ <appender-ref ref="loadDataForDHV"/>
+ <appender-ref ref="loadDataForDHVdebug"/>
+ <appender-ref ref="loadDataForDHVmetric"/>
+ </logger>
+ <logger name="org.apache.hadoop" level="WARN" additivity="false">
+ <appender-ref ref="loadDataForDHV"/>
+ <appender-ref ref="loadDataForDHVdebug"/>
+ <appender-ref ref="loadDataForDHVmetric"/>
+ </logger>
+ <logger name="com.thinkaurelius" level="WARN" additivity="false">
+ <appender-ref ref="loadDataForDHV"/>
+ <appender-ref ref="loadDataForDHVdebug"/>
+ <appender-ref ref="loadDataForDHVmetric"/>
+ </logger>
+ <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+ <appender-ref ref="loadDataForDHV"/>
+ <appender-ref ref="loadDataForDHVdebug"/>
+ <appender-ref ref="loadDataForDHVmetric"/>
+ </logger>
+ <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+ <appender-ref ref="loadDataForDHV"/>
+ <appender-ref ref="loadDataForDHVdebug"/>
+ <appender-ref ref="loadDataForDHVmetric"/>
+ </logger>
+ <logger name="com.att.eelf" level="WARN" additivity="false">
+ <appender-ref ref="loadDataForDHV"/>
+ <appender-ref ref="loadDataForDHVdebug"/>
+ <appender-ref ref="loadDataForDHVmetric"/>
+ </logger>
+ <logger name="org.onap.aai" level="INFO" additivity="false">
+ <appender-ref ref="loadDataForDHV"/>
+ <appender-ref ref="loadDataForDHVdebug"/>
+ <appender-ref ref="loadDataForDHVmetric"/>
+ </logger>
+
+ <root level="INFO">
+ <appender-ref ref="loadDataForDHV"/>
+ <appender-ref ref="loadDataForDHVdebug"/>
+ <appender-ref ref="loadDataForDHVmetric"/>
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/resources/etc/appprops/logging.properties b/aai-resources/src/main/resources/etc/appprops/logging.properties
new file mode 100644
index 0000000..e029cc4
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/logging.properties
@@ -0,0 +1,128 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+handlers = 1catalina.org.apache.juli.FileHandler, 2localhost.org.apache.juli.FileHandler, 3manager.org.apache.juli.FileHandler, 4host-manager.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+
+.handlers = 1catalina.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+
+############################################################
+# Handler specific properties.
+# Describes specific configuration info for Handlers.
+############################################################
+
+# This is where we limit logging levels for noisy components
+org.apache.hadoop.level=WARNING
+org.apache.zookeeper.level=WARNING
+org.reflections.level=WARNING
+com.thinkaurelius.level=WARNING
+
+1catalina.org.apache.juli.FileHandler.level = FINE
+1catalina.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+1catalina.org.apache.juli.FileHandler.prefix = catalina.
+
+2localhost.org.apache.juli.FileHandler.level = FINE
+2localhost.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+2localhost.org.apache.juli.FileHandler.prefix = localhost.
+
+3manager.org.apache.juli.FileHandler.level = FINE
+3manager.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+3manager.org.apache.juli.FileHandler.prefix = manager.
+
+4host-manager.org.apache.juli.FileHandler.level = FINE
+4host-manager.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+4host-manager.org.apache.juli.FileHandler.prefix = host-manager.
+
+java.util.logging.ConsoleHandler.level = INFO
+java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+
+
+############################################################
+# Facility specific properties.
+# Provides extra control for each logger.
+############################################################
+
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].level = INFO
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].handlers = 2localhost.org.apache.juli.FileHandler
+
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].level = INFO
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].handlers = 3manager.org.apache.juli.FileHandler
+
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].level = INFO
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].handlers = 4host-manager.org.apache.juli.FileHandler
+
+# For example, set the org.apache.catalina.util.LifecycleBase logger to log
+# each component that extends LifecycleBase changing state:
+#org.apache.catalina.util.LifecycleBase.level = FINE
+
+# To see debug messages in TldLocationsCache, uncomment the following line:
+#org.apache.jasper.compiler.TldLocationsCache.level = FINE
+
+
+################################
+# OpenEJB/TomEE specific loggers
+################################
+#
+# ACTIVATE LEVEL/HANDLERS YOU WANT
+# IF YOU ACTIVATE 5tomee.org.apache.juli.FileHandler
+# ADD IT TO handlers LINE LIKE:
+#
+# handlers = 1catalina.org.apache.juli.FileHandler, 2localhost.org.apache.juli.FileHandler, 3manager.org.apache.juli.FileHandler, 4host-manager.org.apache.juli.FileHandler, 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+#
+# LEVELS:
+# =======
+#
+# OpenEJB.level = WARNING
+# OpenEJB.options.level = INFO
+# OpenEJB.server.level = INFO
+# OpenEJB.startup.level = INFO
+# OpenEJB.startup.service.level = WARNING
+# OpenEJB.startup.config.level = INFO
+# OpenEJB.hsql.level = INFO
+# CORBA-Adapter.level = WARNING
+# Transaction.level = WARNING
+# org.apache.activemq.level = SEVERE
+# org.apache.geronimo.level = SEVERE
+# openjpa.level = WARNING
+# OpenEJB.cdi.level = INFO
+# org.apache.webbeans.level = INFO
+# org.apache.openejb.level = FINE
+#
+# HANDLERS:
+# =========
+#
+# OpenEJB.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.options.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.server.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.startup.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.startup.service.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.startup.config.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.hsql.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# CORBA-Adapter.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# Transaction.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# org.apache.activemq.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# org.apache.geronimo.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# openjpa.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.cdi.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# org.apache.webbeans.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# org.apache.openejb.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+#
+# TOMEE HANDLER SAMPLE:
+# =====================
+#
+# 5tomee.org.apache.juli.FileHandler.level = FINEST
+# 5tomee.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+# 5tomee.org.apache.juli.FileHandler.prefix = tomee.
+
diff --git a/aai-resources/src/main/resources/etc/appprops/methodMapper.properties b/aai-resources/src/main/resources/etc/appprops/methodMapper.properties
new file mode 100644
index 0000000..bcacfa6
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/methodMapper.properties
@@ -0,0 +1,24 @@
+{
+ "ActiveAndAvailableInventory-CloudNetwork" : [{
+ "logicalName" : "getAAIResource",
+ "method" : "get",
+ "url" : "/aai/*"
+ }, {
+ "logicalName" : "putAAIResource",
+ "method" : "put",
+ "url" : "/aai/*"
+ }, {
+ "logicalName" : "deleteAAIResource",
+ "method" : "delete",
+ "url" : "/aai/*"
+ }, {
+ "logicalName" : "postAAIResource",
+ "method" : "post",
+ "url" : "/aai/*"
+ }, {
+ "logicalName" : "patchAAIResource",
+ "method" : "patch",
+ "url" : "/aai/*"
+ }
+ ]
+}
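methodMapper.properties is a JSON map from HTTP method and URL pattern to a logical operation name for the AAI resource endpoints; the component that consumes it is not part of this diff. A rough, hypothetical sketch of how the mapping could be looked up (assuming Jackson, which the project already uses; the class and path are illustrative only):

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.File;
    import java.io.IOException;

    public class MethodMapperExample {
        public static void main(String[] args) throws IOException {
            // Assumed path for illustration
            JsonNode root = new ObjectMapper().readTree(new File("etc/appprops/methodMapper.properties"));
            System.out.println(lookup(root, "ActiveAndAvailableInventory-CloudNetwork", "put")); // putAAIResource
        }

        // Hypothetical helper: find the logicalName for a given service block and HTTP method
        static String lookup(JsonNode root, String service, String method) {
            for (JsonNode entry : root.get(service)) {
                if (method.equalsIgnoreCase(entry.get("method").asText())) {
                    return entry.get("logicalName").asText();
                }
            }
            return null;
        }
    }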
diff --git a/aai-resources/src/main/resources/etc/appprops/migration-logback.xml b/aai-resources/src/main/resources/etc/appprops/migration-logback.xml
new file mode 100644
index 0000000..74d3d04
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/migration-logback.xml
@@ -0,0 +1,85 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+ <appender name="migrationlog" class="ch.qos.logback.classic.sift.SiftingAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+        <!-- This is an MDC value -->
+        <!-- A value is assigned to 'logFilenameAppender' via Java code at runtime -->
+ <discriminator>
+ <key>logFilenameAppender</key>
+ <defaultValue>undefined</defaultValue>
+ </discriminator>
+ <sift>
+            <!-- A standard RollingFileAppender; the log file name is derived from the
+                'logFilenameAppender' MDC value at runtime -->
+ <appender name="FILE-${logFilenameAppender}"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/migration/${logFilenameAppender}.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/migration/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+ </encoder>
+ </appender>
+ </sift>
+ </appender>
+
+ <logger name="org.reflections" level="ERROR" additivity="false">
+ <appender-ref ref="migrationlog" />
+ </logger>
+ <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+ <appender-ref ref="migrationlog" />
+ </logger>
+ <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+ <appender-ref ref="migrationlog" />
+ </logger>
+ <logger name="com.thinkaurelius" level="ERROR" additivity="false">
+ <appender-ref ref="migrationlog" />
+ </logger>
+ <logger name="ch.qos.logback.classic" level="ERROR" additivity="false">
+ <appender-ref ref="migrationlog" />
+ </logger>
+ <logger name="ch.qos.logback.core" level="ERROR" additivity="false">
+ <appender-ref ref="migrationlog" />
+ </logger>
+ <logger name="com.att.eelf" level="ERROR" additivity="false">
+ <appender-ref ref="migrationlog" />
+ </logger>
+ <logger name="org.onap.aai" level="ERROR" additivity="false">
+ <appender-ref ref="migrationlog" />
+ </logger>
+
+
+ <root level="INFO">
+ <appender-ref ref="migrationlog" />
+ </root>
+</configuration>
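The SiftingAppender above (and the identical ones added for schemaMod and uniquePropertyCheck further down) routes output to ${AJSC_HOME}/logs/migration/<value>.log based on the 'logFilenameAppender' MDC key, falling back to "undefined" when the key is unset. A minimal sketch of how a migration driver could set that key through SLF4J's MDC before logging (the class and run-name handling are assumptions, not code from this change):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.slf4j.MDC;

    public class MigrationRunnerSketch {
        private static final Logger LOGGER = LoggerFactory.getLogger(MigrationRunnerSketch.class);

        public static void main(String[] args) {
            // Pick a per-run discriminator; the real tools choose their own value.
            String runName = args.length > 0 ? args[0] : "migration-" + System.currentTimeMillis();
            MDC.put("logFilenameAppender", runName);
            try {
                LOGGER.info("starting migration run {}", runName);
                // ... migration work would go here ...
            } finally {
                MDC.remove("logFilenameAppender");
            }
        }
    }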
diff --git a/aai-resources/src/main/resources/etc/appprops/preferredRoute.txt b/aai-resources/src/main/resources/etc/appprops/preferredRoute.txt
new file mode 100644
index 0000000..662b0aa
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/preferredRoute.txt
@@ -0,0 +1 @@
+preferredRouteKey=MR1 \ No newline at end of file
diff --git a/aai-resources/src/main/resources/etc/appprops/pullInvData-logback.xml b/aai-resources/src/main/resources/etc/appprops/pullInvData-logback.xml
new file mode 100644
index 0000000..098e1e7
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/pullInvData-logback.xml
@@ -0,0 +1,128 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+ <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+ <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+
+ <appender name="pullInvData" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
+ </filter>
+ <File>${logDirectory}/pullInvData/error.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/pullInvData/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="pullInvDatadebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/pullInvData/debug.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/pullInvData/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="pullInvDatametric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/pullInvData/metrics.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/pullInvData/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfMetricLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+  <encoder><pattern>%-4relative [%thread] %-5level %logger{35} - %msg %n</pattern></encoder>
+ </appender>
+
+ <logger name="org.reflections" level="WARN" additivity="false">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ </logger>
+ <logger name="org.apache.zookeeper" level="WARN" additivity="false">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ </logger>
+ <logger name="org.apache.hadoop" level="WARN" additivity="false">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ </logger>
+ <logger name="com.thinkaurelius" level="WARN" additivity="false">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ </logger>
+ <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ </logger>
+ <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ </logger>
+ <logger name="java.net" level="WARN" additivity="false">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ </logger>
+ <logger name="com.att.eelf" level="WARN" additivity="false">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ </logger>
+ <logger name="org.onap.aai" level="INFO" additivity="false">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ </logger>
+
+ <root level="INFO">
+ <appender-ref ref="pullInvData"/>
+ <appender-ref ref="pullInvDatadebug"/>
+ <appender-ref ref="pullInvDatametric"/>
+ <appender-ref ref="STDOUT"/>
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/resources/etc/appprops/schemaMod-logback.xml b/aai-resources/src/main/resources/etc/appprops/schemaMod-logback.xml
new file mode 100644
index 0000000..0762ee1
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/schemaMod-logback.xml
@@ -0,0 +1,85 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+ <appender name="schemaModlog" class="ch.qos.logback.classic.sift.SiftingAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <!-- This is MDC value -->
+ <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+ <discriminator>
+ <key>logFilenameAppender</key>
+ <defaultValue>undefined</defaultValue>
+ </discriminator>
+ <sift>
+ <!-- A standard RollingFileAppender, the log file is based on 'logFileName'
+ at runtime -->
+ <appender name="FILE-${logFilenameAppender}"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/schemaMod/${logFilenameAppender}.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/schemaMod/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+ </encoder>
+ </appender>
+ </sift>
+ </appender>
+
+ <logger name="org.reflections" level="WARN" additivity="false">
+ <appender-ref ref="schemaModlog" />
+ </logger>
+ <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+ <appender-ref ref="schemaModlog" />
+ </logger>
+ <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+ <appender-ref ref="schemaModlog" />
+ </logger>
+ <logger name="com.thinkaurelius" level="WARN" additivity="false">
+ <appender-ref ref="schemaModlog" />
+ </logger>
+ <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+ <appender-ref ref="schemaModlog" />
+ </logger>
+ <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+ <appender-ref ref="schemaModlog" />
+ </logger>
+ <logger name="com.att.eelf" level="WARN" additivity="false">
+ <appender-ref ref="schemaModlog" />
+ </logger>
+ <logger name="org.onap.aai" level="INFO" additivity="false">
+ <appender-ref ref="schemaModlog" />
+ </logger>
+
+
+ <root level="INFO">
+ <appender-ref ref="schemaModlog" />
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/scripts/run_FixXSD.sh b/aai-resources/src/main/resources/etc/appprops/titan-cached.properties
index 40ffb81..3465453 100644
--- a/aai-resources/src/main/scripts/run_FixXSD.sh
+++ b/aai-resources/src/main/resources/etc/appprops/titan-cached.properties
@@ -1,4 +1,3 @@
-#!/bin/ksh
#
# ============LICENSE_START=======================================================
# org.onap.aai
@@ -21,49 +20,20 @@
# ECOMP is a trademark and service mark of AT&T Intellectual Property.
#
-#
-# The script invokes GetResource java class to get all nodes
-#
-
-echo
-echo `date` " Starting $0"
-
-REV=$1
-AAIHOME=$2
-echo "AAIHOME: $AAIHOME"
-
-if test "$REV" = ""
-then
- REV=v6
-fi
-
-. /etc/profile.d/aai.sh
-PROJECT_HOME=/opt/app/aai-resources
-
-if test "$AAIHOME" = ""
-then
- AAIHOME=$PROJECT_HOME
-fi
-
-
-for JAR in `ls $AAIHOME/target/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-for JAR in `ls $PROJECT_HOME/lib/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-export REV=$REV
-echo "FixXSD $REV $AAIHOME"
-$JAVA_HOME/bin/java -classpath $CLASSPATH -DAJSC_HOME=$AAIHOME org.onap.aai.util.FixXSDNew $REV
-ret_code=$?
-if [ $ret_code != 0 ]; then
- echo `date` " Done $0"
- exit $ret_code
-fi
-
-echo `date` " Done $0"
-exit 0
+query.fast-property=true
+# the following parameters are not reloaded automatically and require a manual bounce
+storage.backend=inmemory
+storage.hostname=localhost
+
+#schema.default=none
+storage.lock.wait-time=300
+storage.hbase.table=aaigraph-dev1.dev
+storage.hbase.ext.zookeeper.znode.parent=/hbase
+#caching on
+cache.db-cache = true
+cache.db-cache-clean-wait = 20
+cache.db-cache-time = 180000
+cache.db-cache-size = 0.3
+
+#load graphson file on startup
+load.snapshot.file=false
diff --git a/aai-resources/src/main/resources/etc/appprops/titan-realtime.properties b/aai-resources/src/main/resources/etc/appprops/titan-realtime.properties
new file mode 100644
index 0000000..869872c
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/titan-realtime.properties
@@ -0,0 +1,36 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+
+query.fast-property=true
+# the following parameters are not reloaded automatically and require a manual bounce
+storage.backend=inmemory
+storage.hostname=localhost
+
+#schema.default=none
+storage.lock.wait-time=300
+storage.hbase.table=aaigraph-dev1.dev
+storage.hbase.ext.zookeeper.znode.parent=/hbase
+# Setting db-cache to false ensures the fastest propagation of changes across servers
+cache.db-cache = false
+
+#load graphson file on startup
+load.snapshot.file=false
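titan-cached.properties (db-cache enabled with a 180-second cache window) and titan-realtime.properties (db-cache disabled so changes propagate to other servers immediately) are both plain Titan configuration files. A minimal sketch of opening a graph against one of them with Titan's public factory; the file path is an assumption, the application resolves its own location:

    import com.thinkaurelius.titan.core.TitanFactory;
    import com.thinkaurelius.titan.core.TitanGraph;

    public class GraphOpenSketch {
        public static void main(String[] args) {
            // Realtime properties: cache.db-cache=false, storage.backend=inmemory per the file above.
            TitanGraph graph = TitanFactory.open("src/main/resources/etc/appprops/titan-realtime.properties");
            // ... graph reads/writes would go here ...
            graph.close();
        }
    }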
diff --git a/aai-resources/src/main/resources/etc/appprops/uniquePropertyCheck-logback.xml b/aai-resources/src/main/resources/etc/appprops/uniquePropertyCheck-logback.xml
new file mode 100644
index 0000000..5f44b1b
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/appprops/uniquePropertyCheck-logback.xml
@@ -0,0 +1,85 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+ <appender name="uniquePropertyChecklog" class="ch.qos.logback.classic.sift.SiftingAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <!-- This is MDC value -->
+ <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+ <discriminator>
+ <key>logFilenameAppender</key>
+ <defaultValue>undefined</defaultValue>
+ </discriminator>
+ <sift>
+ <!-- A standard RollingFileAppender, the log file is based on 'logFileName'
+ at runtime -->
+ <appender name="FILE-${logFilenameAppender}"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/uniquePropertyCheck/${logFilenameAppender}.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/uniquePropertyCheck/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+ </encoder>
+ </appender>
+ </sift>
+ </appender>
+
+ <logger name="org.reflections" level="WARN" additivity="false">
+ <appender-ref ref="uniquePropertyChecklog" />
+ </logger>
+ <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+ <appender-ref ref="uniquePropertyChecklog" />
+ </logger>
+ <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+ <appender-ref ref="uniquePropertyChecklog" />
+ </logger>
+ <logger name="com.thinkaurelius" level="WARN" additivity="false">
+ <appender-ref ref="uniquePropertyChecklog" />
+ </logger>
+ <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+ <appender-ref ref="uniquePropertyChecklog" />
+ </logger>
+ <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+ <appender-ref ref="uniquePropertyChecklog" />
+ </logger>
+ <logger name="com.att.eelf" level="WARN" additivity="false">
+ <appender-ref ref="uniquePropertyChecklog" />
+ </logger>
+ <logger name="org.onap.aai" level="INFO" additivity="false">
+ <appender-ref ref="uniquePropertyChecklog" />
+ </logger>
+
+
+ <root level="INFO">
+ <appender-ref ref="uniquePropertyChecklog" />
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/resources/etc/auth/aai-client-cert.p12 b/aai-resources/src/main/resources/etc/auth/aai-client-cert.p12
new file mode 100644
index 0000000..9f2acc9
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/auth/aai-client-cert.p12
Binary files differ
diff --git a/aai-resources/src/main/resources/etc/auth/aai_keystore b/aai-resources/src/main/resources/etc/auth/aai_keystore
new file mode 100644
index 0000000..1ddef0c
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/auth/aai_keystore
Binary files differ
diff --git a/aai-resources/src/main/config/realm.properties b/aai-resources/src/main/resources/etc/auth/realm.properties
index fb692cc..fb692cc 100644
--- a/aai-resources/src/main/config/realm.properties
+++ b/aai-resources/src/main/resources/etc/auth/realm.properties
diff --git a/aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodes.json b/aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodes.json
new file mode 100644
index 0000000..5c28c8b
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodes.json
@@ -0,0 +1,14 @@
+{
+ "pserver" : {
+ "cousins" : ["zone", "complex", "availability-zone", "cloud-region"],
+ "parents":[]
+ },
+ "complex" : {
+ "cousins":[],
+ "parents":[]
+ },
+ "zone" : {
+ "cousins":["complex"],
+ "parents":[]
+ }
+}
diff --git a/aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodesAZCloud.json b/aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodesAZCloud.json
new file mode 100644
index 0000000..b955757
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodesAZCloud.json
@@ -0,0 +1,22 @@
+{
+ "cloud-region": {
+ "cousins" : [],
+ "parents":[]
+ },
+ "availability-zone": {
+ "cousins" : [],
+ "parents":["cloud-region"]
+ },
+ "pserver" : {
+ "cousins" : ["zone", "complex", "availability-zone"],
+ "parents":[]
+ },
+ "complex" : {
+ "cousins":[],
+ "parents":[]
+ },
+ "zone" : {
+ "cousins":["complex"],
+ "parents":[]
+ }
+}
diff --git a/aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodesNoAZ.json b/aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodesNoAZ.json
new file mode 100644
index 0000000..a0dfae5
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/scriptdata/tenant_isolation/nodesNoAZ.json
@@ -0,0 +1,14 @@
+{
+ "pserver" : {
+ "cousins" : ["zone", "complex"],
+ "parents":[]
+ },
+ "complex" : {
+ "cousins":[],
+ "parents":[]
+ },
+ "zone" : {
+ "cousins":["complex"],
+ "parents":[]
+ }
+}
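nodes.json, nodesAZCloud.json and nodesNoAZ.json all share one shape: each node type maps to the "cousins" and "parents" node types that the tenant-isolation data pull should follow from it. A hedged Jackson sketch of loading that shape (NodeSpec is an illustrative class, not one that exists in this repository):

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.File;
    import java.util.List;
    import java.util.Map;

    public class NodeSpec {
        public List<String> cousins;
        public List<String> parents;

        // Keys are node types (pserver, complex, zone, ...); values list the related
        // node types to traverse when pulling inventory for tenant isolation.
        public static Map<String, NodeSpec> load(File nodesJson) throws Exception {
            return new ObjectMapper().readValue(nodesJson, new TypeReference<Map<String, NodeSpec>>() {});
        }
    }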
diff --git a/aai-resources/src/main/resources/etc/tmp-config/hbase-site.xml b/aai-resources/src/main/resources/etc/tmp-config/hbase-site.xml
new file mode 100644
index 0000000..a2fcca6
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/tmp-config/hbase-site.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<!--
+OVERVIEW
+
+The important configs. are listed near the top. You should change
+at least the setting for hbase.tmp.dir. Other settings will change
+dependent on whether you are running hbase in standalone mode or
+distributed. See the hbase reference guide for requirements and
+guidance making configuration.
+
+This file does not contain all possible configurations. The file would be
+much larger if it carried everything. The absent configurations will only be
+found through source code reading. The idea is that such configurations are
+exotic and only those who would go to the trouble of reading a particular
+section in the code would be knowledgeable or invested enough in ever wanting
+to alter such configurations, so we do not list them here. Listing all
+possible configurations would overwhelm and obscure the important.
+-->
+
+<configuration>
+ <property>
+ <name>hbase.client.scanner.timeout.period</name>
+ <value>600000</value>
+ <description>Client scanner lease period in milliseconds.</description>
+ </property>
+ <property>
+ <name>hbase.rpc.timeout</name>
+ <value>6000000</value>
+ <description>hbase rpc timeout</description>
+ </property>
+</configuration>
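Both values in this hbase-site.xml are client-side timeouts in milliseconds: 600000 (10 minutes) for scanner leases and 6000000 (100 minutes) for RPCs. Where the same settings need to be applied programmatically rather than through the XML file, the stock HBase client Configuration accepts them directly; a small sketch, not code from this change:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class HBaseTimeoutSketch {
        public static Configuration timeoutConfig() {
            // create() already loads any hbase-site.xml found on the classpath;
            // the explicit setters simply mirror the values from the file above.
            Configuration conf = HBaseConfiguration.create();
            conf.setLong("hbase.client.scanner.timeout.period", 600000L);
            conf.setLong("hbase.rpc.timeout", 6000000L);
            return conf;
        }
    }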
diff --git a/aai-resources/src/main/resources/etc/tosca-migration-data/edgeLabelMigration.csv b/aai-resources/src/main/resources/etc/tosca-migration-data/edgeLabelMigration.csv
new file mode 100644
index 0000000..53639e5
--- /dev/null
+++ b/aai-resources/src/main/resources/etc/tosca-migration-data/edgeLabelMigration.csv
@@ -0,0 +1,212 @@
+from,to,label,direction,multiplicity,contains-other-v,delete-other-v,SVC-INFRA,prevent-delete,new from,new to,new label,new direction,new multiplicity,new contains-other-v,new delete-other-v,new SVC-INFRA,new prevent-delete,new default
+allotted-resource,allotted-resource,bindsTo,OUT,ONE2ONE,NONE,NONE,NONE,NONE,allotted-resource,allotted-resource,tosca.relationships.network.BindsTo,OUT,ONE2ONE,NONE,NONE,NONE,NONE,T
+allotted-resource,generic-vnf,isPartOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,allotted-resource,generic-vnf,org.onap.relationships.inventory.PartOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+allotted-resource,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,allotted-resource,instance-group,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+allotted-resource,l3-network,isPartOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,allotted-resource,l3-network,org.onap.relationships.inventory.PartOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+allotted-resource,l-interface,uses,OUT,ONE2MANY,NONE,NONE,NONE,NONE,allotted-resource,l-interface,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,NONE,NONE,NONE,T
+allotted-resource,network-policy,uses,OUT,ONE2ONE,NONE,NONE,NONE,NONE,allotted-resource,network-policy,org.onap.relationships.inventory.Uses,OUT,ONE2ONE,NONE,NONE,NONE,NONE,T
+allotted-resource,vlan,isPartOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,allotted-resource,vlan,org.onap.relationships.inventory.PartOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+allotted-resource,vpn-binding,belongsTo,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},allotted-resource,vpn-binding,org.onap.relationships.inventory.BelongsTo,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+allotted-resource,tunnel-xconnect,has,OUT,ONE2ONE,${direction},${direction},NONE,NONE,tunnel-xconnect,allotted-resource,org.onap.relationships.inventory.BelongsTo,OUT,ONE2ONE,!${direction},!${direction},NONE,NONE,T
+availability-zone,complex,groupsResourcesIn,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},availability-zone,complex,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},T
+availability-zone,service-capability,supportsServiceCapability,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},availability-zone,service-capability,org.onap.relationships.inventory.AppliesTo,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+cloud-region,availability-zone,has,OUT,ONE2MANY,${direction},${direction},NONE,${direction},availability-zone,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+cloud-region,complex,locatedIn,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},cloud-region,complex,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},T
+cloud-region,l3-network,uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,cloud-region,l3-network,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+cloud-region,zone,isMemberOf,OUT,MANY2ONE,NONE,NONE,NONE,NONE,cloud-region,zone,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,NONE,NONE,T
+cloud-region,dvs-switch,has,OUT,ONE2MANY,${direction},${direction},NONE,${direction},dvs-switch,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,NONE,!${direction},T
+cloud-region,flavor,has,OUT,ONE2MANY,${direction},${direction},NONE,${direction},flavor,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,NONE,!${direction},T
+cloud-region,group-assignment,has,OUT,ONE2MANY,${direction},${direction},NONE,${direction},group-assignment,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,NONE,!${direction},T
+cloud-region,image,has,OUT,ONE2MANY,${direction},${direction},NONE,${direction},image,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,NONE,!${direction},T
+cloud-region,oam-network,has,OUT,ONE2MANY,${direction},${direction},NONE,${direction},oam-network,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,NONE,!${direction},T
+cloud-region,snapshot,has,OUT,ONE2MANY,${direction},${direction},NONE,${direction},snapshot,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,NONE,!${direction},T
+cloud-region,tenant,has,OUT,ONE2MANY,${direction},${direction},!${direction},${direction},tenant,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,${direction},!${direction},T
+cloud-region,vip-ipv4-address-list,hasIpAddress,OUT,ONE2MANY,${direction},${direction},${direction},NONE,vip-ipv4-address-list,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,NONE,!${direction},T
+cloud-region,vip-ipv6-address-list,hasIpAddress,OUT,ONE2MANY,${direction},${direction},${direction},NONE,vip-ipv6-address-list,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,NONE,!${direction},T
+cloud-region,volume-group,has,OUT,ONE2MANY,${direction},${direction},NONE,${direction},volume-group,cloud-region,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,NONE,!${direction},T
+complex,l3-network,usesL3Network,OUT,MANY2MANY,NONE,NONE,${direction},NONE,complex,l3-network,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,${direction},NONE,T
+complex,ctag-pool,hasCtagPool,OUT,MANY2MANY,${direction},${direction},NONE,NONE,ctag-pool,complex,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+configuration,allotted-resource,uses,OUT,ONE2ONE,NONE,${direction},NONE,NONE,configuration,allotted-resource,org.onap.relationships.inventory.Uses,OUT,ONE2ONE,NONE,${direction},NONE,NONE,T
+configuration,logical-link,has,OUT,ONE2MANY,NONE,${direction},NONE,NONE,configuration,logical-link,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,${direction},NONE,NONE,T
+configuration,metadatum,owns,OUT,ONE2MANY,${direction},${direction},NONE,NONE,metadatum,configuration,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+connector,virtual-data-center,contains,OUT,MANY2MANY,NONE,NONE,NONE,NONE,connector,virtual-data-center,org.onap.relationships.inventory.LocatedIn,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+connector,metadatum,hasMetaData,OUT,MANY2MANY,${direction},${direction},NONE,NONE,metadatum,connector,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+constrained-element-set,element-choice-set,uses,OUT,ONE2MANY,${direction},${direction},NONE,NONE,element-choice-set,constrained-element-set,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+ctag-pool,availability-zone,supportsAvailabilityZone,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},ctag-pool,availability-zone,org.onap.relationships.inventory.AppliesTo,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+customer,service-subscription,subscribesTo,OUT,MANY2MANY,${direction},${direction},!${direction},NONE,service-subscription,customer,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},${direction},NONE,T
+dvs-switch,availability-zone,existsIn,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},dvs-switch,availability-zone,org.onap.relationships.inventory.AppliesTo,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+element-choice-set,model-element,has,OUT,ONE2MANY,${direction},${direction},NONE,NONE,model-element,element-choice-set,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+generic-vnf,entitlement,has,OUT,ONE2MANY,${direction},${direction},NONE,NONE,entitlement,generic-vnf,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+generic-vnf,availability-zone,hasAvailabilityZone,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},generic-vnf,availability-zone,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},T
+generic-vnf,complex,locatedIn,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},generic-vnf,complex,org.onap.relationships.inventory.LocatedIn,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+generic-vnf,configuration,uses,OUT,ONE2MANY,NONE,${direction},NONE,NONE,generic-vnf,configuration,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,${direction},NONE,NONE,T
+generic-vnf,ctag-pool,usesCtagPool,OUT,MANY2MANY,NONE,NONE,NONE,NONE,generic-vnf,ctag-pool,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+generic-vnf,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,generic-vnf,instance-group,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+generic-vnf,ipsec-configuration,uses,OUT,MANY2ONE,NONE,NONE,NONE,NONE,generic-vnf,ipsec-configuration,org.onap.relationships.inventory.Uses,OUT,MANY2ONE,NONE,NONE,NONE,NONE,T
+generic-vnf,l3-network,usesL3Network,OUT,MANY2MANY,NONE,NONE,${direction},NONE,generic-vnf,l3-network,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,${direction},NONE,T
+generic-vnf,license-key-resource,uses,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},generic-vnf,license-key-resource,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+generic-vnf,pnf,hostedOn,OUT,MANY2MANY,NONE,NONE,NONE,NONE,generic-vnf,pnf,tosca.relationships.HostedOn,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+generic-vnf,pserver,runsOnPserver,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},generic-vnf,pserver,tosca.relationships.HostedOn,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},T
+generic-vnf,vnfc,uses,OUT,ONE2MANY,NONE,${direction},${direction},NONE,vnfc,generic-vnf,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,NONE,!${direction},!${direction},NONE,T
+generic-vnf,vnf-image,usesVnfImage,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},generic-vnf,vnf-image,org.onap.relationships.inventory.Uses,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},T
+generic-vnf,volume-group,uses,OUT,ONE2MANY,NONE,NONE,${direction},NONE,generic-vnf,volume-group,org.onap.relationships.inventory.DependsOn,OUT,ONE2MANY,NONE,NONE,${direction},NONE,T
+generic-vnf,vserver,runsOnVserver,OUT,ONE2MANY,NONE,NONE,${direction},NONE,generic-vnf,vserver,tosca.relationships.HostedOn,OUT,ONE2MANY,NONE,NONE,${direction},NONE,T
+generic-vnf,lag-interface,hasLAGInterface,OUT,MANY2MANY,${direction},${direction},${direction},NONE,lag-interface,generic-vnf,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+generic-vnf,license,has,OUT,ONE2MANY,${direction},${direction},NONE,NONE,license,generic-vnf,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+generic-vnf,l-interface,hasLInterface,OUT,MANY2MANY,${direction},${direction},${direction},NONE,l-interface,generic-vnf,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+generic-vnf,network-profile,hasNetworkProfile,OUT,MANY2MANY,NONE,NONE,NONE,NONE,network-profile,generic-vnf,org.onap.relationships.inventory.AppliesTo,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+generic-vnf,service-instance,hasInstance,OUT,MANY2MANY,NONE,NONE,${direction},NONE,service-instance,generic-vnf,org.onap.relationships.inventory.ComposedOf,OUT,ONE2MANY,NONE,NONE,!${direction},NONE,T
+generic-vnf,site-pair-set,hasSitePairSet,OUT,MANY2MANY,NONE,NONE,NONE,NONE,site-pair-set,generic-vnf,org.onap.relationships.inventory.AppliesTo,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+generic-vnf,vf-module,has,OUT,ONE2MANY,${direction},${direction},${direction},NONE,vf-module,generic-vnf,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+group-assignment,pserver,has,OUT,ONE2MANY,NONE,NONE,NONE,!${direction},pserver,group-assignment,org.onap.relationships.inventory.MemberOf,OUT,MANY2ONE,NONE,NONE,NONE,${direction},T
+group-assignment,tenant,has,OUT,MANY2MANY,NONE,NONE,NONE,NONE,tenant,group-assignment,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+image,metadatum,hasMetaDatum,OUT,MANY2MANY,${direction},${direction},NONE,NONE,metadatum,image,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+instance-group,model,targets,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},instance-group,model,org.onap.relationships.inventory.Targets,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+ipsec-configuration,vig-server,hasVigServer,OUT,ONE2MANY,${direction},${direction},NONE,NONE,vig-server,ipsec-configuration,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+l3-interface-ipv4-address-list,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,l3-interface-ipv4-address-list,instance-group,org.onap.relationships.inventory.network.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+l3-interface-ipv4-address-list,l3-network,isMemberOf,OUT,MANY2MANY,NONE,NONE,${direction},NONE,l3-interface-ipv4-address-list,l3-network,org.onap.relationships.inventory.network.MemberOf,OUT,MANY2MANY,NONE,NONE,${direction},NONE,T
+l3-interface-ipv4-address-list,subnet,isMemberOf,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},l3-interface-ipv4-address-list,subnet,org.onap.relationships.inventory.network.MemberOf,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},T
+l3-interface-ipv6-address-list,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,l3-interface-ipv6-address-list,instance-group,org.onap.relationships.inventory.network.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+l3-interface-ipv6-address-list,l3-network,isMemberOf,OUT,MANY2MANY,NONE,NONE,${direction},NONE,l3-interface-ipv6-address-list,l3-network,org.onap.relationships.inventory.network.MemberOf,OUT,MANY2MANY,NONE,NONE,${direction},NONE,T
+l3-interface-ipv6-address-list,subnet,isMemberOf,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},l3-interface-ipv6-address-list,subnet,org.onap.relationships.inventory.network.MemberOf,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},T
+l3-network,ctag-assignment,hasCtagAssignment,OUT,MANY2MANY,${direction},${direction},${direction},NONE,ctag-assignment,l3-network,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+l3-network,instance-group,memberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,l3-network,instance-group,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+l3-network,network-policy,uses,OUT,MANY2MANY,NONE,NONE,${direction},NONE,l3-network,network-policy,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,${direction},NONE,T
+l3-network,route-table-reference,uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,l3-network,route-table-reference,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+l3-network,vpn-binding,usesVpnBinding,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},l3-network,vpn-binding,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+l3-network,segmentation-assignment,has,OUT,ONE2MANY,${direction},${direction},NONE,NONE,segmentation-assignment,l3-network,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+l3-network,service-instance,hasInstance,OUT,MANY2MANY,NONE,NONE,!${direction},NONE,service-instance,l3-network,org.onap.relationships.inventory.ComposedOf,OUT,ONE2MANY,NONE,NONE,${direction},NONE,T
+l3-network,subnet,hasSubnet,OUT,MANY2MANY,${direction},${direction},!${direction},NONE,subnet,l3-network,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},${direction},NONE,T
+lag-interface,lag-link,usesLAGLink,OUT,MANY2MANY,NONE,${direction},${direction},NONE,lag-interface,lag-link,tosca.relationships.network.LinksTo,OUT,MANY2MANY,NONE,${direction},${direction},NONE,T
+lag-interface,logical-link,uses,OUT,MANY2MANY,NONE,${direction},${direction},NONE,lag-interface,logical-link,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,${direction},${direction},NONE,T
+lag-interface,p-interface,usesPInterface,OUT,MANY2MANY,NONE,NONE,${direction},NONE,lag-interface,p-interface,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,${direction},NONE,T
+lag-interface,l-interface,hasLInterface,OUT,MANY2MANY,${direction},${direction},${direction},NONE,l-interface,lag-interface,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+line-of-business,generic-vnf,realizedBy,OUT,MANY2MANY,NONE,NONE,NONE,NONE,line-of-business,generic-vnf,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+l-interface,l3-interface-ipv4-address-list,hasIpAddress,OUT,MANY2MANY,${direction},${direction},${direction},NONE,l3-interface-ipv4-address-list,l-interface,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+l-interface,l3-interface-ipv6-address-list,hasIpAddress,OUT,MANY2MANY,${direction},${direction},${direction},NONE,l3-interface-ipv6-address-list,l-interface,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+l-interface,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,l-interface,instance-group,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+l-interface,l-interface,has,OUT,ONE2MANY,${direction},${direction},${direction},NONE,l-interface,l-interface,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+l-interface,logical-link,usesLogicalLink,OUT,MANY2MANY,NONE,${direction},${direction},NONE,l-interface,logical-link,tosca.relationships.network.LinksTo,OUT,MANY2ONE,NONE,${direction},${direction},NONE,T
+l-interface,logical-link,sourceLInterface,OUT,MANY2MANY,NONE,${direction},${direction},NONE,logical-link,l-interface,org.onap.relationships.inventory.Source,OUT,ONE2MANY,NONE,!${direction},!${direction},NONE,F
+l-interface,logical-link,targetLInterface,OUT,MANY2MANY,NONE,${direction},${direction},NONE,logical-link,l-interface,org.onap.relationships.inventory.Destination,OUT,ONE2MANY,NONE,!${direction},!${direction},NONE,F
+l-interface,sriov-vf,has,OUT,ONE2ONE,${direction},${direction},NONE,NONE,sriov-vf,l-interface,org.onap.relationships.inventory.BelongsTo,OUT,ONE2ONE,!${direction},!${direction},NONE,NONE,T
+l-interface,vlan,hasVlan,OUT,MANY2MANY,${direction},${direction},NONE,NONE,vlan,l-interface,tosca.relationships.network.LinksTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+logical-link,cloud-region,existsIn,OUT,MANY2MANY,NONE,NONE,NONE,NONE,logical-link,cloud-region,org.onap.relationships.inventory.LocatedIn,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+logical-link,generic-vnf,bridgedTo,OUT,MANY2MANY,NONE,NONE,NONE,NONE,logical-link,generic-vnf,org.onap.relationships.inventory.BridgedTo,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+logical-link,lag-link,usesLAGLink,OUT,MANY2MANY,NONE,NONE,${direction},NONE,logical-link,lag-link,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,${direction},NONE,T
+logical-link,logical-link,uses,OUT,ONE2MANY,NONE,NONE,${direction},NONE,logical-link,logical-link,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,NONE,${direction},NONE,T
+logical-link,pnf,bridgedTo,OUT,MANY2MANY,NONE,NONE,NONE,NONE,logical-link,pnf,org.onap.relationships.inventory.BridgedTo,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+logical-link,pserver,bridgedTo,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},logical-link,pserver,org.onap.relationships.inventory.BridgedTo,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+logical-link,vpn-binding,uses,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},logical-link,vpn-binding,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+model,model-ver,has,OUT,ONE2MANY,${direction},${direction},NONE,NONE,model-ver,model,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+model-constraint,constrained-element-set,uses,OUT,ONE2MANY,${direction},${direction},NONE,NONE,constrained-element-set,model-constraint,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+model-element,constrained-element-set,connectsTo,OUT,ONE2MANY,${direction},${direction},NONE,NONE,constrained-element-set,model-element,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+model-element,model-constraint,uses,OUT,ONE2MANY,${direction},${direction},NONE,NONE,model-constraint,model-element,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+model-element,model-element,connectsTo,OUT,ONE2MANY,${direction},${direction},NONE,NONE,model-element,model-element,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+model-element,model-ver,isA,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},model-element,model-ver,org.onap.relationships.inventory.IsA,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},T
+model-ver,metadatum,hasMetaDatum,OUT,ONE2MANY,${direction},${direction},NONE,NONE,metadatum,model-ver,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+model-ver,model-element,startsWith,OUT,ONE2MANY,${direction},${direction},NONE,NONE,model-element,model-ver,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+named-query,model,relatedTo,OUT,ONE2MANY,NONE,NONE,NONE,!${direction},named-query,model,org.onap.relationships.inventory.AppliesTo,OUT,ONE2MANY,NONE,NONE,NONE,!${direction},T
+named-query,named-query-element,startsWith,OUT,ONE2ONE,${direction},${direction},NONE,NONE,named-query-element,named-query,org.onap.relationships.inventory.BelongsTo,OUT,ONE2ONE,!${direction},!${direction},NONE,NONE,T
+named-query-element,model,isA,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},named-query-element,model,org.onap.relationships.inventory.IsA,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},T
+named-query-element,named-query-element,connectsTo,OUT,MANY2MANY,${direction},${direction},NONE,NONE,named-query-element,named-query-element,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+named-query-element,property-constraint,uses,OUT,ONE2MANY,${direction},${direction},NONE,NONE,property-constraint,named-query-element,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+named-query-element,related-lookup,uses,OUT,ONE2MANY,${direction},${direction},NONE,NONE,related-lookup,named-query-element,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+newvce,l-interface,hasLInterface,OUT,MANY2MANY,${direction},${direction},NONE,NONE,l-interface,newvce,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+oam-network,complex,definedFor,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},oam-network,complex,org.onap.relationships.inventory.AppliesTo,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+oam-network,service-capability,supportsServiceCapability,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},oam-network,service-capability,org.onap.relationships.inventory.AppliesTo,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+operational-environment,operational-environment,managedBy,OUT,ONE2ONE,NONE,NONE,NONE,NONE,operational-environment,operational-environment,org.onap.relationships.inventory.Uses,OUT,ONE2ONE,NONE,NONE,NONE,NONE,T
+owning-entity,service-instance,owns,OUT,ONE2MANY,NONE,NONE,NONE,NONE,service-instance,owning-entity,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,NONE,NONE,NONE,NONE,T
+p-interface,l-interface,hasLInterface,OUT,MANY2MANY,${direction},${direction},${direction},NONE,l-interface,p-interface,tosca.relationships.network.BindsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+p-interface,logical-link,usesLogicalLink,OUT,MANY2ONE,NONE,NONE,${direction},NONE,p-interface,logical-link,tosca.relationships.network.LinksTo,OUT,MANY2ONE,NONE,NONE,${direction},NONE,T
+p-interface,physical-link,usesPhysicalLink,OUT,MANY2MANY,NONE,${direction},NONE,NONE,p-interface,physical-link,tosca.relationships.network.LinksTo,OUT,MANY2ONE,NONE,${direction},NONE,NONE,T
+p-interface,sriov-pf,has,OUT,ONE2ONE,${direction},${direction},NONE,NONE,sriov-pf,p-interface,org.onap.relationships.inventory.BelongsTo,OUT,ONE2ONE,!${direction},!${direction},NONE,NONE,T
+platform,generic-vnf,uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,platform,generic-vnf,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+pnf,lag-interface,has,OUT,ONE2MANY,${direction},${direction},${direction},NONE,lag-interface,pnf,tosca.relationships.network.BindsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+pnf,p-interface,hasPinterface,OUT,MANY2MANY,${direction},${direction},${direction},NONE,p-interface,pnf,tosca.relationships.network.BindsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+pnf,complex,locatedIn,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},pnf,complex,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},T
+pnf,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,pnf,instance-group,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+pnf,zone,isMemberOf,OUT,MANY2ONE,NONE,NONE,NONE,NONE,pnf,zone,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,NONE,NONE,T
+port-group,cvlan-tag,hasCTag,OUT,MANY2MANY,${direction},${direction},${direction},NONE,cvlan-tag,port-group,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+project,service-instance,created,OUT,ONE2MANY,NONE,NONE,NONE,NONE,project,service-instance,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,NONE,NONE,NONE,T
+pserver,lag-interface,hasLAGInterface,OUT,MANY2MANY,${direction},${direction},${direction},NONE,lag-interface,pserver,tosca.relationships.network.BindsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+pserver,p-interface,hasPinterface,OUT,MANY2MANY,${direction},${direction},${direction},NONE,p-interface,pserver,tosca.relationships.network.BindsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+pserver,availability-zone,existsIn,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},pserver,availability-zone,org.onap.relationships.inventory.MemberOf,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},T
+pserver,cloud-region,locatedIn,OUT,MANY2ONE,NONE,NONE,${direction},NONE,pserver,cloud-region,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,${direction},NONE,T
+pserver,complex,locatedIn,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},pserver,complex,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},T
+pserver,zone,isMemberOf,OUT,MANY2ONE,NONE,NONE,NONE,NONE,pserver,zone,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,NONE,NONE,T
+routing-instance,site-pair,hasSitePair,OUT,MANY2MANY,${direction},${direction},NONE,NONE,site-pair,routing-instance,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+service-instance,allotted-resource,has,OUT,MANY2MANY,${direction},${direction},NONE,NONE,allotted-resource,service-instance,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+service-instance,metadatum,hasMetaData,OUT,MANY2MANY,${direction},${direction},NONE,NONE,metadatum,service-instance,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+service-instance,allotted-resource,uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,service-instance,allotted-resource,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+service-instance,configuration,has,OUT,ONE2MANY,NONE,NONE,NONE,NONE,service-instance,configuration,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,NONE,NONE,NONE,T
+service-instance,connector,uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,service-instance,connector,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+service-instance,ctag-assignment,uses,OUT,ONE2MANY,NONE,NONE,NONE,NONE,service-instance,ctag-assignment,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,NONE,NONE,NONE,T
+service-instance,cvlan-tag,hasIPAGFacingVLAN,OUT,MANY2MANY,NONE,NONE,NONE,NONE,service-instance,cvlan-tag,org.onap.relationships.inventory.ComposedOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+service-instance,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,service-instance,instance-group,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+service-instance,logical-link,uses,OUT,MANY2MANY,NONE,${direction},NONE,NONE,service-instance,logical-link,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,${direction},NONE,NONE,T
+service-instance,pnf,uses,OUT,ONE2MANY,NONE,NONE,NONE,NONE,service-instance,pnf,org.onap.relationships.inventory.ComposedOf,OUT,ONE2MANY,NONE,NONE,NONE,NONE,T
+service-instance,service-instance,dependsOn,OUT,ONE2MANY,NONE,NONE,NONE,NONE,service-instance,service-instance,org.onap.relationships.inventory.ComposedOf,OUT,ONE2MANY,NONE,NONE,NONE,NONE,T
+service-instance,vlan,dependsOn,OUT,ONE2MANY,NONE,NONE,NONE,NONE,service-instance,vlan,org.onap.relationships.inventory.ComposedOf,OUT,ONE2MANY,NONE,NONE,NONE,NONE,T
+service-instance,zone,locatedIn,OUT,MANY2ONE,NONE,NONE,${direction},NONE,service-instance,zone,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,${direction},NONE,T
+service-subscription,service-instance,hasInstance,OUT,MANY2MANY,${direction},${direction},!${direction},NONE,service-instance,service-subscription,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},${direction},NONE,T
+site-pair,class-of-service,hasClassOfService,OUT,MANY2MANY,${direction},${direction},NONE,NONE,class-of-service,site-pair,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+site-pair-set,routing-instance,hasRoutingInstance,OUT,MANY2MANY,${direction},${direction},NONE,NONE,routing-instance,site-pair-set,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+sriov-vf,sriov-pf,uses,OUT,MANY2ONE,NONE,NONE,NONE,NONE,sriov-vf,sriov-pf,org.onap.relationships.inventory.Uses,OUT,MANY2ONE,NONE,NONE,NONE,NONE,T
+subnet,host-route,has,OUT,ONE2MANY,${direction},${direction},NONE,NONE,host-route,subnet,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+tenant,service-subscription,relatedTo,OUT,MANY2MANY,NONE,NONE,NONE,NONE,service-subscription,tenant,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+tenant,l3-network,usesL3Network,OUT,MANY2MANY,NONE,NONE,NONE,NONE,tenant,l3-network,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+tenant,vserver,owns,OUT,ONE2MANY,${direction},${direction},!${direction},${direction},vserver,tenant,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},NONE,${direction},!${direction},T
+vce,entitlement,has,OUT,ONE2MANY,${direction},${direction},NONE,NONE,entitlement,vce,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+vce,license,has,OUT,ONE2MANY,${direction},${direction},NONE,NONE,license,vce,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+vce,port-group,hasPortGroup,OUT,MANY2MANY,${direction},${direction},${direction},NONE,port-group,vce,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+vce,service-instance,hasServiceInstance,OUT,MANY2MANY,NONE,NONE,!${direction},NONE,service-instance,vce,org.onap.relationships.inventory.ComposedOf,OUT,ONE2MANY,NONE,NONE,${direction},NONE,T
+vce,availability-zone,hasAvailabilityZone,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},vce,availability-zone,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+vce,complex,locatedIn,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},vce,complex,org.onap.relationships.inventory.LocatedIn,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},T
+vce,vserver,runsOnVserver,OUT,MANY2MANY,NONE,NONE,${direction},NONE,vce,vserver,tosca.relationships.HostedOn,OUT,ONE2MANY,NONE,NONE,${direction},NONE,T
+vf-module,l3-network,uses,OUT,MANY2MANY,NONE,NONE,${direction},NONE,vf-module,l3-network,org.onap.relationships.inventory.DependsOn,OUT,MANY2MANY,NONE,NONE,${direction},NONE,T
+vf-module,vnfc,uses,OUT,ONE2MANY,NONE,${direction},${direction},${direction},vf-module,vnfc,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,NONE,${direction},${direction},T
+vf-module,volume-group,uses,OUT,ONE2ONE,NONE,NONE,${direction},NONE,vf-module,volume-group,org.onap.relationships.inventory.Uses,OUT,ONE2ONE,NONE,NONE,${direction},NONE,T
+vip-ipv4-address-list,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,vip-ipv4-address-list,instance-group,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+vip-ipv4-address-list,subnet,isMemberOf,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},vip-ipv4-address-list,subnet,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+vip-ipv6-address-list,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,vip-ipv6-address-list,instance-group,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+vip-ipv6-address-list,subnet,isMemberOf,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},vip-ipv6-address-list,subnet,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},T
+virtual-data-center,generic-vnf,hasVNF,OUT,MANY2MANY,NONE,NONE,!${direction},NONE,generic-vnf,virtual-data-center,org.onap.relationships.inventory.LocatedIn,OUT,MANY2MANY,NONE,NONE,${direction},NONE,T
+virtual-data-center,logical-link,contains,OUT,MANY2MANY,NONE,NONE,NONE,NONE,logical-link,virtual-data-center,org.onap.relationships.inventory.LocatedIn,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+vlan,l3-interface-ipv4-address-list,hasIpAddress,OUT,MANY2MANY,${direction},${direction},${direction},NONE,l3-interface-ipv4-address-list,vlan,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+vlan,l3-interface-ipv6-address-list,hasIpAddress,OUT,MANY2MANY,${direction},${direction},${direction},NONE,l3-interface-ipv6-address-list,vlan,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+vlan,logical-link,usesLogicalLink,OUT,MANY2MANY,NONE,${direction},${direction},NONE,vlan,logical-link,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,${direction},${direction},NONE,T
+vlan,multicast-configuration,uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,vlan,multicast-configuration,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+vnfc,l3-interface-ipv4-address-list,hasIpAddress,OUT,ONE2MANY,${direction},${direction},NONE,NONE,l3-interface-ipv4-address-list,vnfc,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+vnfc,l3-interface-ipv6-address-list,hasIpAddress,OUT,ONE2MANY,${direction},${direction},NONE,NONE,l3-interface-ipv6-address-list,vnfc,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+vnfc,instance-group,isMemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,vnfc,instance-group,org.onap.relationships.inventory.MemberOf,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+vnfc,vip-ipv4-address-list,uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,vnfc,vip-ipv4-address-list,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+vnfc,vip-ipv6-address-list,uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,vnfc,vip-ipv6-address-list,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+volume-group,tenant,belongsTo,OUT,MANY2MANY,NONE,NONE,${direction},NONE,tenant,volume-group,org.onap.relationships.inventory.DependsOn,OUT,ONE2MANY,NONE,NONE,!${direction},NONE,T
+volume-group,complex,existsIn,OUT,MANY2MANY,NONE,NONE,${direction},!${direction},volume-group,complex,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},T
+vpls-pe,lag-interface,hasLAGinterface,OUT,MANY2MANY,${direction},${direction},NONE,NONE,lag-interface,vpls-pe,tosca.relationships.network.BindsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+vpls-pe,p-interface,hasPinterface,OUT,MANY2MANY,${direction},${direction},NONE,NONE,p-interface,vpls-pe,tosca.relationships.network.BindsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+vpls-pe,complex,locatedIn,OUT,MANY2MANY,NONE,NONE,NONE,!${direction},vpls-pe,complex,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},T
+vpls-pe,ctag-pool,usesCtagPool,OUT,MANY2MANY,NONE,NONE,NONE,NONE,vpls-pe,ctag-pool,org.onap.relationships.inventory.Uses,OUT,MANY2MANY,NONE,NONE,NONE,NONE,T
+vpn-binding,route-target,has,OUT,ONE2MANY,${direction},${direction},NONE,NONE,route-target,vpn-binding,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+vserver,l-interface,hasLInterface,OUT,MANY2MANY,${direction},${direction},${direction},NONE,l-interface,vserver,tosca.relationships.network.BindsTo,OUT,MANY2ONE,!${direction},!${direction},!${direction},NONE,T
+vserver,vf-module,isPartOf,OUT,MANY2ONE,NONE,NONE,${direction},NONE,vf-module,vserver,org.onap.relationships.inventory.Uses,OUT,ONE2MANY,NONE,NONE,!${direction},NONE,T
+vserver,vnfc,hosts,OUT,MANY2MANY,NONE,NONE,${direction},NONE,vnfc,vserver,tosca.relationships.HostedOn,OUT,ONE2MANY,NONE,NONE,!${direction},NONE,T
+vserver,flavor,hasFlavor,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},vserver,flavor,org.onap.relationships.inventory.Uses,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},T
+vserver,image,hasImage,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},vserver,image,org.onap.relationships.inventory.Uses,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},T
+vserver,pserver,runsOnPserver,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},vserver,pserver,tosca.relationships.HostedOn,OUT,MANY2ONE,NONE,NONE,${direction},!${direction},T
+vserver,snapshot,uses,OUT,ONE2ONE,NONE,NONE,${direction},NONE,vserver,snapshot,org.onap.relationships.inventory.Uses,OUT,ONE2ONE,NONE,NONE,${direction},NONE,T
+vserver,volume,hasVolume,OUT,MANY2MANY,${direction},${direction},${direction},NONE,vserver,volume,tosca.relationships.AttachesTo,OUT,ONE2MANY,${direction},${direction},${direction},NONE,T
+zone,complex,existsIn,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},zone,complex,org.onap.relationships.inventory.LocatedIn,OUT,MANY2ONE,NONE,NONE,NONE,!${direction},T
+,,,,,,,,,allotted-resource,model-ver,org.onap.relationships.inventory.IsA,OUT,Many2One,NONE,NONE,NONE,NONE,T
+,,,,,,,,,generic-vnf,model-ver,org.onap.relationships.inventory.IsA,OUT,Many2One,NONE,NONE,NONE,NONE,T
+,,,,,,,,,l3-network,model-ver,org.onap.relationships.inventory.IsA,OUT,Many2One,NONE,NONE,NONE,NONE,T
+,,,,,,,,,logical-link,model-ver,org.onap.relationships.inventory.IsA,OUT,Many2One,NONE,NONE,NONE,NONE,T
+,,,,,,,,,service-instance,model-ver,org.onap.relationships.inventory.IsA,OUT,Many2One,NONE,NONE,NONE,NONE,T
+,,,,,,,,,vf-module,model-ver,org.onap.relationships.inventory.IsA,OUT,Many2One,NONE,NONE,NONE,NONE,T
+configuration,l-interface,has,OUT,ONE2MANY,NONE,NONE,NONE,NONE,configuration,l-interface,org.onap.relationships.inventory.AppliesTo,OUT,ONE2MANY,NONE,NONE,NONE,NONE,T
+configuration,pnf,has,OUT,ONE2MANY,NONE,NONE,NONE,NONE,configuration,pnf,org.onap.relationships.inventory.AppliesTo,OUT,ONE2MANY,NONE,NONE,NONE,NONE,T
+forwarder,forwarding-path,belongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,forwarder,forwarding-path,org.onap.relationships.inventory.BelongsTo,OUT,MANY2ONE,!${direction},!${direction},NONE,NONE,T
+forwarding-path,service-instance,implements,OUT,MANY2ONE,NONE,!${direction},NONE,NONE,forwarding-path,service-instance,org.onap.relationships.inventory.AppliesTo,OUT,MANY2ONE,NONE,!${direction},NONE,NONE,T
+forwarder,l-interface,forwardsTo,OUT,ONE2ONE,NONE,NONE,NONE,NONE,forwarder,l-interface,org.onap.relationships.inventory.ForwardsTo,OUT,ONE2ONE,NONE,NONE,NONE,NONE,T
+forwarder,p-interface,forwardsTo,OUT,ONE2ONE,NONE,NONE,NONE,NONE,forwarder,p-interface,org.onap.relationships.inventory.ForwardsTo,OUT,ONE2ONE,NONE,NONE,NONE,NONE,T
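Each row above appears to pair a legacy edge rule (the first nine comma-separated fields: from-node, to-node, label, direction, multiplicity and four edge-property flags) with its replacement rule (the next nine fields) plus a trailing flag; this reading is an assumption, since the CSV header row is not part of this hunk. A minimal sketch of splitting a row into the two labels, run against the migration CSV itself (its filename is not visible here, so the name below is a placeholder):

  # prints the legacy and replacement edge labels for each migration row (placeholder filename)
  awk -F',' '{ printf "old: %s-[%s]->%s   new: %s-[%s]->%s\n", $1, $3, $2, $10, $12, $11 }' edge-rule-migration.csv

Rows that only define a new rule leave the first nine fields empty, so the "old" side prints blank for them.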
diff --git a/aai-resources/src/main/resources/localhost-access-logback.xml b/aai-resources/src/main/resources/localhost-access-logback.xml
new file mode 100644
index 0000000..a318796
--- /dev/null
+++ b/aai-resources/src/main/resources/localhost-access-logback.xml
@@ -0,0 +1,62 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+ <property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
+ <appender name="ACCESS"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.CustomLogPatternLayoutEncoder">
+ <Pattern>%a %u %z [%t] "%m %U%q" %s %b %y %i{X-TransactionId} %i{X-FromAppId} %i{X-Forwarded-For} %i{X-AAI-SSL-Client-CN} %i{X-AAI-SSL-Client-OU} %i{X-AAI-SSL-Client-O} %i{X-AAI-SSL-Client-L} %i{X-AAI-SSL-Client-ST} %i{X-AAI-SSL-Client-C} %i{X-AAI-SSL-Client-NotBefore} %i{X-AAI-SSL-Client-NotAfter} %i{X-AAI-SSL-Client-DN} %D</Pattern>
+ </encoder>
+ </appender>
+ <appender-ref ref="ACCESS" />
+</configuration>
+
+<!--
+%a - Remote IP address
+%A - Local IP address
+%b - Bytes sent, excluding HTTP headers, or '-' if no bytes were sent
+%B - Bytes sent, excluding HTTP headers
+%h - Remote host name
+%H - Request protocol
+%l - Remote logical username from identd (always returns '-')
+%m - Request method
+%p - Local port
+%q - Query string (prepended with a '?' if it exists, otherwise an empty string)
+%r - First line of the request
+%s - HTTP status code of the response
+%S - User session ID
+%t - Date and time, in Common Log Format
+%u - Remote user that was authenticated
+%U - Requested URL path
+%v - Local server name
+%I - Current request thread name (can be compared later with stack traces)
+
+%z - Custom pattern that parses the cert for the subject
+%y - Custom pattern that determines REST or DME2
+ --> \ No newline at end of file
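With AJSC_HOME left unset, the appender above falls back to the working directory, so during local testing the access log can be followed with something like (a sketch, not part of the commit):

  tail -F "${AJSC_HOME:-.}/logs/ajsc-jetty/localhost_access.log"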
diff --git a/aai-resources/src/main/resources/logback.xml b/aai-resources/src/main/resources/logback.xml
new file mode 100644
index 0000000..ee7ee61
--- /dev/null
+++ b/aai-resources/src/main/resources/logback.xml
@@ -0,0 +1,391 @@
+<!--
+
+ ============LICENSE_START=======================================================
+ org.onap.aai
+ ================================================================================
+ Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+
+ ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration scan="true" scanPeriod="60 seconds" debug="false">
+ <statusListener class="ch.qos.logback.core.status.NopStatusListener" />
+
+ <property resource="application.properties" />
+
+ <property name="namespace" value="aai-resources"/>
+
+ <property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
+ <jmxConfigurator />
+ <property name="logDirectory" value="${AJSC_HOME}/logs" />
+ <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+ <property name="eelfAuditLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n|\r\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+ <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+ <!-- <property name="eelfErrorLogPattern" value="%ecompStartTime|%X{requestId}|%-10t|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompErrorCategory|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n|\r\n', '^'}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/> -->
+ <property name="eelfErrorLogPattern" value="%ecompStartTime|%X{requestId}|%-10t|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompErrorCategory|%ecompResponseCode|%ecompResponseDescription|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+ <property name="eelfTransLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{partnerName}:%m%n"/>
+
+ <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
+ <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
+ <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
+ <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>
+ %clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}
+ </pattern>
+ </encoder>
+ </appender>
+
+ <appender name="SANE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/rest/sane.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/rest/sane.log.%d{yyyy-MM-dd}</fileNamePattern>
+ </rollingPolicy>
+ <encoder>
+ <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - %msg%n
+ </pattern>
+ </encoder>
+ </appender>
+
+ <appender name="asyncSANE" class="ch.qos.logback.classic.AsyncAppender">
+ <queueSize>1000</queueSize>
+ <includeCallerData>true</includeCallerData>
+ <appender-ref ref="SANE" />
+ </appender>
+
+ <appender name="METRIC"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <file>${logDirectory}/rest/metrics.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/rest/metrics.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfMetricLogPattern}</pattern>
+ </encoder>
+ </appender>
+ <appender name="asyncMETRIC" class="ch.qos.logback.classic.AsyncAppender">
+ <queueSize>1000</queueSize>
+ <includeCallerData>true</includeCallerData>
+ <appender-ref ref="METRIC" />
+ </appender>
+
+ <appender name="DEBUG"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <file>${logDirectory}/rest/debug.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/rest/debug.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="asyncDEBUG" class="ch.qos.logback.classic.AsyncAppender">
+ <queueSize>1000</queueSize>
+ <includeCallerData>true</includeCallerData>
+ <appender-ref ref="DEBUG" />
+ </appender>
+
+ <appender name="ERROR"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
+ </filter>
+ <file>${logDirectory}/rest/error.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/rest/error.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfErrorLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="asyncERROR" class="ch.qos.logback.classic.AsyncAppender">
+ <queueSize>1000</queueSize>
+ <includeCallerData>true</includeCallerData>
+ <appender-ref ref="ERROR" />
+ </appender>
+
+ <appender name="AUDIT"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logDirectory}/rest/audit.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/rest/audit.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfAuditLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="asyncAUDIT" class="ch.qos.logback.classic.AsyncAppender">
+ <queueSize>1000</queueSize>
+ <includeCallerData>true</includeCallerData>
+ <appender-ref ref="AUDIT" />
+ </appender>
+
+ <appender name="translog"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <file>${logDirectory}/rest/translog.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/rest/translog.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfTransLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="asynctranslog" class="ch.qos.logback.classic.AsyncAppender">
+ <queueSize>1000</queueSize>
+ <includeCallerData>true</includeCallerData>
+ <appender-ref ref="translog" />
+ </appender>
+
+ <appender name="dmaapAAIEventConsumer"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
+ </filter>
+ <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+
+ <appender name="dmaapAAIEventConsumerDebug"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>DEBUG</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+ <appender name="dmaapAAIEventConsumerMetric"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.LevelFilter">
+ <level>INFO</level>
+ <onMatch>ACCEPT</onMatch>
+ <onMismatch>DENY</onMismatch>
+ </filter>
+ <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfMetricLogPattern}</pattern>
+ </encoder>
+ </appender>
+ <appender name="external"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+ <level>WARN</level>
+ </filter>
+ <file>${logDirectory}/external/external.log</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <fileNamePattern>${logDirectory}/external/external.log.%d{yyyy-MM-dd}
+ </fileNamePattern>
+ </rollingPolicy>
+ <encoder class="org.onap.aai.logging.EcompEncoder">
+ <pattern>${eelfLogPattern}</pattern>
+ </encoder>
+ </appender>
+ <logger name="org.onap.aai" level="DEBUG" additivity="false">
+ <appender-ref ref="asyncDEBUG" />
+ <appender-ref ref="asyncERROR" />
+ <appender-ref ref="asyncMETRIC" />
+ <appender-ref ref="asyncSANE" />
+ </logger>
+
+ <!-- Spring related loggers -->
+ <logger name="org.springframework" level="WARN" />
+ <logger name="org.springframework.beans" level="WARN" />
+ <logger name="org.springframework.web" level="WARN" />
+ <logger name="com.blog.spring.jms" level="WARN" />
+ <logger name="com.jayway.jsonpath" level="WARN" />
+
+ <!-- AJSC Services (bootstrap services) -->
+ <logger name="ajsc" level="WARN" />
+ <logger name="ajsc.RouteMgmtService" level="WARN" />
+ <logger name="ajsc.ComputeService" level="WARN" />
+ <logger name="ajsc.VandelayService" level="WARN" />
+ <logger name="ajsc.FilePersistenceService" level="WARN" />
+ <logger name="ajsc.UserDefinedJarService" level="WARN" />
+ <logger name="ajsc.UserDefinedBeansDefService" level="WARN" />
+ <logger name="ajsc.LoggingConfigurationService" level="WARN" />
+
+ <!-- AJSC related loggers (DME2 Registration, csi logging, restlet, servlet
+ logging) -->
+ <logger name="org.codehaus.groovy" level="WARN" />
+ <logger name="com.att.scamper" level="WARN" />
+ <logger name="ajsc.utils" level="WARN" />
+ <logger name="ajsc.utils.DME2Helper" level="WARN" />
+ <logger name="ajsc.filters" level="WARN" />
+ <logger name="ajsc.beans.interceptors" level="WARN" />
+ <logger name="ajsc.restlet" level="WARN" />
+ <logger name="ajsc.servlet" level="WARN" />
+ <logger name="com.att.ajsc" level="WARN" />
+ <logger name="com.att.ajsc.csi.logging" level="WARN" />
+ <logger name="com.att.ajsc.filemonitor" level="WARN" />
+ <logger name="com.netflix.loadbalancer" level="WARN" />
+
+ <logger name="org.apache.zookeeper" level="OFF" />
+
+ <!-- Other Loggers that may help troubleshoot -->
+ <logger name="net.sf" level="WARN" />
+ <logger name="org.apache.commons.httpclient" level="WARN" />
+ <logger name="org.apache.commons" level="WARN" />
+ <logger name="org.apache.coyote" level="WARN" />
+ <logger name="org.apache.jasper" level="WARN" />
+
+ <!-- Camel Related Loggers (including restlet/servlet/jaxrs/cxf logging.
+ May aid in troubleshooting) -->
+ <logger name="org.apache.camel" level="WARN" />
+ <logger name="org.apache.cxf" level="WARN" />
+ <logger name="org.apache.camel.processor.interceptor" level="WARN" />
+ <logger name="org.apache.cxf.jaxrs.interceptor" level="WARN" />
+ <logger name="org.apache.cxf.service" level="WARN" />
+ <logger name="org.restlet" level="WARN" />
+ <logger name="org.apache.camel.component.restlet" level="WARN" />
+
+ <logger name="org.hibernate.validator" level="WARN" />
+ <logger name="org.hibernate" level="WARN" />
+ <logger name="org.hibernate.ejb" level="OFF" />
+
+ <!-- logback internals logging -->
+ <logger name="ch.qos.logback.classic" level="WARN" />
+ <logger name="ch.qos.logback.core" level="WARN" />
+
+ <logger name="org.eclipse.jetty" level="WARN" />
+
+ <!-- logback jms appenders & loggers definition starts here -->
+ <appender name="auditLogs"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter" />
+ <file>${logDirectory}/perf-audit/Audit-${lrmRVer}-${lrmRO}-${Pid}.log
+ </file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+ <fileNamePattern>${logDirectory}/perf-audit/Audit-${lrmRVer}-${lrmRO}-${Pid}.%i.log.zip
+ </fileNamePattern>
+ <minIndex>1</minIndex>
+ <maxIndex>9</maxIndex>
+ </rollingPolicy>
+ <triggeringPolicy
+ class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+ <maxFileSize>5MB</maxFileSize>
+ </triggeringPolicy>
+ <encoder>
+ <pattern>"%d [%thread] %-5level %logger{1024} - %msg%n"</pattern>
+ </encoder>
+ </appender>
+ <appender name="perfLogs"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter" />
+ <file>${logDirectory}/perf-audit/Perform-${lrmRVer}-${lrmRO}-${Pid}.log
+ </file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+ <fileNamePattern>${logDirectory}/perf-audit/Perform-${lrmRVer}-${lrmRO}-${Pid}.%i.log.zip
+ </fileNamePattern>
+ <minIndex>1</minIndex>
+ <maxIndex>9</maxIndex>
+ </rollingPolicy>
+ <triggeringPolicy
+ class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+ <maxFileSize>5MB</maxFileSize>
+ </triggeringPolicy>
+ <encoder>
+ <pattern>"%d [%thread] %-5level %logger{1024} - %msg%n"</pattern>
+ </encoder>
+ </appender>
+ <logger name="AuditRecord" level="INFO" additivity="false">
+ <appender-ref ref="auditLogs" />
+ </logger>
+ <logger name="AuditRecord_DirectCall" level="INFO" additivity="false">
+ <appender-ref ref="auditLogs" />
+ </logger>
+ <logger name="PerfTrackerRecord" level="INFO" additivity="false">
+ <appender-ref ref="perfLogs" />
+ </logger>
+ <!-- logback jms appenders & loggers definition ends here -->
+
+ <logger name="org.onap.aai.interceptors.post" level="DEBUG"
+ additivity="false">
+ <appender-ref ref="asynctranslog" />
+ </logger>
+
+ <logger name="org.onap.aai.interceptors.pre.SetLoggingContext" level="DEBUG">
+ <appender-ref ref="asyncAUDIT"/>
+ </logger>
+
+ <logger name="org.onap.aai.interceptors.post.ResetLoggingContext" level="DEBUG">
+ <appender-ref ref="asyncAUDIT"/>
+ </logger>
+
+ <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+ <appender-ref ref="dmaapAAIEventConsumer" />
+ <appender-ref ref="dmaapAAIEventConsumerDebug" />
+ <appender-ref ref="dmaapAAIEventConsumerMetric" />
+ </logger>
+
+ <logger name="org.apache" level="OFF" />
+ <logger name="org.zookeeper" level="OFF" />
+ <logger name="com.thinkaurelius" level="WARN" />
+ <logger name="com.att.aft.dme2" level="WARN" />
+
+ <!-- ============================================================================ -->
+ <!-- General EELF logger -->
+ <!-- ============================================================================ -->
+ <logger name="com.att.eelf" level="WARN" additivity="false">
+ <appender-ref ref="asyncDEBUG" />
+ <appender-ref ref="asyncERROR" />
+ <appender-ref ref="asyncMETRIC" />
+ </logger>
+
+ <root level="DEBUG">
+ <appender-ref ref="external" />
+ </root>
+</configuration>
diff --git a/aai-resources/src/main/resources/logmessages.properties b/aai-resources/src/main/resources/logmessages.properties
new file mode 100644
index 0000000..59684e4
--- /dev/null
+++ b/aai-resources/src/main/resources/logmessages.properties
@@ -0,0 +1,6 @@
+RESTSERVICE_HELLO=SERVICE0001I|Get a quick hello|No resolution needed|No action is required
+RESTSERVICE_HELLO_NAME=SERVICE0002I|Get a quick hello for {0}|No resolution needed|No action is required
+SPRINSERVICE_HELLO=SERVICE0003I|Say a quick hello|No resolution needed|No action is required
+SPRINSERVICE_HELLO_NAME=SERVICE0004I|Say a quick hello for {0}|No resolution needed|No action is required
+SPRINSERVICE_HELLO_MESSAGE=SERVICE0005I|Say hello message: {0}|No resolution needed|No action is required
+SPRINSERVICE_HELLO_MESSAGE_NAME=SERVICE0006I|Say hello message object:{0}|No resolution needed|No action is required
diff --git a/aai-resources/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context b/aai-resources/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context
deleted file mode 100644
index 8514196..0000000
--- a/aai-resources/src/main/runtime/context/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.context
+++ /dev/null
@@ -1 +0,0 @@
-{"context":{"contextClass":"ajsc.Context","contextId":"__module_ajsc_namespace_name__:__module_ajsc_namespace_version__","contextName":"__module_ajsc_namespace_name__","contextVersion":"__module_ajsc_namespace_version__","description":"__module_ajsc_namespace_name__ Context"}} \ No newline at end of file
diff --git a/aai-resources/src/main/runtime/context/default#0.context b/aai-resources/src/main/runtime/context/default#0.context
deleted file mode 100644
index d1b5ab4..0000000
--- a/aai-resources/src/main/runtime/context/default#0.context
+++ /dev/null
@@ -1 +0,0 @@
-{"context":{"contextClass":"ajsc.Context","contextId":"default:0","contextName":"default","contextVersion":"0","description":"Default Context"}} \ No newline at end of file
diff --git a/aai-resources/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json b/aai-resources/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json
deleted file mode 100644
index d0954cf..0000000
--- a/aai-resources/src/main/runtime/deploymentPackage/__module.ajsc.namespace.name__#__module.ajsc.namespace.version__.json
+++ /dev/null
@@ -1 +0,0 @@
-{"deploymentPackage":{"Class":"ajsc.DeploymentPackage","Id":"__module.ajsc.namespace.name__:__module_ajsc_namespace_version__","namespace":"__module_ajsc_namespace_name__","namespaceVersion":"__module_ajsc_namespace_version__","description":"__module_ajsc_namespace_name__ __module_ajsc_namespace_version__ - default description","userId":"ajsc"}} \ No newline at end of file
diff --git a/aai-resources/src/main/runtime/shiroRole/ajscadmin.json b/aai-resources/src/main/runtime/shiroRole/ajscadmin.json
deleted file mode 100644
index f5e981e..0000000
--- a/aai-resources/src/main/runtime/shiroRole/ajscadmin.json
+++ /dev/null
@@ -1 +0,0 @@
-{"shiroRoleClass":"ajsc.auth.ShiroRole","shiroRoleId":"ajscadmin","name":"ajscadmin","permissions":"[ajscadmin:*, ajsc:*]"} \ No newline at end of file
diff --git a/aai-resources/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json b/aai-resources/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json
deleted file mode 100644
index 2dae9f5..0000000
--- a/aai-resources/src/main/runtime/shiroRole/contextadmin#__module.ajsc.namespace.name__.json
+++ /dev/null
@@ -1 +0,0 @@
-{"shiroRoleClass":"ajsc.auth.ShiroRole","shiroRoleId":"contextadmin:__module_ajsc_namespace_name__","name":"contextadmin:__module_ajsc_namespace_name__","permissions":"[]"} \ No newline at end of file
diff --git a/aai-resources/src/main/runtime/shiroRole/contextadmin#default.json b/aai-resources/src/main/runtime/shiroRole/contextadmin#default.json
deleted file mode 100644
index 5de814e..0000000
--- a/aai-resources/src/main/runtime/shiroRole/contextadmin#default.json
+++ /dev/null
@@ -1 +0,0 @@
-{"shiroRoleClass":"ajsc.auth.ShiroRole","shiroRoleId":"contextadmin:default","name":"contextadmin:default","permissions":"[]"} \ No newline at end of file
diff --git a/aai-resources/src/main/runtime/shiroUser/ajsc.json b/aai-resources/src/main/runtime/shiroUser/ajsc.json
deleted file mode 100644
index f4c7855..0000000
--- a/aai-resources/src/main/runtime/shiroUser/ajsc.json
+++ /dev/null
@@ -1 +0,0 @@
-{"shiroUserClass":"ajsc.auth.ShiroUser","shiroUserId":"ajsc","passwordHash":"9471697417008c880720ba54c6038791ad7e98f3b88136fe34f4d31a462dd27a","permissions":"[*:*]","username":"ajsc"} \ No newline at end of file
diff --git a/aai-resources/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json b/aai-resources/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json
deleted file mode 100644
index cb8d483..0000000
--- a/aai-resources/src/main/runtime/shiroUserRole/ajsc#ajscadmin.json
+++ /dev/null
@@ -1 +0,0 @@
-{"shiroUserRoleClass":"ajsc.auth.ShiroUserRole","shiroUserRoleId":"ajsc:ajscadmin","roleId":"ajscadmin","userId":"ajsc"} \ No newline at end of file
diff --git a/aai-resources/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json b/aai-resources/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json
deleted file mode 100644
index 95d2361..0000000
--- a/aai-resources/src/main/runtime/shiroUserRole/ajsc#contextadmin#__module.ajsc.namespace.name__.json
+++ /dev/null
@@ -1 +0,0 @@
-{"shiroUserRoleClass":"ajsc.auth.ShiroUserRole","shiroUserRoleId":"ajsc:contextadmin:__module_ajsc_namespace_name__","roleId":"contextadmin:__module_ajsc_namespace_name__","userId":"ajsc"} \ No newline at end of file
diff --git a/aai-resources/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json b/aai-resources/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json
deleted file mode 100644
index 2bd5063..0000000
--- a/aai-resources/src/main/runtime/shiroUserRole/ajsc#contextadmin#default.json
+++ /dev/null
@@ -1 +0,0 @@
-{"shiroUserRoleClass":"ajsc.auth.ShiroUserRole","shiroUserRoleId":"ajsc:contextadmin:default","roleId":"contextadmin:default","userId":"ajsc"} \ No newline at end of file
diff --git a/aai-resources/src/main/scripts/audit_schema.sh b/aai-resources/src/main/scripts/audit_schema.sh
index e0d0143..70aa535 100644
--- a/aai-resources/src/main/scripts/audit_schema.sh
+++ b/aai-resources/src/main/scripts/audit_schema.sh
@@ -21,29 +21,12 @@
# ECOMP is a trademark and service mark of AT&T Intellectual Property.
#
-userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
-if [ "${userid}" != "aaiadmin" ]; then
- echo "You must be aaiadmin to run $0. The id used $userid."
- exit 1
-fi
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
-if [ -f "/etc/profile.d/aai.sh" ]; then
- source /etc/profile.d/aai.sh
-else
- echo "File not found: /etc/profile.d/aai.sh";
- exit
-fi
-
-JAVA=$JAVA_HOME/bin/java
-
-for JAR in `ls $PROJECT_HOME/extJars/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-for JAR in `ls $PROJECT_HOME/lib/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-$JAVA -Dhttps.protocols=TLSv1.1,TLSv1.2 -DAJSC_HOME=$PROJECT_HOME -Daai.home=$PROJECT_HOME -cp $CLASSPATH org.onap.aai.db.schema.ScriptDriver $@
+start_date;
+check_user;
+source_profile;
+execute_spring_jar org.onap.aai.db.schema.ScriptDriver "" "$@"
+end_date;
+exit 0
diff --git a/aai-resources/src/main/scripts/common_functions.sh b/aai-resources/src/main/scripts/common_functions.sh
new file mode 100644
index 0000000..853941c
--- /dev/null
+++ b/aai-resources/src/main/scripts/common_functions.sh
@@ -0,0 +1,56 @@
+#!/bin/ksh
+
+# Common functions that can be used throughout multiple scripts
+# In order to call these functions, this file needs to be sourced
+
+# Checks if the user that is currently running is aaiadmin
+check_user(){
+
+ userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
+
+ if [ "${userid}" != "aaiadmin" ]; then
+ echo "You must be aaiadmin to run $0. The id used $userid."
+ exit 1
+ fi
+}
+
+# Sources the profile and sets the project home
+source_profile(){
+ . /etc/profile.d/aai.sh
+ PROJECT_HOME=/opt/app/aai-resources
+}
+
+# Runs the spring boot jar based on which main class
+# to execute and which logback file to use for that class
+execute_spring_jar(){
+
+ className=$1;
+ logbackFile=$2;
+
+ shift 2;
+
+ EXECUTABLE_JAR=$(ls ${PROJECT_HOME}/lib/*.jar);
+
+ JAVA_OPTS="${JAVA_PRE_OPTS}";
+ JAVA_OPTS="-DAJSC_HOME=$PROJECT_HOME";
+ JAVA_OPTS="$JAVA_OPTS -DBUNDLECONFIG_DIR=resources";
+ JAVA_OPTS="$JAVA_OPTS -Daai.home=$PROJECT_HOME ";
+ JAVA_OPTS="$JAVA_OPTS -Dhttps.protocols=TLSv1.1,TLSv1.2";
+ JAVA_OPTS="$JAVA_OPTS -Dloader.main=${className}";
+ JAVA_OPTS="$JAVA_OPTS -Dlogback.configurationFile=${logbackFile}";
+ JAVA_OPTS="${JAVA_OPTS} ${JAVA_POST_OPTS}";
+
+ ${JAVA_HOME}/bin/java ${JVM_OPTS} ${JAVA_OPTS} -jar ${EXECUTABLE_JAR} "$@"
+}
+
+# Prints the start date and the script that the user called
+start_date(){
+ echo
+ echo `date` " Starting $0"
+}
+
+# Prints the end date and the script that the user called
+end_date(){
+ echo
+ echo `date` " Done $0"
+}
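The rewritten scripts below all follow the same shape once they source this file. A minimal sketch of a new tool script built on these helpers (the main class and logback file named here are placeholders, not classes introduced by this commit):

  #!/bin/ksh
  COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
  . ${COMMON_ENV_PATH}/common_functions.sh

  start_date;
  check_user;
  source_profile;
  # optional JVM tuning picked up by execute_spring_jar via JVM_OPTS / JAVA_PRE_OPTS / JAVA_POST_OPTS
  export JVM_OPTS="-Xmx2g"
  execute_spring_jar org.onap.aai.example.SomeTool ${PROJECT_HOME}/resources/etc/appprops/someTool-logback.xml "$@"
  end_date;
  exit 0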
diff --git a/aai-resources/src/main/scripts/createDBSchema.sh b/aai-resources/src/main/scripts/createDBSchema.sh
index 56f0fcc..491a89c 100644
--- a/aai-resources/src/main/scripts/createDBSchema.sh
+++ b/aai-resources/src/main/scripts/createDBSchema.sh
@@ -30,24 +30,11 @@
# Ie. createDbSchema.sh GEN_DB_WITH_NO_SCHEMA
#
-echo
-echo `date` " Starting $0"
-
-. /etc/profile.d/aai.sh
-PROJECT_HOME=/opt/app/aai-resources
-
-
-for JAR in `ls $PROJECT_HOME/extJars/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-for JAR in `ls $PROJECT_HOME/lib/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-$JAVA_HOME/bin/java -classpath $CLASSPATH -Dhttps.protocols=TLSv1.1,TLSv1.2 -DBUNDLECONFIG_DIR=bundleconfig -DAJSC_HOME=$PROJECT_HOME -Daai.home=$PROJECT_HOME -Dlogback.configurationFile=$PROJECT_HOME/bundleconfig/etc/appprops/createDBSchema-logback.xml org.onap.aai.dbgen.GenTester $1
-
-echo `date` " Done $0"
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+start_date;
+check_user;
+source_profile;
+execute_spring_jar org.onap.aai.dbgen.GenTester ${PROJECT_HOME}/resources/etc/appprops/createDBSchema-logback.xml "$@"
+end_date;
exit 0
diff --git a/aai-resources/src/main/scripts/deleteTool.sh b/aai-resources/src/main/scripts/deleteTool.sh
index b9ee8b0..3d7f923 100644
--- a/aai-resources/src/main/scripts/deleteTool.sh
+++ b/aai-resources/src/main/scripts/deleteTool.sh
@@ -65,7 +65,7 @@ fi
. /etc/profile.d/aai.sh
PROJECT_HOME=/opt/app/aai-resources
-prop_file=$PROJECT_HOME/bundleconfig/etc/appprops/aaiconfig.properties
+prop_file=$PROJECT_HOME/resources/etc/appprops/aaiconfig.properties
log_dir=$PROJECT_HOME/logs/misc
today=$(date +\%Y-\%m-\%d)
@@ -95,7 +95,7 @@ fi
if [ $MISSING_PROP = false ]; then
if [ $USEBASICAUTH = false ]; then
- AUTHSTRING="--cert $PROJECT_HOME/bundleconfig/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/bundleconfig/etc/auth/aaiClientPrivateKey.pem"
+ AUTHSTRING="--cert $PROJECT_HOME/resources/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/resources/etc/auth/aaiClientPrivateKey.pem"
else
AUTHSTRING="-u $CURLUSER:$CURLPASSWORD"
fi
diff --git a/aai-resources/src/main/scripts/dupeTool.sh b/aai-resources/src/main/scripts/dupeTool.sh
new file mode 100644
index 0000000..f088d5b
--- /dev/null
+++ b/aai-resources/src/main/scripts/dupeTool.sh
@@ -0,0 +1,73 @@
+#!/bin/ksh
+
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+#
+# dupeTool.sh -- This tool is used to look at or fix duplicate nodes for one nodeType
+# at a time and can be used to limit what it's looking at to just nodes created
+# within a recent time window.
+# It is made to deal with situations (like we have in 1610/1702) where one type
+# of node keeps needing to have duplicates cleaned up (tenant nodes).
+# It is needed because DataGrooming cannot be run often and cannot be focused just
+# on duplicates or just on one nodeType.
+#
+# Parameters:
+#
+# -userId (required) must be followed by a userid
+# -nodeType (required) must be followed by a valid nodeType
+# -timeWindowMinutes (optional) by default we would look at all nodes of the
+# given nodeType, but if a window is given, then we will only look at
+# nodes created that many (or fewer) minutes ago.
+# -autoFix (optional) use this if you want duplicates fixed automatically (if we
+# can figure out which to delete)
+# -maxFix (optional) like with dataGrooming, lets you override the default maximum
+# number of dupes that can be processed at one time
+# -skipHostCheck (optional) By default, the dupe tool will check to see that it is running
+# on the host that is the first one in the list found in:
+# aaiconfig.properties aai.primary.filetransfer.serverlist
+# This is so that when run from the cron, it only runs on one machine.
+# This option lets you turn that checking off.
+# -sleepMinutes (optional) like with DataGrooming, you can override the
+#              sleep time used during autoFix between the first and second checks of the data.
+# -params4Collect (optional) followed by a string to tell what properties/values to use
+# to limit the nodes being looked at. Must be in the format
+#              of "propertyName|propValue"; use commas to separate them if more
+#              than one name/value pair is being passed.
+# -specialTenantRule (optional) turns on extra logic to figure
+# out which tenant node can be deleted in a common scenario.
+#
+#
+# For example (there are many valid ways to use it):
+#
+# dupeTool.sh -userId am8383 -nodeType tenant -timeWindowMinutes 60 -autoFix
+# or
+# dupeTool.sh -userId am8383 -nodeType tenant -specialTenantRule -autoFix -maxFix 100
+#
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+source_profile;
+execute_spring_jar org.onap.aai.dbgen.DupeTool ${PROJECT_HOME}/resources/etc/appprops/dupeTool-logback.xml "$@"
+end_date;
+exit 0
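One more usage sketch, this time exercising -params4Collect to narrow the scan before autoFix runs; the property name and value below are hypothetical and only illustrate the documented "propertyName|propValue" format:

  dupeTool.sh -userId am8383 -nodeType tenant -params4Collect "tenant-name|example-tenant" -timeWindowMinutes 120 -autoFix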
diff --git a/aai-resources/src/main/scripts/dynamicPayloadArchive.sh b/aai-resources/src/main/scripts/dynamicPayloadArchive.sh
index f17f679..75d75d4 100644
--- a/aai-resources/src/main/scripts/dynamicPayloadArchive.sh
+++ b/aai-resources/src/main/scripts/dynamicPayloadArchive.sh
@@ -21,10 +21,13 @@
###
#
-# The script is called to tar and gzip the files under /opt/app/aai-resources/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation which are the payload files created by the dynamicPayloadGenerator.sh tool.
+# The script is called to tar and gzip the files under /opt/app/aai-resources/resources/etc/scriptdata/addmanualdata/tenant_isolation, which are the payload files created by the dynamicPayloadGenerator.sh tool.
#
#
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
. /etc/profile.d/aai.sh
PROJECT_HOME=/opt/app/aai-resources
@@ -38,7 +41,7 @@ if [ "${userid}" != $CHECK_USER ]; then
echo "You must be $CHECK_USER to run $0. The id used $userid."
exit 1
fi
-DIRECTORY=${PROJECT_HOME}/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation
+DIRECTORY=${PROJECT_HOME}/resources/etc/scriptdata/addmanualdata/tenant_isolation
if [ ! -d ${DIRECTORY} ]
then
echo " ${DIRECTORY} doesn't exist"
diff --git a/aai-resources/src/main/scripts/dynamicPayloadGenerator.sh b/aai-resources/src/main/scripts/dynamicPayloadGenerator.sh
index c386bed..6d46546 100644
--- a/aai-resources/src/main/scripts/dynamicPayloadGenerator.sh
+++ b/aai-resources/src/main/scripts/dynamicPayloadGenerator.sh
@@ -38,11 +38,11 @@
#
# For example (there are many valid ways to use it):
#
-# dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -o '/opt/app/aai-resources/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation/'
+# dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snapshot.graphSON' -o '/opt/app/aai-resources/resources/etc/scriptdata/addmanualdata/tenant_isolation/'
#
# or
-# dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -s false -c '/opt/app/aai-resources/bundleconfig/etc/appprops/dynamic.properties'
-# -o '/opt/app/aai-resources/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation/' -f PAYLOAD -n '/opt/app/aai-resources/bundleconfig/etc/scriptdata/nodes.json'
+# dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snapshot.graphSON' -s false -c '/opt/app/aai-resources/resources/etc/appprops/dynamic.properties'
+# -o '/opt/app/aai-resources/resources/etc/scriptdata/addmanualdata/tenant_isolation/' -f PAYLOAD -n '/opt/app/aai-resources/resources/etc/scriptdata/nodes.json'
#
@@ -63,10 +63,10 @@ display_usage() {
c. -f (optional) PAYLOAD or DMAAP-MR
d. -n (optional) input file for the script
4. For example (there are many valid ways to use it):
- dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -o '/opt/app/aai-resources/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation/'
+ dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snapshot.graphSON' -o '/opt/app/aai-resources/resources/etc/scriptdata/addmanualdata/tenant_isolation/'
- dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -s false -c '/opt/app/aai-resources/bundleconfig/etc/appprops/dynamic.properties'
- -o '/opt/app/aai-resources/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation/' -f PAYLOAD -n '/opt/app/aai-resources/bundleconfig/etc/scriptdata/nodes.json'
+ dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snapshot.graphSON' -s false -c '/opt/app/aai-resources/resources/etc/appprops/dynamic.properties'
+ -o '/opt/app/aai-resources/resources/etc/scriptdata/addmanualdata/tenant_isolation/' -f PAYLOAD -n '/opt/app/aai-resources/resources/etc/scriptdata/nodes.json'
EOF
}
@@ -75,27 +75,13 @@ if [ $# -eq 0 ]; then
exit 1
fi
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
-
-. /etc/profile.d/aai.sh
-PROJECT_HOME=/opt/app/aai-resources
-
-for JAR in `ls $PROJECT_HOME/extJars/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-for JAR in `ls $PROJECT_HOME/lib/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-
-$JAVA_HOME/bin/java -classpath $CLASSPATH -Dhttps.protocols=TLSv1.1,TLSv1.2 -DBUNDLECONFIG_DIR=bundleconfig -DAJSC_HOME=$PROJECT_HOME \
- -Daai.home=$PROJECT_HOME -Dlogback.configurationFile=$PROJECT_HOME/bundleconfig/etc/appprops/dynamicPayloadGenerator-logback.xml -Xmx9000m -Xms9000m \
- org.onap.aai.dbgen.DynamicPayloadGenerator "$@"
-
-
-echo `date` " Done $0"
-
+start_date;
+check_user;
+source_profile;
+export JVM_OPTS="-Xmx9000m -Xms9000m"
+execute_spring_jar org.onap.aai.dbgen.DynamicPayloadGenerator ${PROJECT_HOME}/resources/etc/appprops/dynamicPayloadGenerator-logback.xml "$@"
+end_date;
exit 0
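The large heap is now supplied through the JVM_OPTS hook that execute_spring_jar places in front of the java command, so sizing it differently is a one-line change to the export rather than an edit of a hand-built classpath invocation. A sketch with a smaller heap for a test host (the 4g values are only an example):

  export JVM_OPTS="-Xmx4g -Xms4g"
  execute_spring_jar org.onap.aai.dbgen.DynamicPayloadGenerator ${PROJECT_HOME}/resources/etc/appprops/dynamicPayloadGenerator-logback.xml "$@"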
diff --git a/aai-resources/src/main/scripts/edgeTagger.sh b/aai-resources/src/main/scripts/edgeTagger.sh
index b2bfcb9..abb3428 100644
--- a/aai-resources/src/main/scripts/edgeTagger.sh
+++ b/aai-resources/src/main/scripts/edgeTagger.sh
@@ -62,37 +62,25 @@
# or ./edgeTagger.sh "complex|ctag-pool"
#
-echo
-echo `date` " Starting $0"
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+start_date;
-userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
-if [ "${userid}" != "aaiadmin" ]; then
- echo "You must be aaiadmin to run $0. The id used $userid."
- exit 1
-fi
+echo " NOTE - if you are deleting data, please run the dataSnapshot.sh script first or "
+echo " at least make a note the details of the node that you are deleting. "
-. /etc/profile.d/aai.sh
-PROJECT_HOME=/opt/app/aai-resources
+check_user;
+source_profile;
-for JAR in `ls $PROJECT_HOME/extJars/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
+execute_spring_jar org.onap.aai.dbgen.UpdateEdgeTags "" "$@"
-for JAR in `ls $PROJECT_HOME/lib/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
+PROCESS_STATUS=$?;
-$JAVA_HOME/bin/java -classpath $CLASSPATH -Dhttps.protocols=TLSv1.1,TLSv1.2 -DAJSC_HOME=$PROJECT_HOME -Daai.home=$PROJECT_HOME \
- -Dcom.att.eelf.logging.file=default-logback.xml -Dcom.att.eelf.logging.path="$PROJECT_HOME/bundleconfig/etc/appprops/" \
- org.onap.aai.dbgen.UpdateEdgeTags $1
-if [ "$?" -ne "0" ]; then
- echo "Problem executing UpdateEdgeTags "
- exit 1
-fi
+if [ ${PROCESS_STATUS} -ne 0 ]; then
+ echo "Problem executing UpdateEdgeTags";
+ exit 1;
+fi;
-
-echo `date` " Done $0"
+end_date;
exit 0
diff --git a/aai-resources/src/main/scripts/forceDeleteTool.sh b/aai-resources/src/main/scripts/forceDeleteTool.sh
index 9c199bc..cb87b38 100644
--- a/aai-resources/src/main/scripts/forceDeleteTool.sh
+++ b/aai-resources/src/main/scripts/forceDeleteTool.sh
@@ -66,37 +66,19 @@
#
#
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
-echo
-echo `date` " Starting $0"
+start_date;
echo " NOTE - if you are deleting data, please run the dataSnapshot.sh script first or "
echo " at least make a note the details of the node that you are deleting. "
-userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
-if [ "${userid}" != "aaiadmin" ]; then
- echo "You must be aaiadmin to run $0. The id used $userid."
- exit 1
-fi
+check_user;
+source_profile;
-. /etc/profile.d/aai.sh
-PROJECT_HOME=/opt/app/aai-resources
+execute_spring_jar org.onap.aai.dbgen.ForceDeleteTool ${PROJECT_HOME}/resources/etc/appprops/forceDelete-logback.xml "$@"
-for JAR in `ls $PROJECT_HOME/extJars/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-for JAR in `ls $PROJECT_HOME/lib/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-
-$JAVA_HOME/bin/java -classpath $CLASSPATH -Dhttps.protocols=TLSv1.1,TLSv1.2 -DAJSC_HOME=$PROJECT_HOME -Daai.home=$PROJECT_HOME \
- org.onap.aai.dbgen.ForceDeleteTool "$@"
-
-
-echo `date` " Done $0"
+end_date;
exit 0
diff --git a/aai-resources/src/main/scripts/getTool.sh b/aai-resources/src/main/scripts/getTool.sh
index 8d006b0..3f61ab1 100644
--- a/aai-resources/src/main/scripts/getTool.sh
+++ b/aai-resources/src/main/scripts/getTool.sh
@@ -60,7 +60,7 @@ fi
. /etc/profile.d/aai.sh
PROJECT_HOME=/opt/app/aai-resources
-prop_file=$PROJECT_HOME/bundleconfig/etc/appprops/aaiconfig.properties
+prop_file=$PROJECT_HOME/resources/etc/appprops/aaiconfig.properties
log_dir=$PROJECT_HOME/logs/misc
today=$(date +\%Y-\%m-\%d)
@@ -91,11 +91,11 @@ fi
if [ $MISSING_PROP = false ]; then
if [ $USEBASICAUTH = false ]; then
- AUTHSTRING="--cert $PROJECT_HOME/bundleconfig/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/bundleconfig/etc/auth/aaiClientPrivateKey.pem"
+ AUTHSTRING="--cert $PROJECT_HOME/resources/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/resources/etc/auth/aaiClientPrivateKey.pem"
else
AUTHSTRING="-u $CURLUSER:$CURLPASSWORD"
fi
- curl --request GET -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" $RESTURL$RESOURCE | python -mjson.tool
+ curl --request GET -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" $RESTURL$RESOURCE | jq '.'
RC=$?;
else
echo "usage: $0 resource"
diff --git a/aai-resources/src/main/scripts/install/addManualData.sh b/aai-resources/src/main/scripts/install/addManualData.sh
index 930b9ba..9a5bb59 100644
--- a/aai-resources/src/main/scripts/install/addManualData.sh
+++ b/aai-resources/src/main/scripts/install/addManualData.sh
@@ -22,7 +22,7 @@
# this script now requires a release parameter.
# the tool finds and sorts *.txt files within the
-# bundleconfig/etc/scriptdate/addmanualdata/$release directory containing
+# resources/etc/scriptdata/addmanualdata/$release directory containing
# one resource to be added to the graph. The directory contains a second
# file with the same name, but the extension is .json. This json file
# is passed to the PutTool as the payload. The parameters passed to the
@@ -77,15 +77,15 @@ k=0
if [ "$1" = "tenant_isolation" ]
then
- CR_TEXT_PATH=`find $PROJECT_HOME/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation/cloud-region -name "*.txt" -print | sort -f`
- AZ_TEXT_PATH=`find $PROJECT_HOME/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation/availability-zone -name "*.txt" -print | sort -f`
- COMPLEX_TEXT_PATH=`find $PROJECT_HOME/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation/complex -name "*.txt" -print | sort -f`
- ZONE_TEXT_PATH=`find $PROJECT_HOME/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation/zone -name "*.txt" -print | sort -f`
- PSERVER_TEXT_PATH=`find $PROJECT_HOME/bundleconfig/etc/scriptdata/addmanualdata/tenant_isolation/pserver -name "*.txt" -print | sort -f`
+ CR_TEXT_PATH=`find $PROJECT_HOME/resources/etc/scriptdata/addmanualdata/tenant_isolation/cloud-region -name "*.txt" -print | sort -f`
+ AZ_TEXT_PATH=`find $PROJECT_HOME/resources/etc/scriptdata/addmanualdata/tenant_isolation/availability-zone -name "*.txt" -print | sort -f`
+ COMPLEX_TEXT_PATH=`find $PROJECT_HOME/resources/etc/scriptdata/addmanualdata/tenant_isolation/complex -name "*.txt" -print | sort -f`
+ ZONE_TEXT_PATH=`find $PROJECT_HOME/resources/etc/scriptdata/addmanualdata/tenant_isolation/zone -name "*.txt" -print | sort -f`
+ PSERVER_TEXT_PATH=`find $PROJECT_HOME/resources/etc/scriptdata/addmanualdata/tenant_isolation/pserver -name "*.txt" -print | sort -f`
TEXT_PATH="${CR_TEXT_PATH} ${AZ_TEXT_PATH} ${COMPLEX_TEXT_PATH} ${ZONE_TEXT_PATH} ${PSERVER_TEXT_PATH}"
COMMAND=${TEXT_PATH}
else
- TEXT_PATH=$PROJECT_HOME/bundleconfig/etc/scriptdata/addmanualdata/*/*.txt
+ TEXT_PATH=$PROJECT_HOME/resources/etc/scriptdata/addmanualdata/*/*.txt
COMMAND=`ls ${TEXT_PATH} | sort -f`
fi
diff --git a/aai-resources/src/main/scripts/putTool.sh b/aai-resources/src/main/scripts/putTool.sh
index b5ad862..46696b2 100644
--- a/aai-resources/src/main/scripts/putTool.sh
+++ b/aai-resources/src/main/scripts/putTool.sh
@@ -84,7 +84,7 @@ fi
. /etc/profile.d/aai.sh
PROJECT_HOME=/opt/app/aai-resources
-prop_file=$PROJECT_HOME/bundleconfig/etc/appprops/aaiconfig.properties
+prop_file=$PROJECT_HOME/resources/etc/appprops/aaiconfig.properties
log_dir=$PROJECT_HOME/logs/misc
today=$(date +\%Y-\%m-\%d)
@@ -121,15 +121,15 @@ fi
if [ $MISSING_PROP = false ]; then
if [ $USEBASICAUTH = false ]; then
- AUTHSTRING="--cert $PROJECT_HOME/bundleconfig/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/bundleconfig/etc/auth/aaiClientPrivateKey.pem"
+ AUTHSTRING="--cert $PROJECT_HOME/resources/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/resources/etc/auth/aaiClientPrivateKey.pem"
else
AUTHSTRING="-u $CURLUSER:$CURLPASSWORD"
fi
if [ $RETURNRESPONSE = true ]; then
- curl --request PUT -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -T $JSONFILE $RESTURL$RESOURCE | python -mjson.tool
+ curl --request PUT -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -T /tmp/$(basename $JSONFILE) $RESTURL$RESOURCE | jq '.'
RC=$?
else
- result=`curl --request PUT -sL -w "%{http_code}" -o /dev/null -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -T $JSONFILE $RESTURL$RESOURCE`
+ result=`curl --request PUT -sL -w "%{http_code}" -o /dev/null -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -T /tmp/$(basename $JSONFILE) $RESTURL$RESOURCE`
#echo "result is $result."
RC=0;
if [ $? -eq 0 ]; then
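The commit replaces python -mjson.tool with jq '.'; both simply pretty-print the JSON response read from stdin, but the change means jq must be available on the host. A quick equivalence sketch (values are illustrative):

echo '{"hostname":"example-host","in-maint":false}' | jq '.'
# prints the same pretty-printed structure that `python -mjson.tool` would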
diff --git a/aai-resources/src/main/scripts/rshipTool.sh b/aai-resources/src/main/scripts/rshipTool.sh
index 8decd04..3952d44 100644
--- a/aai-resources/src/main/scripts/rshipTool.sh
+++ b/aai-resources/src/main/scripts/rshipTool.sh
@@ -113,7 +113,7 @@ fi
. /etc/profile.d/aai.sh
PROJECT_HOME=/opt/app/aai-resources
-prop_file=$PROJECT_HOME/bundleconfig/etc/appprops/aaiconfig.properties
+prop_file=$PROJECT_HOME/resources/etc/appprops/aaiconfig.properties
log_dir=$PROJECT_HOME/logs/misc
today=$(date +\%Y-\%m-\%d)
@@ -144,12 +144,12 @@ fi
if [ $MISSING_PROP = false ]; then
if [ $USEBASICAUTH = false ]; then
- AUTHSTRING="--cert $PROJECT_HOME/bundleconfig/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/bundleconfig/etc/auth/aaiClientPrivateKey.pem"
+ AUTHSTRING="--cert $PROJECT_HOME/resources/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/resources/etc/auth/aaiClientPrivateKey.pem"
else
AUTHSTRING="-u $CURLUSER:$CURLPASSWORD"
fi
- RESOURCEVERSION=$(curl --request GET -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" $RESTURL$RESOURCE | python -c "import sys, json; print json.load(sys.stdin)['resource-version']")
+ RESOURCEVERSION=$(curl --request GET -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" $RESTURL$RESOURCE | jq -r '.["resource-version"]')
if [ $ACTION = "PUT" ]; then
result=`curl --request PUT -sL -w "%{http_code}" -o /dev/null -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -T $JSONFILE $RESTURL$RESOURCE$RELATIONSHIP?$RESOURCEVERSION`
#echo "result is $result."
@@ -161,7 +161,7 @@ if [ $MISSING_PROP = false ]; then
if [[ "$result" -ge 200 && $result -lt 300 ]]
then
echo "PUT result is OK, $result"
- curl --request GET -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" $RESTURL$RESOURCE | python -mjson.tool
+ curl --request GET -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" $RESTURL$RESOURCE | jq '.'
else
if [ -z $ALLOWHTTPRESPONSES ]; then
echo "PUT request failed, response code was $result"
@@ -198,7 +198,7 @@ if [ $MISSING_PROP = false ]; then
if [[ "$result" -ge 200 && $result -lt 300 ]]
then
echo "DELETE result is OK, $result"
- curl --request GET -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" $RESTURL$RESOURCE | python -mjson.tool
+ curl --request GET -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" $RESTURL$RESOURCE | jq '.'
else
echo "failed DELETE request, response code was $result"
RC=$result
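The GET-then-PUT/DELETE pattern above relies on A&AI's resource-version field for optimistic locking. A rough sketch of the read step with an invented value:

# read the current object and pull its resource-version with jq
ver=$(curl -sL -k $AUTHSTRING -H "Accept: application/json" $RESTURL$RESOURCE \
      | jq -r '.["resource-version"]')
echo "current resource-version: $ver"   # e.g. 1519837200000
# the script then appends this version to the PUT/DELETE URL so the server can
# reject a stale update instead of silently overwriting a newer object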
diff --git a/aai-resources/src/main/scripts/run_DbTestProcessBuilder.sh b/aai-resources/src/main/scripts/run_DbTestProcessBuilder.sh
index b3a3ac4..6e6ac6f 100644
--- a/aai-resources/src/main/scripts/run_DbTestProcessBuilder.sh
+++ b/aai-resources/src/main/scripts/run_DbTestProcessBuilder.sh
@@ -1,15 +1,16 @@
#!/bin/ksh
-#
+
+###
# ============LICENSE_START=======================================================
# org.onap.aai
# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@@ -17,39 +18,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-#
-# This script invokes the dataSnapshot java class passing an option to tell it to take
-# a snapshot of the database and store it as a single-line XML file.
-#
-
-echo
-echo `date` " Starting $0"
-
-
-userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
-if [ "${userid}" != "aaiadmin" ]; then
- echo "You must be aaiadmin to run $0. The id used $userid."
- exit 1
-fi
-
-. /etc/profile.d/aai.sh
-PROJECT_HOME=/opt/app/aai-resources
+###
-for JAR in `ls $PROJECT_HOME/extJars/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
-for JAR in `ls $PROJECT_HOME/lib/*.jar`
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
+start_date;
+check_user;
+source_profile;
-$JAVA_HOME/bin/java -classpath $CLASSPATH -Dhttps.protocols=TLSv1.1,TLSv1.2 -DAJSC_HOME=$PROJECT_HOME -Daai.home=$PROJECT_HOME org.onap.aai.util.DbTestProcessBuilder "$@"
+CERTPATH=$PROJECT_HOME/resources/etc/auth/
+KEYNAME=aaiClientPrivateKey.pem
+CERTNAME=aaiClientPublicCert.pem
-echo `date` " Done $0"
+pw=$(execute_spring_jar org.onap.aai.util.AAIConfigCommandLinePropGetter "" "aai.keystore.passwd" 2> /dev/null | tail -1)
+openssl pkcs12 -in ${CERTPATH}/aai-client-cert.p12 -out $CERTPATH$CERTNAME -clcerts -nokeys -passin pass:$pw
+openssl pkcs12 -in ${CERTPATH}/aai-client-cert.p12 -out $CERTPATH$KEYNAME -nocerts -nodes -passin pass:$pw
+end_date;
exit 0
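The rewritten script leans on common_functions.sh, which is not shown in this excerpt. A rough sketch of what the sourced helpers are assumed to provide; the bodies below are guesses for illustration, not the actual file:

# hypothetical shapes only -- the real common_functions.sh may differ
start_date()     { echo; echo "$(date) Starting $0"; }
check_user()     { [ "$(id -un)" = "aaiadmin" ] || { echo "You must be aaiadmin to run $0."; exit 1; }; }
source_profile() { . /etc/profile.d/aai.sh; PROJECT_HOME=/opt/app/aai-resources; }
end_date()       { echo "$(date) Done $0"; }
# execute_spring_jar <main-class> <logback-config> [args...] is assumed to launch the
# spring-boot jar with the given main class and logging config, which is how the
# keystore password is fetched before the openssl pkcs12 extraction above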
diff --git a/aai-resources/src/main/scripts/run_Migrations.sh b/aai-resources/src/main/scripts/run_Migrations.sh
index 7f0ea08..1309094 100644
--- a/aai-resources/src/main/scripts/run_Migrations.sh
+++ b/aai-resources/src/main/scripts/run_Migrations.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
###
# ============LICENSE_START=======================================================
@@ -20,57 +20,28 @@
# ============LICENSE_END=========================================================
###
-echo
-echo $(date) " Starting $0"
-
-userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
-if [ "${userid}" != "aaiadmin" ]; then
- echo "You must be aaiadmin to run $0. The id used $userid."
- exit 1
-fi
-
-if [ -f "/etc/profile.d/aai.sh" ]; then
- source /etc/profile.d/aai.sh
-else
- echo "File not found: /etc/profile.d/aai.sh";
- exit
-fi
-
-JAVA=$JAVA_HOME/bin/java
-PROJECT_HOME=/opt/app/aai-resources
-
-ARGS="-c ${PROJECT_HOME}/bundleconfig/etc/appprops/titan-realtime.properties $@"
-
-for JAR in $(ls $PROJECT_HOME/extJars/*.jar)
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-UUID=$(uuidgen)
-
-unzip -o $PROJECT_HOME/lib/ajsc-runner-5.0.0-RC16.0.5.jar -d /tmp/ajsc-war-$UUID/ > /dev/null
-unzip -o /tmp/ajsc-war-$UUID/ajsc-war-5.0.0-RC16.0.5.war -d /tmp/ajsc-war-$UUID/ > /dev/null
-
-for JAR in $(ls /tmp/ajsc-war-$UUID/WEB-INF/lib/*.jar)
-do
- if [[ ! "$JAR" =~ .*logback-classic-.*.jar ]];
- then
- CLASSPATH=$CLASSPATH:$JAR
- fi
-done
-
-for JAR in $(ls /opt/app/swm/dme2/lib/*.jar)
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-for JAR in $(ls $PROJECT_HOME/lib/*.jar)
-do
- CLASSPATH=$CLASSPATH:$JAR
-done
-
-CLASSPATH=$CLASSPATH:${PROJECT_HOME}"/bundleconfig/etc/tmp-config/"
-
-$JAVA -Dhttps.protocols=TLSv1.1,TLSv1.2 -DAJSC_HOME=$PROJECT_HOME -Daai.home=$PROJECT_HOME -DBUNDLECONFIG_DIR="bundleconfig" -Dlogback.configurationFile=$PROJECT_HOME/bundleconfig/etc/appprops/migration-logback.xml -cp $CLASSPATH org.onap.aai.migration.MigrationController $ARGS
-
-rm -r /tmp/ajsc-war-$UUID/
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+# TODO: There is a better way where you can pass in the function
+# and then let the common functions check if the function exist and invoke it
+# So this all can be templated out
+start_date;
+check_user;
+source_profile;
+
+ARGS="-c ${PROJECT_HOME}/resources/etc/appprops/titan-realtime.properties";
+
+if [ -f "$PROJECT_HOME/resources/application.properties" ]; then
+ # Get the application properties file and look for all lines
+ # starting with either jms dmaap or niws
+ # Turn them into system properties and export JAVA_PRE_OPTS so
+ # execute spring jar will get those values
+ # This is only needed since dmaap is used by run_migrations
+ JAVA_PRE_OPTS=$(egrep '^(dmaap|jms|niws)' $PROJECT_HOME/resources/application.properties | sed 's/^\(.*\)$/-D\1/g' | tr '\n' ' ');
+ export JAVA_PRE_OPTS;
+fi;
+
+execute_spring_jar org.onap.aai.migration.MigrationController ${PROJECT_HOME}/resources/etc/appprops/migration-logback.xml ${ARGS} "$@"
+end_date;
+exit 0
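To illustrate the egrep/sed/tr pipeline above: given an application.properties line such as the invented one below, the resulting JAVA_PRE_OPTS fragment would be:

# input line in resources/application.properties (example value only):
#   dmaap.ribbon.listOfServers=message-router.example:3904
# after egrep '^(dmaap|jms|niws)' | sed 's/^\(.*\)$/-D\1/g' | tr '\n' ' ':
#   -Ddmaap.ribbon.listOfServers=message-router.example:3904
# execute_spring_jar is assumed to prepend $JAVA_PRE_OPTS to the java command line,
# so the migrations see these values as system properties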
diff --git a/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/docker-compose.template.yaml b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/docker-compose.template.yaml
new file mode 100644
index 0000000..5a2712b
--- /dev/null
+++ b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/docker-compose.template.yaml
@@ -0,0 +1,41 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+
+# SCLD_ENV is for the environment context for dme2
+# AAI_CHEF_ENV is used for both dme2 properties and
+# also used by chef to generate appropriate properties
+
+version: '2'
+services:
+ aai-resources:
+ image: __REGISTRY__/__NAMESPACE__/aai-resources:__IMAGE_VERSION__
+ network_mode: host
+ environment:
+ - LOCAL_USER_ID=__LOCAL_USER_ID__
+ - LOCAL_GROUP_ID=__LOCAL_GROUP_ID__
+ volumes:
+ - /opt/aai/logroot/AAI-RES:/opt/aai/logroot/AAI-RES
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "30m"
+ max-file: "5"
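The __REGISTRY__/__NAMESPACE__/__IMAGE_VERSION__ and __LOCAL_*_ID__ tokens are placeholders to be filled at deploy time. A minimal sketch of how a deploy step might render the template; the sed command, registry, and version values are illustrative, not part of the commit:

sed -e "s|__REGISTRY__|nexus3.onap.org:10001|" \
    -e "s|__NAMESPACE__|onap|" \
    -e "s|__IMAGE_VERSION__|1.2.0|" \
    -e "s|__LOCAL_USER_ID__|$(id -u aaiadmin)|" \
    -e "s|__LOCAL_GROUP_ID__|$(id -g aaiadmin)|" \
    docker-compose.template.yaml > docker-compose.yaml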
diff --git a/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/execTool.sh b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/execTool.sh
new file mode 100644
index 0000000..8613410
--- /dev/null
+++ b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/execTool.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+
+export WORKING_DIR="$( cd "$(dirname "$0")" ; pwd -P )/"
+
+CONTAINER_NAME=$(docker ps | grep 'aai-resources' | awk '{ print $7; }');
+
+SCRIPT_NAME=$1;
+
+shift;
+
+docker exec -u aaiadmin ${CONTAINER_NAME} ls /opt/app/aai-resources/scripts/${SCRIPT_NAME} && {
+ docker exec -u aaiadmin ${CONTAINER_NAME} /opt/app/aai-resources/scripts/${SCRIPT_NAME} "$@"
+ exit 0;
+} || {
+ echo "Unable to find the tool in the /opt/app/aai-resources/scripts";
+ exit 1;
+}
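A usage sketch for execTool.sh: it resolves the running aai-resources container and re-runs the named in-container script with the remaining arguments; the script name and arguments below are examples only:

# runs /opt/app/aai-resources/scripts/putTool.sh inside the container as aaiadmin
./execTool.sh putTool.sh /cloud-infrastructure/pservers/pserver/example-host example.json
# exits 1 with a message if the named script is not present under .../scripts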
diff --git a/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/kill_resources.sh b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/kill_resources.sh
new file mode 100755
index 0000000..d8e14e5
--- /dev/null
+++ b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/kill_resources.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+. /etc/profile.d/aai.sh
+PROJECT_HOME=/opt/app/aai-resources
+
+docker-compose -f ${PROJECT_HOME}/docker-compose.yaml stop && \
+ docker-compose -f ${PROJECT_HOME}/docker-compose.yaml rm -f
diff --git a/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/start_resources.sh b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/start_resources.sh
new file mode 100755
index 0000000..e9d73ec
--- /dev/null
+++ b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/start_resources.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+. /etc/profile.d/aai.sh
+PROJECT_HOME=/opt/app/aai-resources
+
+docker-compose -f ${PROJECT_HOME}/docker-compose.yaml up -d || exit 200
diff --git a/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/stop_resources.sh b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/stop_resources.sh
new file mode 100755
index 0000000..05bd23d
--- /dev/null
+++ b/aai-resources/src/main/swm/package/nix/dist_files/opt/app/aai-resources/stop_resources.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+. /etc/profile.d/aai.sh
+PROJECT_HOME=/opt/app/aai-resources
+
+docker-compose -f ${PROJECT_HOME}/docker-compose.yaml stop || exit 200
diff --git a/aai-resources/src/main/swm/package/nix/install/postproc/post_proc b/aai-resources/src/main/swm/package/nix/install/postproc/post_proc
new file mode 100644
index 0000000..d017750
--- /dev/null
+++ b/aai-resources/src/main/swm/package/nix/install/postproc/post_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./install_postproc.sh
diff --git a/aai-resources/src/main/swm/package/nix/install/preproc/pre_proc b/aai-resources/src/main/swm/package/nix/install/preproc/pre_proc
new file mode 100644
index 0000000..3f1b26f
--- /dev/null
+++ b/aai-resources/src/main/swm/package/nix/install/preproc/pre_proc
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd ../../common
+exec sh -x ./install_preproc.sh