author    Kajur, Harish (vk250x) <vk250x@att.com>    2018-08-13 02:44:43 -0400
committer Kajur, Harish (vk250x) <vk250x@att.com>    2018-08-13 12:35:56 -0400
commit    9220f729873a7be6455f832468f5090ccd2a25cb (patch)
tree      26ea9dc633bad1bd9bb0f25465a998cd85e6ee5f /aai-traversal/src/main
parent    cdeda67931317caa0582b6e806aaf2df42616170 (diff)
Update traversal to use schema ingest library
Update the traversal microservice to use the ingest library to make the application more model driven.

Issue-ID: AAI-1465
Change-Id: Ifcf56b5d0e7f25ce2bc735b3186e09bc28234100
Signed-off-by: Kajur, Harish (vk250x) <vk250x@att.com>
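In condensed form, the startup change visible in the TraversalApp.java hunks below replaces the static ModelInjestor singleton with a NodeIngestor bean injected from the schema ingest library, and adds org.onap.aai.setup to the component scan so the library's beans are picked up. The sketch below is a paraphrase for orientation only, not additional code in the patch; the class name StartupSketch is hypothetical.

import javax.annotation.PostConstruct;
import org.onap.aai.dbmap.AAIGraph;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.nodes.NodeIngestor;
import org.onap.aai.util.AAIConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class StartupSketch {

    @Autowired
    private NodeIngestor nodeIngestor;   // replaces the removed ModelInjestor.getInstance() call

    @PostConstruct
    private void init() throws AAIException {
        AAIConfig.init();
        AAIGraph.getInstance();          // the injected NodeIngestor is constructed by Spring before this runs
    }
}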
Diffstat (limited to 'aai-traversal/src/main')
-rw-r--r--  aai-traversal/src/main/docker/Dockerfile | 6
-rw-r--r--  aai-traversal/src/main/docker/docker-entrypoint.sh | 46
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/TraversalApp.java | 33
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/config/DslConfiguration.java (renamed from aai-traversal/src/main/java/org/onap/aai/rest/retired/V7V8NamedQueries.java) | 25
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/config/ErrorHandler.java | 87
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/config/SearchConfiguration.java | 50
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ModelBasedProcessing.java | 1435
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/dbgraphmap/SearchGraph.java | 81
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java | 35
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java | 20
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/post/InvalidResponseStatus.java | 65
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java | 67
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java | 23
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java | 24
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java | 26
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java | 19
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/HttpHeaderInterceptor.java | 50
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java | 3
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java | 24
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java | 17
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java | 65
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RetiredInterceptor.java | 148
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java | 14
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/TwoWaySslAuthorization.java | 185
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java | 101
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/pre/VersionLatestInterceptor.java | 57
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/DslConsumer.java | 187
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/QueryConsumer.java | 95
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/RecentAPIConsumer.java | 253
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslContext.java | 130
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslListener.java | 267
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslQueryBuilder.java | 190
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslQueryProcessor.java | 31
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java | 140
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/CustomQueryConfig.java | 62
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java | 124
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GetCustomQueryConfig.java | 232
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinGroovyShellSingleton.java | 87
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerImpl.java | 75
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerSingleton.java | 93
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyQueryBuilderSingleton.java | 95
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/ModelAndNamedQueryRestProvider.java | 53
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/NodeQueryProcessor.java | 116
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/SearchProvider.java | 93
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/util/ConvertQueryPropertiesToJson.java | 180
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/util/ValidateEncoding.java | 3
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/service/AuthorizationService.java | 3
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/service/RetiredService.java | 67
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java | 111
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/util/MakeNamedQuery.java | 177
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/util/TraversalConstants.java (renamed from aai-traversal/src/main/java/org/onap/aai/rest/retired/V3ThroughV7Consumer.java) | 22
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/web/JerseyConfiguration.java | 54
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/web/LocalHostAccessLog.java | 12
-rw-r--r--  aai-traversal/src/main/resources/antlr4/org/onap/aai/AAIDsl.g4 | 17
-rw-r--r--  aai-traversal/src/main/resources/application.properties | 83
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/Introscope.properties | 8
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/PostProcessorInterceptors.properties | 3
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties | 30
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/aaiconfig.properties | 77
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/default-logback.xml | 43
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/error.properties | 12
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/gremlin-server-config.yaml | 25
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/janusgraph-cached.properties | 10
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/janusgraph-realtime.properties | 10
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/methodMapper.properties | 24
-rw-r--r--  aai-traversal/src/main/resources/etc/appprops/preferredRoute.txt | 1
-rw-r--r--  aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getClfiRoadmTailSummary-1.0.json | 186
-rw-r--r--  aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getRouterRoadmTailSummary-1.0.json | 238
-rw-r--r--  aai-traversal/src/main/resources/etc/sysprops/sys-props.properties | 140
-rw-r--r--  aai-traversal/src/main/resources/etc/sysprops/template.sys-props.properties | 115
-rw-r--r--  aai-traversal/src/main/resources/retired.properties | 5
-rw-r--r--  aai-traversal/src/main/resources/schema/UebEventLogEntry.xsd | 43
-rw-r--r--  aai-traversal/src/main/resources/schema/onap/query/stored-queries.json (renamed from aai-traversal/src/main/resources/etc/query/stored-queries.json) | 198
-rw-r--r--  aai-traversal/src/main/scripts/common_functions.sh | 10
-rw-r--r--  aai-traversal/src/main/scripts/install/instutils.sh | 725
-rw-r--r--  aai-traversal/src/main/scripts/putTool.sh | 24
-rw-r--r--  aai-traversal/src/main/swm/package/nix/common/deinstall.env | 10
-rw-r--r--  aai-traversal/src/main/swm/package/nix/common/install_postproc.sh | 39
-rw-r--r--  aai-traversal/src/main/swm/package/nix/deinstall/preproc/pre_proc | 4
-rw-r--r--  aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/docker-compose.template.yaml | 51
-rw-r--r--  aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/execTool.sh | 58
-rw-r--r--  aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/kill_resources.sh | 7
-rw-r--r--  aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/start_resources.sh | 6
-rw-r--r--  aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/stop_resources.sh | 6
-rw-r--r--  aai-traversal/src/main/swm/package/nix/initinst/postproc/post_proc | 4
-rw-r--r--  aai-traversal/src/main/swm/package/nix/install/postproc/post_proc | 4
-rw-r--r--  aai-traversal/src/main/swm/package/nix/install/preproc/pre_proc | 4
87 files changed, 4047 insertions, 3831 deletions
diff --git a/aai-traversal/src/main/docker/Dockerfile b/aai-traversal/src/main/docker/Dockerfile
index 3d25170..bbe0aa3 100644
--- a/aai-traversal/src/main/docker/Dockerfile
+++ b/aai-traversal/src/main/docker/Dockerfile
@@ -1,4 +1,4 @@
-FROM aaionap/aai-common:1.2.0
+FROM aaionap/aai-common:1.3.0
# Add the proper files into the docker image from your build
@@ -8,6 +8,7 @@ WORKDIR /opt/app/aai-traversal
# 8446 is the important one to be used
EXPOSE 8446
+
HEALTHCHECK --interval=40s --timeout=10s --retries=3 CMD nc -z -v localhost 8446 || exit 1
ENTRYPOINT ["/bin/bash", "/opt/app/aai-traversal/docker-entrypoint.sh"]
@@ -16,6 +17,9 @@ RUN mkdir -p /opt/aaihome/aaiadmin /opt/aai/logroot/AAI-GQ
VOLUME /opt/aai/logroot/AAI-GQ
+VOLUME /tmp
+VOLUME /opt/tools
+
COPY /maven/aai-traversal/ .
ENV AAI_BUILD_VERSION @aai.docker.version@
diff --git a/aai-traversal/src/main/docker/docker-entrypoint.sh b/aai-traversal/src/main/docker/docker-entrypoint.sh
index ea5ac6b..4dc703b 100644
--- a/aai-traversal/src/main/docker/docker-entrypoint.sh
+++ b/aai-traversal/src/main/docker/docker-entrypoint.sh
@@ -28,9 +28,6 @@ export CHEF_DATA_GIT_URL=${CHEF_DATA_GIT_URL:-$CHEF_GIT_URL};
export SERVER_PORT=${SERVER_PORT:-8446};
-export RESOURCES_HOSTNAME=${RESOURCES_HOSTNAME:-aai-resources.api.simpledemo.onap.org};
-export RESOURCES_PORT=${RESOURCES_PORT:-8447};
-
USER_ID=${LOCAL_USER_ID:-9001}
GROUP_ID=${LOCAL_GROUP_ID:-9001}
@@ -52,7 +49,6 @@ if [ -f ${APP_HOME}/aai.sh ]; then
gosu aaiadmin ln -s bin scripts
gosu aaiadmin ln -s /opt/aai/logroot/AAI-GQ logs
-
mv ${APP_HOME}/aai.sh /etc/profile.d/aai.sh
chmod 755 /etc/profile.d/aai.sh
@@ -74,6 +70,7 @@ if [ -f ${APP_HOME}/aai.sh ]; then
exit 0;
fi;
+
fi;
if [ -z ${DISABLE_UPDATE_QUERY} ]; then
@@ -82,23 +79,30 @@ if [ -z ${DISABLE_UPDATE_QUERY} ]; then
gosu aaiadmin touch ${UPDATE_QUERY_RAN_FILE};
fi
+mkdir -p /opt/app/aai-traversal/logs/gc
+chown -R aaiadmin:aaiadmin /opt/app/aai-traversal/logs/gc
+
+if [ -f ${APP_HOME}/resources/aai-traversal-swm-vars.sh ]; then
+ source ${APP_HOME}/resources/aai-traversal-swm-vars.sh;
+fi;
+
+MIN_HEAP_SIZE=${MIN_HEAP_SIZE:-512m};
+MAX_HEAP_SIZE=${MAX_HEAP_SIZE:-1024m};
+MAX_PERM_SIZE=${MAX_PERM_SIZE:-512m};
+PERM_SIZE=${PERM_SIZE:-512m}
+
JAVA_CMD="exec gosu aaiadmin java";
-JVM_OPTS="${PRE_JVM_OPTS} -XX:+UnlockDiagnosticVMOptions";
-JVM_OPTS="${JVM_OPTS} -XX:+UnsyncloadClass";
-JVM_OPTS="${JVM_OPTS} -XX:+UseConcMarkSweepGC";
-JVM_OPTS="${JVM_OPTS} -XX:+CMSParallelRemarkEnabled";
-JVM_OPTS="${JVM_OPTS} -XX:+UseCMSInitiatingOccupancyOnly";
-JVM_OPTS="${JVM_OPTS} -XX:CMSInitiatingOccupancyFraction=70";
-JVM_OPTS="${JVM_OPTS} -XX:+ScavengeBeforeFullGC";
-JVM_OPTS="${JVM_OPTS} -XX:+CMSScavengeBeforeRemark";
-JVM_OPTS="${JVM_OPTS} -XX:-HeapDumpOnOutOfMemoryError";
-JVM_OPTS="${JVM_OPTS} -XX:+UseParNewGC";
-JVM_OPTS="${JVM_OPTS} -verbose:gc";
+JVM_OPTS="${PRE_JVM_ARGS} -Xloggc:/opt/app/aai-traversal/logs/gc/aai_gc.log";
+JVM_OPTS="${JVM_OPTS} -XX:HeapDumpPath=/opt/app/aai-traversal/logs/ajsc-jetty/heap-dump";
+JVM_OPTS="${JVM_OPTS} -Xms${MIN_HEAP_SIZE}";
+JVM_OPTS="${JVM_OPTS} -Xmx${MAX_HEAP_SIZE}";
+
JVM_OPTS="${JVM_OPTS} -XX:+PrintGCDetails";
JVM_OPTS="${JVM_OPTS} -XX:+PrintGCTimeStamps";
-JVM_OPTS="${JVM_OPTS} -XX:MaxPermSize=512M";
-JVM_OPTS="${JVM_OPTS} -XX:PermSize=512M";
+JVM_OPTS="${JVM_OPTS} -XX:MaxPermSize=${MAX_PERM_SIZE}";
+JVM_OPTS="${JVM_OPTS} -XX:PermSize=${PERM_SIZE}";
+
JVM_OPTS="${JVM_OPTS} -server";
JVM_OPTS="${JVM_OPTS} -XX:NewSize=512m";
JVM_OPTS="${JVM_OPTS} -XX:MaxNewSize=512m";
@@ -114,11 +118,9 @@ JVM_OPTS="${JVM_OPTS} -XX:ParallelGCThreads=4";
JVM_OPTS="${JVM_OPTS} -XX:LargePageSizeInBytes=128m";
JVM_OPTS="${JVM_OPTS} -XX:+PrintGCDetails";
JVM_OPTS="${JVM_OPTS} -XX:+PrintGCTimeStamps";
-JVM_OPTS="${JVM_OPTS} -Xloggc:/opt/app/aai-traversal/logs/ajsc-jetty/gc/aai_gc.log";
JVM_OPTS="${JVM_OPTS} -Dsun.net.inetaddr.ttl=180";
JVM_OPTS="${JVM_OPTS} -XX:+HeapDumpOnOutOfMemoryError";
-JVM_OPTS="${JVM_OPTS} -XX:HeapDumpPath=/opt/app/aai-traversal/logs/ajsc-jetty/heap-dump";
-JVM_OPTS="${JVM_OPTS} ${POST_JVM_OPTS}";
+JVM_OPTS="${JVM_OPTS} ${POST_JVM_ARGS}";
JAVA_OPTS="${PRE_JAVA_OPTS} -DAJSC_HOME=$APP_HOME";
JAVA_OPTS="${JAVA_OPTS} -Dserver.port=${SERVER_PORT}";
@@ -127,9 +129,9 @@ JAVA_OPTS="${JAVA_OPTS} -Dserver.local.startpath=${RESOURCES_HOME}";
JAVA_OPTS="${JAVA_OPTS} -DAAI_CHEF_ENV=${AAI_CHEF_ENV}";
JAVA_OPTS="${JAVA_OPTS} -DSCLD_ENV=${SCLD_ENV}";
JAVA_OPTS="${JAVA_OPTS} -DAFT_ENVIRONMENT=${AFT_ENVIRONMENT}";
-JAVA_OPTS="${JAVA_OPTS} -DlrmName=com.att.ajsc.traversal";
-JAVA_OPTS="${JAVA_OPTS} -DAAI_BUILD_NUMBER=${AAI_BUILD_NUMBER}";
+JAVA_OPTS="${JAVA_OPTS} -DAAI_BUILD_VERSION=${AAI_BUILD_VERSION}";
JAVA_OPTS="${JAVA_OPTS} -Djava.security.egd=file:/dev/./urandom";
+JAVA_OPTS="${JAVA_OPTS} -Dlogback.configurationFile=./resources/logback.xml";
JAVA_OPTS="${JAVA_OPTS} -Dloader.path=$APP_HOME/resources";
JAVA_OPTS="${JAVA_OPTS} ${POST_JAVA_OPTS}";
diff --git a/aai-traversal/src/main/java/org/onap/aai/TraversalApp.java b/aai-traversal/src/main/java/org/onap/aai/TraversalApp.java
index 6fdf3b0..2797545 100644
--- a/aai-traversal/src/main/java/org/onap/aai/TraversalApp.java
+++ b/aai-traversal/src/main/java/org/onap/aai/TraversalApp.java
@@ -22,10 +22,12 @@ package org.onap.aai;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.config.SpringContextAware;
import org.onap.aai.dbmap.AAIGraph;
import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.introspection.ModelInjestor;
import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.onap.aai.nodes.NodeIngestor;
import org.onap.aai.util.AAIConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
@@ -36,10 +38,12 @@ import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerA
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.core.env.Environment;
+import org.slf4j.MDC;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.UUID;
+import java.util.Map;
@SpringBootApplication
// Component Scan provides a way to look for spring beans
@@ -49,11 +53,12 @@ import java.util.UUID;
@ComponentScan(basePackages = {
"org.onap.aai.config",
"org.onap.aai.web",
+ "org.onap.aai.setup",
"org.onap.aai.tasks",
"org.onap.aai.service",
- "org.onap.aai.rest",
- "com.att.ajsc.common"
+ "org.onap.aai.rest"
})
+
@EnableAutoConfiguration(exclude = {
DataSourceAutoConfiguration.class,
DataSourceTransactionManagerAutoConfiguration.class,
@@ -64,10 +69,21 @@ public class TraversalApp {
private static final EELFLogger logger = EELFManager.getInstance().getLogger(TraversalApp.class.getName());
private static final String APP_NAME = "aai-traversal";
-
+ private static Map<String,String> contextMap;
+
@Autowired
private Environment env;
+ @Autowired
+ private NodeIngestor nodeIngestor;
+
+ @Autowired
+ private SpringContextAware context;
+
+ @Autowired
+ private SpringContextAware loaderFactory;
+
+
@PostConstruct
private void init() throws AAIException {
System.setProperty("org.onap.aai.serverStarted", "false");
@@ -80,6 +96,8 @@ public class TraversalApp {
LoggingContext.requestId(UUID.randomUUID().toString());
LoggingContext.serviceName(APP_NAME);
LoggingContext.targetServiceName("contextInitialized");
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ contextMap = MDC.getCopyOfContextMap();
logger.info("AAI Server initialization started...");
@@ -87,14 +105,14 @@ public class TraversalApp {
// This is only needed for tomcat keeping this as temporary
System.setProperty("org.apache.tomcat.util.buf.UDecoder.ALLOW_ENCODED_SLASH", "true");
- logger.info("Starting AAIGraph connections and the ModelInjestor");
+ logger.info("Starting AAIGraph connections and the NodeInjestor");
if(env.acceptsProfiles(Profiles.TWO_WAY_SSL) && env.acceptsProfiles(Profiles.ONE_WAY_SSL)){
logger.warn("You have seriously misconfigured your application");
}
AAIConfig.init();
- ModelInjestor.getInstance();
+
AAIGraph.getInstance();
}
@@ -108,10 +126,11 @@ public class TraversalApp {
setDefaultProps();
SpringApplication app = new SpringApplication(TraversalApp.class);
+ app.setLogStartupInfo(false);
app.setRegisterShutdownHook(true);
app.addInitializers(new PropertyPasswordConfiguration());
Environment env = app.run(args).getEnvironment();
-
+ MDC.setContextMap (contextMap);
logger.info(
"Application '{}' is running on {}!" ,
env.getProperty("spring.application.name"),
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/retired/V7V8NamedQueries.java b/aai-traversal/src/main/java/org/onap/aai/config/DslConfiguration.java
index 9a9c183..74bc046 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/retired/V7V8NamedQueries.java
+++ b/aai-traversal/src/main/java/org/onap/aai/config/DslConfiguration.java
@@ -17,11 +17,28 @@
* limitations under the License.
* ============LICENSE_END=========================================================
*/
-package org.onap.aai.rest.retired;
+package org.onap.aai.config;
-import javax.ws.rs.Path;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.rest.dsl.DslListener;
+import org.onap.aai.rest.dsl.DslQueryProcessor;
+import org.springframework.beans.factory.config.ConfigurableBeanFactory;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Scope;
-@Path("{version: v[78]}/service-design-and-creation/named-queries")
-public class V7V8NamedQueries extends RetiredConsumer {
+@Configuration
+public class DslConfiguration {
+ @Bean
+ @Scope(scopeName = ConfigurableBeanFactory.SCOPE_PROTOTYPE)
+ public DslListener dslListener(EdgeIngestor edgeIngestor){
+ return new DslListener(edgeIngestor);
+ }
+
+ @Bean
+ @Scope(scopeName = ConfigurableBeanFactory.SCOPE_PROTOTYPE)
+ public DslQueryProcessor dslQueryProcessor(DslListener dslListener){
+ return new DslQueryProcessor(dslListener);
+ }
}
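Both beans above are prototype scoped, so each lookup yields a fresh parser. The fragment below is a minimal sketch of what that means in practice; it assumes a Spring ApplicationContext that contains DslConfiguration plus an EdgeIngestor bean, and it is not code from this patch (how DslConsumer actually obtains its processor is not shown in this hunk).

import org.onap.aai.rest.dsl.DslQueryProcessor;
import org.springframework.context.ApplicationContext;

public class PrototypeScopeSketch {

    // Prototype scope: every getBean() call builds a new DslQueryProcessor (and a new
    // DslListener behind it), so per-query parser state is never shared between callers.
    static void demonstrate(ApplicationContext ctx) {
        DslQueryProcessor first = ctx.getBean(DslQueryProcessor.class);
        DslQueryProcessor second = ctx.getBean(DslQueryProcessor.class);
        System.out.println(first == second);   // prints false for prototype-scoped beans
    }
}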
diff --git a/aai-traversal/src/main/java/org/onap/aai/config/ErrorHandler.java b/aai-traversal/src/main/java/org/onap/aai/config/ErrorHandler.java
new file mode 100644
index 0000000..22e12a6
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/config/ErrorHandler.java
@@ -0,0 +1,87 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.config;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.core.annotation.Order;
+import org.springframework.stereotype.Component;
+import org.springframework.web.filter.OncePerRequestFilter;
+
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.MediaType;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Responsible for dealing with uri that doesn't start with basePath
+ * All of the other interceptors will handle any uri that starts with basePath
+ * So we need this to ensure that these cases are properly handled
+ */
+@Order(1)
+@Component
+public class ErrorHandler extends OncePerRequestFilter {
+
+ private String basePath;
+
+ public ErrorHandler(@Value("${schema.uri.base.path}") String basePath){
+ this.basePath = basePath;
+ if(!basePath.endsWith("/")){
+ this.basePath = basePath + "/";
+ }
+ }
+
+ @Override
+ protected void doFilterInternal(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, FilterChain filterChain) throws ServletException, IOException {
+
+ String uri = httpServletRequest.getRequestURI();
+
+ if (uri != null && !(uri.startsWith(basePath))) {
+
+ AAIException e = new AAIException("AAI_3012");
+ ArrayList<String> templateVars = new ArrayList<>();
+
+ List<MediaType> mediaTypeList = new ArrayList<>();
+
+ String acceptHeader = httpServletRequest.getHeader("Accept");
+ if (acceptHeader == null) {
+ mediaTypeList.add(MediaType.APPLICATION_XML_TYPE);
+ } else {
+ mediaTypeList.add(MediaType.valueOf(acceptHeader));
+ }
+
+ String message = ErrorLogHelper.getRESTAPIErrorResponse(mediaTypeList, e, templateVars);
+
+ httpServletResponse.setStatus(400);
+ httpServletResponse.setContentType(mediaTypeList.get(0).toString());
+ httpServletResponse.getWriter().print(message);
+ httpServletResponse.getWriter().close();
+ return;
+ }
+
+ filterChain.doFilter(httpServletRequest, httpServletResponse);
+ }
+
+}
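To make the intent of this new filter concrete, the following is a minimal, hypothetical sketch of its behaviour using the spring-test mock servlet classes; the base path value "/aai" simply stands in for whatever schema.uri.base.path resolves to, the request URIs are illustrative, and none of this sketch is part of the patch. The exact error body also depends on the service's error.properties being on the classpath.

import org.onap.aai.config.ErrorHandler;
import org.springframework.mock.web.MockFilterChain;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;

public class ErrorHandlerSketch {
    public static void main(String[] args) throws Exception {
        ErrorHandler filter = new ErrorHandler("/aai");   // "/aai" is an assumed base path

        // A URI outside the base path is answered directly with a 400 and an AAI_3012 error body
        MockHttpServletRequest outside = new MockHttpServletRequest("GET", "/somewhere/else");
        MockHttpServletResponse response = new MockHttpServletResponse();
        filter.doFilter(outside, response, new MockFilterChain());
        System.out.println(response.getStatus());   // 400

        // A URI under the base path is passed through untouched to the rest of the filter chain
        MockHttpServletRequest inside = new MockHttpServletRequest("GET", "/aai/v1/example");
        filter.doFilter(inside, new MockHttpServletResponse(), new MockFilterChain());
    }
}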
diff --git a/aai-traversal/src/main/java/org/onap/aai/config/SearchConfiguration.java b/aai-traversal/src/main/java/org/onap/aai/config/SearchConfiguration.java
new file mode 100644
index 0000000..bef10d0
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/config/SearchConfiguration.java
@@ -0,0 +1,50 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.config;
+
+import org.onap.aai.dbgraphmap.SearchGraph;
+
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.rest.dsl.DslListener;
+import org.onap.aai.rest.dsl.DslQueryProcessor;
+import org.onap.aai.rest.search.GremlinServerSingleton;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.beans.factory.config.ConfigurableBeanFactory;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Scope;
+
+@Configuration
+public class SearchConfiguration {
+
+ @Bean
+ public SearchGraph searchGraph(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, SchemaVersions schemaVersions) {
+ SearchGraph searchGraph = new SearchGraph(loaderFactory, edgeIngestor, schemaVersions);
+ return searchGraph;
+ }
+
+ @Bean
+ public GremlinServerSingleton gremlinServerSingleton(){
+ return new GremlinServerSingleton();
+ }
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ModelBasedProcessing.java b/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ModelBasedProcessing.java
index 1f7a75f..dc42120 100644
--- a/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ModelBasedProcessing.java
+++ b/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ModelBasedProcessing.java
@@ -32,17 +32,22 @@ import org.apache.tinkerpop.gremlin.structure.VertexProperty;
import org.onap.aai.db.DbMethHelper;
import org.onap.aai.db.props.AAIProperties;
import org.onap.aai.dbgen.PropertyLimitDesc;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.EdgeRuleQuery;
+import org.onap.aai.edges.enums.EdgeType;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.introspection.Introspector;
import org.onap.aai.introspection.Loader;
import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.logging.LogFormatTools;
import org.onap.aai.query.builder.QueryBuilder;
import org.onap.aai.schema.enums.PropertyMetadata;
import org.onap.aai.serialization.db.DBSerializer;
-import org.onap.aai.serialization.db.EdgeRules;
-import org.onap.aai.serialization.db.EdgeType;
+
import org.onap.aai.serialization.engines.TransactionalGraphEngine;
import org.onap.aai.util.AAIConfig;
+import org.onap.aai.concurrent.AaiCallable;
+import org.onap.aai.config.SpringContextAware;
import java.util.*;
import java.util.concurrent.Callable;
@@ -60,9 +65,9 @@ public class ModelBasedProcessing {
private Loader loader;
private DBSerializer serializer;
private DbMethHelper dbMethHelper;
-
+
protected ModelBasedProcessing() {
-
+
}
public ModelBasedProcessing(Loader loader, TransactionalGraphEngine engine, DBSerializer serializer) {
this.loader = loader;
@@ -85,25 +90,25 @@ public class ModelBasedProcessing {
* @throws AAIException the AAI exception
*/
public Map<String,String> getStartNodesAndModVersionIds( String transId, String fromAppId,
- String passedModelVersionId,
+ String passedModelVersionId,
String passedModelInvId,
String passedModelName,
String passedTopNodeType,
- List<Map<String,Object>> startNodeFilterArrayOfHashes,
- String apiVer )
+ List<Map<String,Object>> startNodeFilterArrayOfHashes,
+ String apiVer )
throws AAIException {
// ----------------------------------------------------------------------------------------------------
// Get a hash for all start-nodes (key = vtxId, val = modelVersionId that applies)
- // If no start-node-key info is passed, then use either the passed modelVersion or
+ // If no start-node-key info is passed, then use either the passed modelVersion or
// the passed model-invariant-id or model-name to collect them.
- // If start-node-key info is given, use it instead to look for start-nodes.
- // Note: if ONLY start-node-key info is given, then it would have to map to nodes which
+ // If start-node-key info is given, use it instead to look for start-nodes.
+ // Note: if ONLY start-node-key info is given, then it would have to map to nodes which
// have persona data. Otherwise we'd have no way to know what model to collect data with.
// ----------------------------------------------------------------------------------------------------
Iterator<Vertex> startVerts = null;
Map<String, String> startVertInfo = new HashMap<>();
-
+
if( startNodeFilterArrayOfHashes.isEmpty() ){
// Since they did not give any data to find start instances, we will have to find them
// using whatever model-info they provided so we can use it to map to persona-data in the db.
@@ -125,7 +130,7 @@ public class ModelBasedProcessing {
if( calcModId != null ){
startVerts = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(addDBAliasedSuffix("model-invariant-id"),calcModId).has(addDBAliasedSuffix("model-version-id"),passedModelVersionId);
}
- }
+ }
else if( passedModelInvId != null && !passedModelInvId.equals("") ){
// They gave us the model-invariant-id
startVerts = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(addDBAliasedSuffix("model-invariant-id"),passedModelInvId);
@@ -138,11 +143,11 @@ public class ModelBasedProcessing {
for( int i = 0; i < modelVerVtxList.size(); i++ ){
String calcModVerId = (modelVerVtxList.get(i)).<String>property("model-version-id").orElse(null);
Vertex modVtx = getModelGivenModelVer(modelVerVtxList.get(i),"");
- String calcModInvId = modVtx.<String>property("model-invariant-id").orElse(null);
+ String calcModInvId = modVtx.<String>property("model-invariant-id").orElse(null);
// Now we can look up instances that match this model's info
Iterator<Vertex> tmpStartIter = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(addDBAliasedSuffix("model-invariant-id"),calcModInvId).has(addDBAliasedSuffix("model-version-id"),calcModVerId);
while( tmpStartIter.hasNext() ){
- Vertex tmpStartVert = tmpStartIter.next();
+ Vertex tmpStartVert = (Vertex) tmpStartIter.next();
startVtxList.add(tmpStartVert);
}
}
@@ -150,12 +155,12 @@ public class ModelBasedProcessing {
if( !startVtxList.isEmpty() ){
startVerts = startVtxList.iterator();
}
- }
+ }
}
-
- if( startVerts != null ){
+
+ if( startVerts != null ){
while( startVerts.hasNext() ){
- Vertex tmpStartVert = startVerts.next();
+ Vertex tmpStartVert = (Vertex) startVerts.next();
String vid = tmpStartVert.id().toString();
String tmpModId = tmpStartVert.<String>property(addDBAliasedSuffix("model-invariant-id")).orElse(null);
String tmpModVerId = tmpStartVert.<String>property(addDBAliasedSuffix("model-version-id")).orElse(null);
@@ -164,12 +169,12 @@ public class ModelBasedProcessing {
}
if( startVertInfo.isEmpty() ){
throw new AAIException("AAI_6114", "Start Node(s) could not be found for model data passed. " +
- "(modelVersionId = [" + passedModelVersionId +
+ "(modelVersionId = [" + passedModelVersionId +
"], modelInvariantId = [" + passedModelInvId +
"], modelName = [" + passedModelName +
"])");
}
-
+
return startVertInfo;
}
else {
@@ -179,17 +184,17 @@ public class ModelBasedProcessing {
String modInfoStr = "";
if( passedModelVersionId != null && !passedModelVersionId.equals("") ){
modTopNodeType = getModelVerTopWidgetType( transId, fromAppId, passedModelVersionId, "", "" );
- modInfoStr = "modelVersionId = (" + passedModelVersionId + ")";
+ modInfoStr = "modelVersionId = (" + passedModelVersionId + ")";
}
else if( passedModelInvId != null && !passedModelInvId.equals("") ){
modTopNodeType = getModelVerTopWidgetType( transId, fromAppId,"", passedModelInvId, "" );
- modInfoStr = "modelId = (" + passedModelInvId + ")";
+ modInfoStr = "modelId = (" + passedModelInvId + ")";
}
else if( passedModelName != null && !passedModelName.equals("") ){
modTopNodeType = getModelVerTopWidgetType( transId, fromAppId,"", "", passedModelName );
- modInfoStr = "modelName = (" + passedModelName + ")";
+ modInfoStr = "modelName = (" + passedModelName + ")";
}
-
+
if( modTopNodeType.equals("") ){
if( (passedTopNodeType == null) || passedTopNodeType.equals("") ){
String msg = "Could not determine the top-node nodeType for this request. modelInfo: [" + modInfoStr + "]";
@@ -202,16 +207,16 @@ public class ModelBasedProcessing {
}
}
else {
- // we did get a topNode type based on model info - make sure it doesn't contradict
+ // we did get a topNode type based on model info - make sure it doesn't contradict
// the passsed-in one (if there is one)
- if( passedTopNodeType != null && !passedTopNodeType.equals("")
+ if( passedTopNodeType != null && !passedTopNodeType.equals("")
&& !passedTopNodeType.equals(modTopNodeType) ){
throw new AAIException("AAI_6120", "topNodeType passed in [" + passedTopNodeType
+ "] does not match nodeType derived for model info passed in: ["
- + modTopNodeType + "]");
+ + modTopNodeType + "]");
}
}
-
+
List<String> modelVersionIds2Check = new ArrayList<>();
if( (passedModelName != null && !passedModelName.equals("")) ){
// They passed a modelName, so find all the model UUIDs (model-version-id's) that map to this
@@ -231,7 +236,7 @@ public class ModelBasedProcessing {
modelVersionIds2Check.add(passedModelVersionId);
}
}
-
+
// We should now be OK with our topNodeType for this request, so we can look for the actual startNodes
for( int i=0; i < startNodeFilterArrayOfHashes.size(); i++ ){
// Locate the starting node which will be used to look which corresponds to this set of filter data
@@ -244,42 +249,42 @@ public class ModelBasedProcessing {
startVtx = result.get();
}
catch( AAIException e ){
- String msg = "Could not find startNode of type = [" + modTopNodeType + "], given these params: "
+ String msg = "Could not find startNode of type = [" + modTopNodeType + "], given these params: "
+ startNodeFilterArrayOfHashes.get(i) + ". msg # from getUniqueNode() = " + e.getMessage();
throw new AAIException("AAI_6114", msg);
}
-
+
String vid = startVtx.id().toString();
String personaModInvId = startVtx.<String>property(addDBAliasedSuffix("model-invariant-id")).orElse(null);
String personaModVerId = startVtx.<String>property(addDBAliasedSuffix("model-version-id")).orElse(null);
-
+
// Either this start-node has persona info (which should not contradict any passed-in model info)
// or they should have passed in the model to use - so we'd just use that.
if( personaModVerId != null && !personaModVerId.equals("") ){
// There is persona data in this start-node. So make sure it doesn't contradict any "passed" stuff
- if( modelVersionIds2Check.isEmpty()
+ if( modelVersionIds2Check.isEmpty()
&& (passedModelInvId == null || passedModelInvId.equals("")) ){
// They didn't pass any model info, so use the persona one.
startVertInfo.put(vid, personaModVerId);
}
- else if( modelVersionIds2Check.isEmpty()
+ else if( modelVersionIds2Check.isEmpty()
&& (passedModelInvId != null && !passedModelInvId.equals("")) ){
// They passed in just the modelId - so check it
if( passedModelInvId.equals(personaModInvId) ){
startVertInfo.put(vid, personaModVerId);
}
}
- else if( !modelVersionIds2Check.isEmpty()
+ else if( !modelVersionIds2Check.isEmpty()
&& (passedModelInvId == null || passedModelInvId.equals("")) ){
// They passed in just modelVersionId - so check
if( modelVersionIds2Check.contains(personaModVerId) ){
startVertInfo.put(vid, personaModVerId);
}
- }
- else if( !modelVersionIds2Check.isEmpty()
+ }
+ else if( !modelVersionIds2Check.isEmpty()
&& (passedModelInvId != null && !passedModelInvId.equals("")) ){
- // We have BOTH a modelVersionIds and a modelId to check
- if( passedModelInvId.equals(personaModInvId)
+ // We have BOTH a modelVersionIds and a modelId to check
+ if( passedModelInvId.equals(personaModInvId)
&& modelVersionIds2Check.contains(personaModVerId) ){
startVertInfo.put(vid, personaModVerId);
}
@@ -293,14 +298,16 @@ public class ModelBasedProcessing {
}
else {
throw new AAIException("AAI_6118", "Found startNode but since it does not have persona data, the " +
- " model-version-id is required. ");
+ " model-version-id is required. ");
}
}
}
}
+
return startVertInfo;
- }
-
+
+ }//end of getStartNodesAndModVersionIds()
+
/**
* Query by model. (really model-ver)
@@ -324,16 +331,16 @@ public class ModelBasedProcessing {
List<Map<String,Object>> startNodeFilterArrayOfHashes,
String apiVer )
throws AAIException {
-
+
final String transId_f = transId;
final String fromAppId_f = fromAppId;
final String modelVersionId_f = modelVersionId;
final String modelInvId_f = modelInvariantId;
final String modelName_f = modelName;
final String topNodeType_f = topNodeType;
- final List<Map<String,Object>> startNodeFilterArrayOfHashes_f = startNodeFilterArrayOfHashes;
- final String apiVer_f = apiVer;
-
+ final List<Map<String,Object>> startNodeFilterArrayOfHashes_f = startNodeFilterArrayOfHashes;
+ final String apiVer_f = apiVer;
+
// Find out what our time-limit should be
int timeLimitSec = 0;
String timeLimitString = AAIConfig.get("aai.model.query.timeout.sec");
@@ -345,33 +352,34 @@ public class ModelBasedProcessing {
// Don't worry, we will leave the limit as zero - which tells us not to use it.
}
}
-
+
if( timeLimitSec <= 0 ){
// We will NOT be using a timer
return queryByModel_Timed( transId, fromAppId,
- modelVersionId,
+ modelVersionId,
modelInvariantId,
modelName,
topNodeType,
- startNodeFilterArrayOfHashes,
+ startNodeFilterArrayOfHashes,
apiVer );
}
-
+
List<ResultSet> resultList = new ArrayList<>();
TimeLimiter limiter = new SimpleTimeLimiter();
try {
- resultList = limiter.callWithTimeout(new Callable <List<ResultSet>>() {
- public List<ResultSet> call() throws AAIException {
+
+ resultList = limiter.callWithTimeout(new AaiCallable <List<ResultSet>>() {
+ public List<ResultSet> process() throws AAIException {
return queryByModel_Timed( transId_f, fromAppId_f,
- modelVersionId_f,
+ modelVersionId_f,
modelInvId_f,
modelName_f,
topNodeType_f,
- startNodeFilterArrayOfHashes_f,
+ startNodeFilterArrayOfHashes_f,
apiVer_f );
}
}, timeLimitSec, TimeUnit.SECONDS, true);
- }
+ }
catch (AAIException ae) {
// Re-throw AAIException so we get can tell what happened internally
throw ae;
@@ -382,10 +390,11 @@ public class ModelBasedProcessing {
catch (Exception e) {
throw new AAIException("AAI_6128", "Unexpected exception in queryByModel(): " + e.getMessage() );
}
+
return resultList;
}
-
-
+
+
/**
* Query by model (model-ver) timed.
*
@@ -408,28 +417,28 @@ public class ModelBasedProcessing {
List<Map<String,Object>> startNodeFilterArrayOfHashesVal,
String apiVer )
throws AAIException {
-
+
List<ResultSet> resultArray = new ArrayList<>();
-
+
// NOTE: this method can be used for different styles of queries:
// a) They could pass neither a modelVersionId or a modelInvariantId but just pass a set of data defining start-nodes.
// Note - with no model info, we need them to pass the startNodeType for us to be able to use the
- // start-node-filter data. We would look at each start node and ensure that each has persona-model info.
+ // start-node-filter data. We would look at each start node and ensure that each has persona-model info.
// Then use whatever model corresponds to each instance to pull that instance's data.
// b) They could pass a modelInvariantId, but no modelVersionId and no startNode info. In this case, we
- // Would look in the database for all nodes that have a model-invariant-id-local that matches what was
+ // Would look in the database for all nodes that have a model-invariant-id-local that matches what was
// passed, and then for each of those instances, pull the data based on the corresponding model.
- // c) They could pass a model-version-id, but no startNode info. We'd make sure that if a
+ // c) They could pass a model-version-id, but no startNode info. We'd make sure that if a
// model-invariant-id was also passed, that it does not conflict - but it really should be null if they
// are passing a full model-version-id. Like case -b-, we'd do a query for all nodes
- // that have persona info that corresponds to the model-version-id passed and then
+ // that have persona info that corresponds to the model-version-id passed and then
// collect data for each one.
// d) They could pass either modelVersionId or modelInvariantId AND startNodeFilter info. In this case we
- // would look at the model info to figure out what the top-node-type is, then look at the
+ // would look at the model info to figure out what the top-node-type is, then look at the
// top-node instances based on the startNodeFilter. We'd only collect data for each instance if
// it's persona model info matches what was passed in.
-
-
+
+
// Sorry to do this, but code that gets called with an empty hash as the first array element was causing errors
List<Map<String,Object>> startNodeFilterArrayOfHashes = new ArrayList <Map<String,Object>>();
if( !startNodeFilterArrayOfHashesVal.isEmpty() ){
@@ -440,21 +449,21 @@ public class ModelBasedProcessing {
}
}
}
-
+
// ----------------------------------------------------------------------------------------------------------
- // Get a Hash of all the start-nodes (top instance-data node for a model-ver where we will
- // start collecting data) for startNode2ModelVerHash:
- // key = vertex-id for the startNode,
- // value = model-version-id for the corresponding model-ver
+ // Get a Hash of all the start-nodes (top instance-data node for a model-ver where we will
+ // start collecting data) for startNode2ModelVerHash:
+ // key = vertex-id for the startNode,
+ // value = model-version-id for the corresponding model-ver
// ----------------------------------------------------------------------------------------------------------
Map<String, String> startNode2ModelVerHash = getStartNodesAndModVersionIds( transId, fromAppId,
modelVersionId, modelInvariantId, modelName, topNodeType,
- startNodeFilterArrayOfHashes, apiVer );
-
- //System.out.println("\nDEBUG -- Here's a dump of the startnodes/model-vers: " + startNode2ModelVerHash.toString());
-
+ startNodeFilterArrayOfHashes, apiVer );
+
+ //System.out.println("\nDEBUG -- Here's a dump of the startnodes/model-vers: " + startNode2ModelVerHash.toString());
+
// --------------------------------------------------------------------------------------------------------
- // Figure out what-all models (model-ver nodes) we will be dealing with
+ // Figure out what-all models (model-ver nodes) we will be dealing with
// Note - Instances must all use the same type of start-node, but do not have to all use the same model-ver.
// --------------------------------------------------------------------------------------------------------
Map<String, Vertex> distinctModelVersHash = new HashMap<>();
@@ -469,7 +478,7 @@ public class ModelBasedProcessing {
Set <String> snKeySet = startNode2ModelVerHash.keySet();
Iterator<String> startNodeIterator = snKeySet.iterator();
while( startNodeIterator.hasNext() ){
- String modVerIdKey = startNodeIterator.next();
+ String modVerIdKey = (String) startNodeIterator.next();
String modVerId = startNode2ModelVerHash.get(modVerIdKey);
if( !distinctModelVersHash.containsKey(modVerId) ){
// First time seeing this model-version-id
@@ -487,7 +496,7 @@ public class ModelBasedProcessing {
System.out.println(">>> WARNING - will not collect model data for this vertex since " +
"it uses an inconsistant model-ver model. Model-version-id = " + modVerId );
}
-
+
if( tmpNodeType != null && !tmpNodeType.equals("") ){
if( startNodeType.equals("") ){
startNodeType = tmpNodeType;
@@ -501,12 +510,12 @@ public class ModelBasedProcessing {
}
}
}
-
+
//System.out.println("\nDEBUG -- Here's a dump of the DISTINCT model-ver hash: " + distinctModelVersHash.toString() );
-
+
// ------------------------------------------------------------------------------------------------------
// Get the "valid-next-step" hash for each distinct model-ver
- // While we're at it, get a mapping of model-invariant-id|model-version to model-version-id for
+ // While we're at it, get a mapping of model-invariant-id|model-version to model-version-id for
// the model-vers being used
// ------------------------------------------------------------------------------------------------------
Map<String, Multimap<String, String>> validNextStepHash = new HashMap<>();
@@ -514,46 +523,46 @@ public class ModelBasedProcessing {
Set <String> keySet = distinctModelVersHash.keySet();
Iterator<String> modelVerIterator = keySet.iterator();
while( modelVerIterator.hasNext() ){
- String modVerKey = modelVerIterator.next();
+ String modVerKey = (String) modelVerIterator.next();
if( ! skipModelVerIdList.contains(modVerKey) ){
- Vertex modelVerVtx = distinctModelVersHash.get(modVerKey);
+ Vertex modelVerVtx = (Vertex)distinctModelVersHash.get(modVerKey);
Multimap<String, String> tmpTopoMap = genTopoMap4ModelVer( transId, fromAppId,
modelVerVtx, modVerKey);
validNextStepHash.put(modVerKey, tmpTopoMap);
}
- }
-
+ }
+
// -------------------------------------------------------------------------------------------------
- // Figure out what the "start-node" for each instance will be (plus the info we will use to
+ // Figure out what the "start-node" for each instance will be (plus the info we will use to
// represent that in our topology)
// -------------------------------------------------------------------------------------------------
List<String> failedPersonaCheckVids = new ArrayList<>();
- Map<String, String> firstStepInfoHash = new HashMap<>();
+ Map<String, String> firstStepInfoHash = new HashMap<>();
// For firstStepInfoHash: key = startNodeVtxId, val=topNodeType plus personaData if applicable
// ie. the value is what we'd use as the "first-step" for this model.
if( !nodeTypeSupportsPersona( startNodeType) ){
- // This node type doesn't have persona info, so we just use startNodeType for the first-step-info
+ // This node type doesn't have persona info, so we just use startNodeType for the first-step-info
snKeySet = startNode2ModelVerHash.keySet();
startNodeIterator = snKeySet.iterator();
while( startNodeIterator.hasNext() ){
- String vtxKey = startNodeIterator.next();
+ String vtxKey = (String) startNodeIterator.next();
firstStepInfoHash.put(vtxKey,startNodeType);
}
}
- else {
+ else {
// Need to check that this node's persona data is good and if it is - use it for the first step info
snKeySet = startNode2ModelVerHash.keySet();
startNodeIterator = snKeySet.iterator();
while( startNodeIterator.hasNext() ){
- String vtxKey = startNodeIterator.next();
+ String vtxKey = (String) startNodeIterator.next();
Iterator<Vertex> vtxIterator = this.engine.asAdmin().getReadOnlyTraversalSource().V(vtxKey);
- Vertex tmpVtx = vtxIterator.next();
+ Vertex tmpVtx = (Vertex)vtxIterator.next();
String thisVtxModelVerId = startNode2ModelVerHash.get(vtxKey);
if( skipModelVerIdList.contains(thisVtxModelVerId) ){
// Skip this vertex because it uses a model-ver that is bad
continue;
}
- Vertex modelVerVtx = distinctModelVersHash.get(thisVtxModelVerId);
+ Vertex modelVerVtx = (Vertex)distinctModelVersHash.get(thisVtxModelVerId);
Vertex modelVtx = getModelGivenModelVer( modelVerVtx, "" );
String modInvId = modelVtx.<String>property("model-invariant-id").orElse(null);
String personaModInvId = tmpVtx.<String>property(addDBAliasedSuffix("model-invariant-id")).orElse(null);
@@ -562,21 +571,21 @@ public class ModelBasedProcessing {
String tmpPersonaInfoStr = startNodeType + "," + personaModInvId + "," + personaModVerId;
firstStepInfoHash.put(vtxKey, tmpPersonaInfoStr );
}
- else {
+ else {
// we won't use this start node below when we collect data because it should have
// had persona data that matched it's model - but it did not.
failedPersonaCheckVids.add(vtxKey);
}
- }
- }
+ }
+ }
//System.out.println("\nDEBUG -- Here's a dump of the firstStepInfoHash hash: " + firstStepInfoHash.toString() );
-
+
// ------------------------------------------------------------------------------------------------
- // Loop through each start-node, collect it's data using collectInstanceData() and put the
+ // Loop through each start-node, collect it's data using collectInstanceData() and put the
// resultSet onto the resultArray.
// ------------------------------------------------------------------------------------------------
-
+
// Make sure they're not bringing back too much data
String maxString = AAIConfig.get("aai.model.query.resultset.maxcount");
if( maxString != null && !maxString.equals("") ){
@@ -587,17 +596,17 @@ public class ModelBasedProcessing {
catch ( Exception nfe ){
// Don't worry, we will leave the max as zero - which tells us not to use it.
}
-
+
if( maxSets > 0 && (startNode2ModelVerHash.size() > maxSets) ){
String msg = " Query returns " + startNode2ModelVerHash.size() + " resultSets. Max allowed is: " + maxSets;
throw new AAIException("AAI_6141", msg);
}
}
-
+
snKeySet = startNode2ModelVerHash.keySet();
startNodeIterator = snKeySet.iterator();
while( startNodeIterator.hasNext() ){
- String topNodeVtxId = startNodeIterator.next();
+ String topNodeVtxId = (String) startNodeIterator.next();
if( failedPersonaCheckVids.contains(topNodeVtxId) ){
// Skip this vertex because it failed it's persona-data check above
continue;
@@ -606,27 +615,29 @@ public class ModelBasedProcessing {
// Skip this vertex because it uses a model-ver that is bad
continue;
}
-
+
Iterator<Vertex> vtxIterator = this.engine.asAdmin().getReadOnlyTraversalSource().V(topNodeVtxId);
- Vertex tmpStartVtx = vtxIterator.next();
- String elementLocationTrail = firstStepInfoHash.get(topNodeVtxId);
+ Vertex tmpStartVtx = (Vertex)vtxIterator.next();
+ String elementLocationTrail = firstStepInfoHash.get(topNodeVtxId);
String modelVerId = startNode2ModelVerHash.get(topNodeVtxId);
Multimap<String, String> validNextStepMap = validNextStepHash.get(modelVerId);
-
+
List<String> vidsTraversed = new ArrayList<>();
Map<String,String> emptyDelKeyHash = new HashMap<>();
Map<String,String> emptyNQElementHash = new HashMap<>(); // Only applies to Named Queries
ResultSet tmpResSet = collectInstanceData( transId, fromAppId,
- tmpStartVtx, elementLocationTrail,
+ tmpStartVtx, elementLocationTrail,
validNextStepMap, vidsTraversed, 0, emptyDelKeyHash, emptyNQElementHash, apiVer );
-
+
resultArray.add(tmpResSet);
}
+
return resultArray;
- }
-
-
-
+
+ }// queryByModel_Timed()
+
+
+
/**
* Run delete by model-ver.
*
@@ -641,15 +652,15 @@ public class ModelBasedProcessing {
* @throws AAIException the AAI exception
*/
public Map<String,String> runDeleteByModel( String transId, String fromAppId,
- String modelVersionId, String topNodeTypeVal, Map<String,Object> startNodeFilterHash, String apiVer, String resVersion )
+ String modelVersionId, String topNodeTypeVal, Map<String,Object> startNodeFilterHash, String apiVer, String resVersion )
throws AAIException {
-
+
Map<String,String> retHash = new HashMap<>();
-
- // Locate the Model-ver node to be used
+
+ // Locate the Model-ver node to be used
Vertex modelVerVtx = null;
if( modelVersionId != null && !modelVersionId.equals("") ){
- modelVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
+ modelVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
"model-version-id", modelVersionId);
}
else {
@@ -658,28 +669,28 @@ public class ModelBasedProcessing {
if( topNodeTypeVal == null || topNodeTypeVal.equals("") ){
throw new AAIException("AAI_6118", "If no model info is passed, then topNodeType is required. ");
}
-
+
Optional<Vertex> result = dbMethHelper.searchVertexByIdentityMap(topNodeTypeVal, startNodeFilterHash);
if (!result.isPresent()) {
throw new AAIException("AAI_6114", "No Node of type " + topNodeTypeVal + " found for properties");
}
Vertex startVtx = result.get();
-
+
String startVertModVerId = startVtx.<String>property(addDBAliasedSuffix("model-version-id")).orElse(null);
- modelVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
+ modelVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
"model-version-id", startVertModVerId);
}
-
+
if( modelVerVtx == null ){
throw new AAIException("AAI_6114", "Could not determine the model-ver for the given input parameters. ");
}
String topNType = "unknown";
String modelType = getModelTypeFromModelVer( modelVerVtx, "" );
-
+
if( modelType.equals("widget") ){
- // If they want to delete using a widget-level model.. That is just a delete of the one
- // instance of one of our nodes.
+ // If they want to delete using a widget-level model.. That is just a delete of the one
+ // instance of one of our nodes.
String widgModNodeType = modelVerVtx.<String>property("model-name").orElse(null);
if( (widgModNodeType == null) || widgModNodeType.equals("") ){
String msg = "Could not find model-name for the widget model [" + modelVersionId + "].";
@@ -695,11 +706,11 @@ public class ModelBasedProcessing {
retHash.put(widgId, widgModNodeType);
return retHash;
}
-
+
// ---------------------------------------------------------------------------------
// If we got to here, this must be either a service or resource model.
// So, we'll need to get a Hash of which parts of the model to delete.
- // NOTE- deleteByModel is deleting data based on one specific version of a model.
+ // NOTE- deleteByModel is deleting data based on one specific version of a model.
// ---------------------------------------------------------------------------------
String chkFirstNodePersonaModInvId = "";
String chkFirstNodePersonaModVerId = "";
@@ -716,18 +727,18 @@ public class ModelBasedProcessing {
chkFirstNodePersonaModVerId = modelVerVtx.<String>property("model-version-id").orElse(null);
personaData = "," + chkFirstNodePersonaModInvId + "," + chkFirstNodePersonaModVerId;
}
-
+
// Get the deleteKeyHash for this model
String incomingTrail = "";
Map<String, String> currentHash = new HashMap<>();
Map<String, Vertex> modConHash = new HashMap<>();
ArrayList <String> vidsTraversed = new ArrayList<>();
Map<String, String> delKeyHash = collectDeleteKeyHash( transId, fromAppId,
- firstModElementVertex, incomingTrail, currentHash, vidsTraversed,
- 0, modConHash,
- chkFirstNodePersonaModInvId, chkFirstNodePersonaModVerId );
-
-
+ firstModElementVertex, incomingTrail, currentHash, vidsTraversed,
+ 0, modConHash,
+ chkFirstNodePersonaModInvId, chkFirstNodePersonaModVerId );
+
+
System.out.println("\n ----DEBUG -----: Delete Hash for model: [" + modelVersionId + "] looks like: ");
for( Map.Entry<String, String> entry : delKeyHash.entrySet() ){
System.out.println("key = [" + entry.getKey() + "], val = [" + entry.getValue() + "]");
@@ -744,7 +755,7 @@ public class ModelBasedProcessing {
// we need to make sure that the start node matches the persona values.
String startVertPersonaModInvId = startVtx.<String>property(addDBAliasedSuffix("model-invariant-id")).orElse(null);
String startVertPersonaModVerId = startVtx.<String>property(addDBAliasedSuffix("model-version-id")).orElse(null);
- if( !chkFirstNodePersonaModInvId.equals(startVertPersonaModInvId)
+ if( !chkFirstNodePersonaModInvId.equals(startVertPersonaModInvId)
|| !chkFirstNodePersonaModVerId.equals(startVertPersonaModVerId) ){
String msg = "Persona-Model data mismatch for start node (" + topNType + "), " +
startNodeFilterHash ;
@@ -752,34 +763,36 @@ public class ModelBasedProcessing {
}
}
String topVid = startVtx.id().toString();
-
+
// Read the model-ver into a Map for processing
Multimap<String, String> validNextStepMap = genTopoMap4ModelVer(transId, fromAppId,
modelVerVtx, modelVersionId);
-
+
// Collect the data
String elementLocationTrail = topNType + personaData;
vidsTraversed = new ArrayList<>();
- Map<String,String> emptyHash = new HashMap<>();
-
+ Map<String,String> emptyHash = new HashMap<>();
+
// Pass emptyHash for the NQElement hash since that parameter only applies to Named Queries
ResultSet retResSet = collectInstanceData( transId, fromAppId,
- startVtx, elementLocationTrail,
+ startVtx, elementLocationTrail,
validNextStepMap, vidsTraversed, 0, delKeyHash, emptyHash, apiVer );
-
+
// Note: the new ResultSet will have each element tagged with the del flag so we'll know if it
- // should be deleted or not - so loop through the results in a try-block since some things
+ // should be deleted or not - so loop through the results in a try-block since some things
// will get auto-deleted by parents before we get to them --- and try to remove each one.
String vidToResCheck = topVid;
-
- retHash = deleteAsNeededFromResultSet( transId, fromAppId, retResSet,
+
+ retHash = deleteAsNeededFromResultSet( transId, fromAppId, retResSet,
vidToResCheck, apiVer, resVersion, emptyHash );
//String msgStr = "processed deletes for these vids: (\n"+ retHash.keySet().toString() + ").";
+
return retHash;
- }
-
-
+
+ }// End of runDeleteByModel()
+
+
/**
* Delete as needed from result set.
*
@@ -800,26 +813,26 @@ public class ModelBasedProcessing {
Map<String,String> retHash = new HashMap<>();
retHash.putAll( hashSoFar );
Boolean deleteIt = false;
-
+
if( resSet.getVert() == null ){
return retHash;
}
-
+
Vertex thisVtx = resSet.getVert();
String thisGuyId = "";
String thisNT = "";
String thisGuyStr = "";
-
+
Boolean gotVtxOK = false;
try {
if( thisVtx != null ){
thisGuyId = thisVtx.id().toString();
thisNT = thisVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
thisGuyStr = thisGuyId + "[" + thisNT + " found at:" + resSet.getLocationInModelSubGraph() + "]";
-
- // NOTE -- will try to set the NodeType to itself to see if the node has been deleted already in
- // this transaction. It lets you get properties from nodes being deleted where the
- // delete hasn't been committed yet. This check used to be accomplished with a call to
+
+ // NOTE -- will try to set the NodeType to itself to see if the node has been deleted already in
+ // this transaction. It lets you get properties from nodes being deleted where the
+ // delete hasn't been committed yet. This check used to be accomplished with a call to
// "vtx.isRemoved()" but that was a Titan-only feature and is not available anymore since
// we no longer use Titan vertices.
// If we don't do this check, we get errors later when we try to delete the node.
@@ -829,19 +842,19 @@ public class ModelBasedProcessing {
}
catch (Exception ex) {
// Sometimes things have already been deleted by the time we get to them - just log it.
- LOGGER.warn("Exception when trying to delete: " + thisGuyStr + ". msg = " + ex.getMessage(), ex);
+ LOGGER.warn("Exception when trying to delete: " + thisGuyStr + ". msg = " + ex.getMessage() + LogFormatTools.getStackTop(ex));
}
-
+
if( !gotVtxOK ){
// The vertex must have already been removed. Just return.
- // Note - We need to catch this because the DB sometimes can still have the vtx
+ // Note - We need to catch this because the DB sometimes can still have the vtx
// and be able to get its ID but it is flagged internally as removed already.
return retHash;
}
else {
if( resSet.getNewDataDelFlag() != null && resSet.getNewDataDelFlag().equals("T") ){
LOGGER.info(">> will try to delete this one >> " + thisGuyStr);
-
+
try {
Boolean requireResourceVersion = false;
if( thisGuyId.equals(vidToResCheck) ){
@@ -859,48 +872,50 @@ public class ModelBasedProcessing {
else {
String errText = ae.getErrorObject().getErrorText();
String errDetail = ae.getMessage();
- LOGGER.warn("Exception when deleting " + thisGuyStr + ". ErrorCode = " + errorCode +
+ LOGGER.warn("Exception when deleting " + thisGuyStr + ". ErrorCode = " + errorCode +
", errorText = " + errText + ", details = " + errDetail);
}
}
catch( Exception e ){
- // We'd expect to get a "node not found" here sometimes depending on the order that
+ // We'd expect to get a "node not found" here sometimes depending on the order that
// the model has us finding / deleting nodes.
// Ignore the exception - but log it so we can see what happened.
- LOGGER.warn("Exception when deleting " + thisGuyStr + e.getMessage(), e);
+						LOGGER.warn("Exception when deleting " + thisGuyStr + ". msg = " + e.getMessage() + LogFormatTools.getStackTop(e));
}
-
+
// We can't depend on a thrown exception to tell us if a node was deleted since it may
- // have been auto=deleted before this removeAaiNode() call.
+					// have been auto-deleted before this removeAaiNode() call.
// --- Not sure if we would want to check anything here -- because the graph.commit() is done outside of this call.
-
+
deleteIt = true;
}
else {
- // --- DEBUG ----
+ // --- DEBUG ----
System.out.println(">>>>>>> NOT DELETING THIS ONE >>>> " + thisGuyStr );
List<String> retArr = dbMethHelper.getVertexProperties(thisVtx);
for( String info : retArr ){ System.out.println(info); }
// --- DEBUG ----
}
}
-
+
// Now call this routine for the sub-resultSets
List <ResultSet> subResultSetList = resSet.getSubResultSet();
Iterator <ResultSet> subResSetIter = subResultSetList.iterator();
while( subResSetIter.hasNext() ){
ResultSet tmpSubResSet = subResSetIter.next();
- retHash = deleteAsNeededFromResultSet( transId, fromAppId, tmpSubResSet,
+ retHash = deleteAsNeededFromResultSet( transId, fromAppId, tmpSubResSet,
vidToResCheck, apiVer, resVersion, retHash );
}
-
+
if( deleteIt ){
retHash.put(thisGuyId, thisGuyStr);
}
+
return retHash;
- }
-
-
+
+ }// deleteAsNeededFromResultSet()
+
+
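
The recursion above is easier to see stripped of the graph plumbing: attempt the delete only when the del-flag is "T", tolerate not-found failures (a parent delete may already have cascaded), then recurse into the sub-result-sets while threading the accumulating map through each call. A minimal sketch of that walk, using simplified hypothetical types (SimpleResultSet, deleteVertex) rather than the real A&AI classes:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Hypothetical, simplified stand-in for the real ResultSet type.
    class SimpleResultSet {
        String vertexId;                                       // id of the vertex this node wraps
        String newDataDelFlag = "F";                           // "T" means the model marked it deletable
        List<SimpleResultSet> subResultSet = new ArrayList<>();
    }

    public class DeleteWalkSketch {

        // Returns a map of vertexId -> description for everything we attempted to delete.
        static Map<String, String> deleteAsNeeded(SimpleResultSet rs, Map<String, String> hashSoFar) {
            Map<String, String> retHash = new HashMap<>(hashSoFar);
            if (rs == null || rs.vertexId == null) {
                return retHash;
            }
            boolean deleteIt = false;
            if ("T".equals(rs.newDataDelFlag)) {
                try {
                    deleteVertex(rs.vertexId);
                } catch (RuntimeException notFound) {
                    // Tolerated: a parent delete may already have cascaded to this node.
                }
                // As in the method above, record it either way -- an exception may just
                // mean the node was auto-deleted before we reached it.
                deleteIt = true;
            }
            // Recurse into the children, threading the accumulating map through each call.
            for (SimpleResultSet child : rs.subResultSet) {
                retHash = deleteAsNeeded(child, retHash);
            }
            if (deleteIt) {
                retHash.put(rs.vertexId, "handled[" + rs.vertexId + "]");
            }
            return retHash;
        }

        // Placeholder for the real graph delete; a no-op in this sketch.
        static void deleteVertex(String vid) {
        }
    }
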
/**
* Query by named query (old version).
@@ -918,18 +933,18 @@ public class ModelBasedProcessing {
ArrayList <Map<String,Object>> startNodeFilterArrayOfHashes,
String apiVer )
throws AAIException {
-
+
String dummyCutPoint = null;
Map<String,Object> dummySecondaryFilterHash = null;
-
+
return queryByNamedQuery( transId, fromAppId,
- namedQueryUuid,
- startNodeFilterArrayOfHashes,
+ namedQueryUuid,
+ startNodeFilterArrayOfHashes,
apiVer,
dummyCutPoint,
- dummySecondaryFilterHash );
+ dummySecondaryFilterHash );
}
-
+
/**
* Query by named query.
@@ -951,15 +966,15 @@ public class ModelBasedProcessing {
String secondaryFilterCutPoint,
Map<String,Object> secondaryFilterHash )
throws AAIException {
-
+
final String transId_f = transId;
final String fromAppId_f = fromAppId;
final String namedQueryUuid_f = namedQueryUuid;
- final List<Map<String,Object>> startNodeFilterArrayOfHashes_f = startNodeFilterArrayOfHashes;
- final String apiVer_f = apiVer;
- final String secondaryFilterCutPoint_f = secondaryFilterCutPoint;
- final Map<String,Object> secondaryFilterHash_f = secondaryFilterHash;
-
+ final List<Map<String,Object>> startNodeFilterArrayOfHashes_f = startNodeFilterArrayOfHashes;
+ final String apiVer_f = apiVer;
+ final String secondaryFilterCutPoint_f = secondaryFilterCutPoint;
+ final Map<String,Object> secondaryFilterHash_f = secondaryFilterHash;
+
// Find out what our time-limit should be
int timeLimitSec = 0;
String timeLimitString = AAIConfig.get("aai.model.query.timeout.sec");
@@ -976,27 +991,27 @@ public class ModelBasedProcessing {
// We will NOT be using a timer
return queryByNamedQuery_Timed( transId, fromAppId,
namedQueryUuid,
- startNodeFilterArrayOfHashes,
+ startNodeFilterArrayOfHashes,
apiVer,
secondaryFilterCutPoint_f,
secondaryFilterHash_f );
}
-
+
List<ResultSet> resultList = new ArrayList<>();
TimeLimiter limiter = new SimpleTimeLimiter();
try {
- resultList = limiter.callWithTimeout(new Callable <List<ResultSet>>() {
- public List<ResultSet> call() throws AAIException {
+ resultList = limiter.callWithTimeout(new AaiCallable <List<ResultSet>>() {
+ public List<ResultSet> process() throws AAIException {
return queryByNamedQuery_Timed( transId_f, fromAppId_f,
namedQueryUuid_f,
- startNodeFilterArrayOfHashes_f,
+ startNodeFilterArrayOfHashes_f,
apiVer_f,
secondaryFilterCutPoint_f,
secondaryFilterHash_f );
}
}, timeLimitSec, TimeUnit.SECONDS, true);
-
- }
+
+ }
catch (AAIException ae) {
			// Re-throw AAIException so we can tell what happened internally
throw ae;
@@ -1007,20 +1022,21 @@ public class ModelBasedProcessing {
catch (Exception e) {
throw new AAIException("AAI_6128", "Unexpected exception in queryByNamedQuery(): " + e.getMessage() );
}
+
return resultList;
}
-
-
+
+
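
The hunk above swaps Guava's Callable for the project's AaiCallable but keeps the same idea: run the named-query lookup under a time limiter and give up after aai.model.query.timeout.sec seconds. For readers unfamiliar with the pattern, here is a minimal sketch of the same time-boxing using only the JDK ExecutorService; the task body and timeout value are placeholders, not the real query code:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    public class TimedQuerySketch {

        public static void main(String[] args) throws Exception {
            int timeLimitSec = 30;   // stands in for the aai.model.query.timeout.sec setting read above
            ExecutorService pool = Executors.newSingleThreadExecutor();
            Future<List<String>> future = pool.submit(() -> {
                // stands in for the real queryByNamedQuery_Timed(...) call
                return new ArrayList<>(Arrays.asList("result-set-1", "result-set-2"));
            });
            try {
                List<String> resultList = future.get(timeLimitSec, TimeUnit.SECONDS);
                System.out.println("query returned " + resultList.size() + " result sets");
            } catch (TimeoutException te) {
                future.cancel(true);   // interrupt the worker, as the limiter's 'true' flag does above
                System.out.println("query exceeded " + timeLimitSec + " seconds");
            } finally {
                pool.shutdownNow();
            }
        }
    }
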
/**
* Query by named query timed.
*
* @param transId the trans id
* @param fromAppId the from app id
* @param namedQueryUuid the named query uuid
- * @param startNodeFilterArrayOfHashes the start node filter array of hashes --used to locate the first nodes of instance data
+ * @param startNodeFilterArrayOfHashes the start node filter array of hashes --used to locate the first nodes of instance data
* @param apiVer the api ver
* @param secondaryFilterCutPoint the nodeType where we will parse for the secondary Filter
- * @param secondaryFilterHash the secondary filter hash
+ * @param secondaryFilterHash the secondary filter hash
* @return resultSet
* @throws AAIException the AAI exception
*/
@@ -1030,13 +1046,13 @@ public class ModelBasedProcessing {
String apiVer,
String secondaryFilterCutPoint,
Map<String,Object> secondaryFilterHash
- )
+ )
throws AAIException {
-
+
// Locate the Query to be used
Vertex queryVtx = getNodeUsingUniqueId(transId, fromAppId, "named-query",
"named-query-uuid", namedQueryUuid);
-
+
// Get the first/top named-query-element used by this query
Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, queryVtx, "named-query-element");
Vertex firstNqElementVert = null;
@@ -1047,7 +1063,7 @@ public class ModelBasedProcessing {
count++;
topNType = getNqElementWidgetType( transId, fromAppId, firstNqElementVert, "" );
}
-
+
if( count < 1 ){
// A named query must start with a single top element
throw new AAIException("AAI_6133", "No top-node defined for named-query-uuid = [" + namedQueryUuid + "]");
@@ -1060,7 +1076,7 @@ public class ModelBasedProcessing {
String msg = "Could not determine the top-node nodeType for Named Query: [" + namedQueryUuid + "]";
throw new AAIException("AAI_6133", msg);
}
-
+
// Read the topology into a hash for processing
Multimap<String, String> validNextStepMap = genTopoMap4NamedQ(transId, fromAppId, queryVtx, namedQueryUuid);
@@ -1077,7 +1093,7 @@ public class ModelBasedProcessing {
boolean foundIndexedField = false;
int propertiesSet = 0;
while( propIter.hasNext() ){
- String oldVtxKey = propIter.next();
+ String oldVtxKey = (String) propIter.next();
String newKey = oldVtxKey;
String [] parts = oldVtxKey.split("\\.");
if( parts.length == 2 ){
@@ -1121,7 +1137,7 @@ public class ModelBasedProcessing {
}
}
}
-
+
if (startVertList.isEmpty()) {
throw new AAIException("AAI_6114", "No Node of type " + topNType + " found for properties");
}
@@ -1134,7 +1150,7 @@ public class ModelBasedProcessing {
throw new AAIException("AAI_6141", msg);
}
}
-
+
// Loop through each start node and get its data
List<ResultSet> resSetList = new ArrayList<>();
for( int i = 0; i < startVertList.size(); i++ ){
@@ -1143,21 +1159,21 @@ public class ModelBasedProcessing {
String elementLocationTrail = topNType;
ArrayList <String> vidsTraversed = new ArrayList<>();
Map<String,String> emptyDelKeyHash = new HashMap<>(); // Does not apply to Named Queries
-
+
// Get the mapping of namedQuery elements to our widget topology for this namedQuery
String incomingTrail = "";
Map<String, String> currentHash = new HashMap<>();
-
+
Map<String,String> namedQueryElementHash = collectNQElementHash( transId, fromAppId,
firstNqElementVert, incomingTrail, currentHash, vidsTraversed, 0 );
-
+
vidsTraversed = new ArrayList<>();
ResultSet tmpResSet = collectInstanceData( transId, fromAppId,
- startVtx, elementLocationTrail,
+ startVtx, elementLocationTrail,
validNextStepMap, vidsTraversed, 0, emptyDelKeyHash, namedQueryElementHash, apiVer );
resSetList.add(tmpResSet);
}
-
+
// If a secondary filter was defined, we will prune the collected instance data result set(s) based on it.
List<ResultSet> prunedResSetList = new ArrayList<>();
if( resSetList != null && !resSetList.isEmpty() ){
@@ -1174,8 +1190,8 @@ public class ModelBasedProcessing {
}
}
}
-
- // Since a NamedQuery can mark some nodes as "do-not-display", we need to collapse our resultSet so
+
+ // Since a NamedQuery can mark some nodes as "do-not-display", we need to collapse our resultSet so
		// it does not display those nodes.
List<ResultSet> collapsedResSetList = new ArrayList<>();
if( prunedResSetList != null && !prunedResSetList.isEmpty() ){
@@ -1184,7 +1200,7 @@ public class ModelBasedProcessing {
// marked all the "top" node-elements as do-not-output. Ie. the query may
// have had a top-node of "generic-vnf" which joins down to different l-interfaces.
// If they only want to see the l-interfaces, then a single result set
- // would be "collapsed" into many separate resultSets - each of which is
+ // would be "collapsed" into many separate resultSets - each of which is
// just a single l-interface.
List<ResultSet> tmpResSetList = collapseForDoNotOutput(prunedResSetList.get(i));
if( tmpResSetList != null && !tmpResSetList.isEmpty() ){
@@ -1195,11 +1211,12 @@ public class ModelBasedProcessing {
}
}
}
-
+
return collapsedResSetList;
- }
-
+
+ }// End of queryByNamedQuery()
+
/**
* Prune a result set as per a secondary filter.
*
@@ -1211,13 +1228,13 @@ public class ModelBasedProcessing {
*/
public ResultSet pruneResultSet(ResultSet resSetVal, String cutPointType, Map<String,Object> secFilterHash )
throws AAIException {
-
- // Given a ResultSet and some secondary filter info, do pruning as needed
+
+ // Given a ResultSet and some secondary filter info, do pruning as needed
ResultSet pResSet = new ResultSet();
-
- // For this ResultSet, we will see if we are on a node of the type that is our cutPoint;
+
+ // For this ResultSet, we will see if we are on a node of the type that is our cutPoint;
// then only keep it if we peek "below" and see a match for our filter.
-
+
String nt = resSetVal.getVert().<String>property(AAIProperties.NODE_TYPE).orElse(null);
if( nt != null && nt.equals(cutPointType) ){
			// We are on the type of node that may need to be "pruned" along with its sub-results
@@ -1226,8 +1243,8 @@ public class ModelBasedProcessing {
return pResSet;
}
}
-
- // If we made it to here, we will not be pruning at this level, so we will
+
+ // If we made it to here, we will not be pruning at this level, so we will
		// be returning a copy of this resultSet that has its subResults pruned (as needed).
pResSet.setVert(resSetVal.getVert());
pResSet.setDoNotOutputFlag(resSetVal.getDoNotOutputFlag());
@@ -1236,7 +1253,7 @@ public class ModelBasedProcessing {
pResSet.setNewDataDelFlag(resSetVal.getNewDataDelFlag());
pResSet.setPropertyLimitDesc(resSetVal.getPropertyLimitDesc());
pResSet.setPropertyOverRideHash(resSetVal.getPropertyOverRideHash());
-
+
if( !resSetVal.getSubResultSet().isEmpty() ){
ListIterator<ResultSet> listItr = resSetVal.getSubResultSet().listIterator();
List<ResultSet> newSubSetList = new ArrayList<>();
@@ -1248,11 +1265,13 @@ public class ModelBasedProcessing {
}
}
pResSet.setSubResultSet(newSubSetList);
- }
+ }
+
return pResSet;
- }
-
-
+
+ }// End pruneResultSet()
+
+
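
Taken together with satisfiesFilters() below, the pruning rule is: when the walk reaches a node of the cut-point type, keep that node (and its copied subtree) only if the secondary filter is matched somewhere at or below it; otherwise drop the whole branch. A compact sketch of that shape on a hypothetical simplified tree type (Node and filterMatchedHere stand in for ResultSet and filterMetByThisSet):

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical, simplified tree node standing in for ResultSet.
    class Node {
        String nodeType;
        List<Node> children = new ArrayList<>();
        boolean filterMatchedHere;   // stands in for filterMetByThisSet(...)
    }

    public class PruneSketch {

        // Returns null when the branch rooted at n should be dropped.
        static Node prune(Node n, String cutPointType) {
            if (n == null) {
                return null;
            }
            if (cutPointType.equals(n.nodeType) && !subtreeMatchesFilter(n)) {
                // At the cut point and nothing at or below it satisfies the filter: prune.
                return null;
            }
            Node copy = new Node();
            copy.nodeType = n.nodeType;
            copy.filterMatchedHere = n.filterMatchedHere;
            for (Node child : n.children) {
                Node prunedChild = prune(child, cutPointType);
                if (prunedChild != null) {
                    copy.children.add(prunedChild);
                }
            }
            return copy;
        }

        // True if this node, or anything below it, matches the secondary filter.
        static boolean subtreeMatchesFilter(Node n) {
            if (n.filterMatchedHere) {
                return true;
            }
            for (Node child : n.children) {
                if (subtreeMatchesFilter(child)) {
                    return true;
                }
            }
            return false;
        }
    }
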
/**
* Satisfies hash of filters.
*
@@ -1263,18 +1282,18 @@ public class ModelBasedProcessing {
*/
public boolean satisfiesFilters(ResultSet resSet, Map<String,Object> filterHash )
throws AAIException {
-
+
if( filterHash.isEmpty() ){
// Nothing to look for, so no, we didn't find it.
return false;
}
-
+
Iterator <?> it = filterHash.entrySet().iterator();
while( it.hasNext() ){
Map.Entry<?,?> filtEntry = (Map.Entry<?,?>) it.next();
String propNodeTypeDotName = (filtEntry.getKey()).toString();
String fpv = (filtEntry.getValue()).toString();
-
+
int periodLoc = propNodeTypeDotName.indexOf(".");
if( periodLoc <= 0 ){
String emsg = "Bad filter param key passed in: [" + propNodeTypeDotName + "]. Expected format = [nodeName.paramName]\n";
@@ -1292,11 +1311,13 @@ public class ModelBasedProcessing {
}
}
}
+
// Made it through all the filters -- it found what we were looking for.
return true;
- }
-
-
+
+ }// end of satisfiesFilters()
+
+
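
The filter keys handled above follow the "nodeName.paramName" convention spelled out in the error message. A small sketch of just that parsing step, splitting on the first period and rejecting malformed keys (class name and example values are hypothetical):

    import java.util.AbstractMap.SimpleEntry;
    import java.util.Map;

    public class FilterKeySketch {

        // Splits "generic-vnf.vnf-name" into ("generic-vnf", "vnf-name");
        // throws on keys that do not match the nodeName.paramName format.
        static Map.Entry<String, String> parseFilterKey(String key) {
            int periodLoc = key.indexOf(".");
            if (periodLoc <= 0) {
                throw new IllegalArgumentException(
                        "Bad filter param key passed in: [" + key + "]. Expected format = [nodeName.paramName]");
            }
            String nodeType = key.substring(0, periodLoc);
            String propName = key.substring(periodLoc + 1);
            return new SimpleEntry<>(nodeType, propName);
        }

        public static void main(String[] args) {
            Map.Entry<String, String> parts = parseFilterKey("generic-vnf.vnf-name");
            System.out.println(parts.getKey() + " / " + parts.getValue());   // generic-vnf / vnf-name
        }
    }
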
/**
* Filter met by this set.
*
@@ -1309,7 +1330,7 @@ public class ModelBasedProcessing {
public boolean filterMetByThisSet(ResultSet resSet, String filtNodeType, String filtPropName, String filtPropVal ) {
// Note - we are just looking for a positive match for one filter for this resultSet
// NOTE: we're expecting the filter to have a format like this: "nodeType.parameterName:parameterValue"
-
+
Vertex vert = resSet.getVert();
if( vert == null ){
return false;
@@ -1335,7 +1356,7 @@ public class ModelBasedProcessing {
}
}
}
-
+
		// Didn't find a match at this level, so check whether the sets below it meet the criteria
if( resSet.getSubResultSet() != null ){
ListIterator<ResultSet> listItr = resSet.getSubResultSet().listIterator();
@@ -1345,11 +1366,13 @@ public class ModelBasedProcessing {
}
}
}
+
return false;
- }
-
-
+
+ }// end of filterMetByThisSet()
+
+
/**
* Collapse for do not output.
*
@@ -1359,14 +1382,14 @@ public class ModelBasedProcessing {
*/
public List<ResultSet> collapseForDoNotOutput(ResultSet resSetVal )
throws AAIException {
-
- // Given a ResultSet -- if it is tagged to NOT be output, then replace it with
- // it's sub-ResultSets if it has any.
+
+ // Given a ResultSet -- if it is tagged to NOT be output, then replace it with
+		// its sub-ResultSets if it has any.
List<ResultSet> colResultSet = new ArrayList<>();
-
+
if( resSetVal.getDoNotOutputFlag().equals("true") ){
			// This ResultSet isn't to be displayed, so replace it with its sub-ResultSets
- List<ResultSet> subResList = resSetVal.getSubResultSet();
+ List<ResultSet> subResList = (ArrayList<ResultSet>) resSetVal.getSubResultSet();
for( int k = 0; k < subResList.size(); k++ ){
List<ResultSet> newSubResList = collapseForDoNotOutput(subResList.get(k));
colResultSet.addAll(newSubResList);
@@ -1376,11 +1399,11 @@ public class ModelBasedProcessing {
// This set will be displayed
colResultSet.add(resSetVal);
}
-
+
// For each result set now at this level, call this same routine to collapse their sub-resultSets
for( int i = 0; i < colResultSet.size(); i++ ){
List<ResultSet> newSubSet = new ArrayList<>();
- List<ResultSet> subResList = colResultSet.get(i).getSubResultSet();
+ List<ResultSet> subResList = (ArrayList<ResultSet>) colResultSet.get(i).getSubResultSet();
for( int n = 0; n < subResList.size(); n++ ){
List<ResultSet> newSubResList = collapseForDoNotOutput(subResList.get(n));
newSubSet.addAll(newSubResList);
@@ -1388,18 +1411,19 @@ public class ModelBasedProcessing {
// Replace the old subResultSet with the collapsed set
colResultSet.get(i).setSubResultSet(newSubSet);
}
-
+
return colResultSet;
- }
-
-
-
+
+ }// End collapseForDoNotOutput()
+
+
+
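
The collapse step above can be pictured on a plain tree: a node flagged do-not-output is replaced by its (already collapsed) children, and the same rule is then applied to the children of whatever survives, so one flagged root can fan out into several independent result sets. A minimal sketch with a hypothetical ResNode stand-in:

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical stand-in for ResultSet with just the fields the collapse needs.
    class ResNode {
        boolean doNotOutput;
        List<ResNode> children = new ArrayList<>();
    }

    public class CollapseSketch {

        // Returns the list of nodes that remain visible at this level.
        static List<ResNode> collapse(ResNode n) {
            List<ResNode> visible = new ArrayList<>();
            if (n.doNotOutput) {
                // Hidden node: promote its collapsed children in its place.
                for (ResNode child : n.children) {
                    visible.addAll(collapse(child));
                }
            } else {
                visible.add(n);
            }
            // For every node we are keeping, collapse its own children the same way.
            for (ResNode keep : visible) {
                List<ResNode> newChildren = new ArrayList<>();
                for (ResNode child : keep.children) {
                    newChildren.addAll(collapse(child));
                }
                keep.children = newChildren;
            }
            return visible;
        }
    }
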
/**
* Collect instance data.
*
* @param transId the trans id
* @param fromAppId the from app id
- * @param thisLevelElemVtx the element vtx at this level
+ * @param thisLevelElemVtx the element vtx at this level
	 * @param thisVertsTrail the trail for this vertex
* @param elementLocationTrail -- trail of nodeTypes that got us here (this element vertex) from the top
* @param validNextStepMap the valid next step map -- hash of valid next steps (node types) for this model
@@ -1421,33 +1445,33 @@ public class ModelBasedProcessing {
Map<String,String> namedQueryElementHash, // only applies to named-query data collecting
String apiVer
) throws AAIException {
-
+
levelCounter++;
-
+
String thisElemVid = thisLevelElemVtx.id().toString();
-
+
if( levelCounter > MAX_LEVELS ) {
throw new AAIException("AAI_6125", "collectInstanceData() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
}
-
+
ResultSet rs = new ResultSet();
if( namedQueryElementHash.containsKey(thisVertsTrail) ){
// We're collecting data for a named-query, so need to see if we need to do anything special
String nqElUuid = namedQueryElementHash.get(thisVertsTrail);
Vertex nqElementVtx = getNodeUsingUniqueId(transId, fromAppId, "named-query-element",
"named-query-element-uuid", nqElUuid);
-
+
String tmpDoNotShow = nqElementVtx.<String>property("do-not-output").orElse(null);
if( tmpDoNotShow != null && tmpDoNotShow.equals("true") ){
rs.setDoNotOutputFlag("true");
}
-
+
if( namedQueryConstraintSaysStop(transId, fromAppId, nqElementVtx, thisLevelElemVtx, apiVer) ){
- // There was a property constraint which says they do not want to collect this vertex or whatever
+ // There was a property constraint which says they do not want to collect this vertex or whatever
// might be below it. Just return the empty rs here.
return rs;
}
-
+
String propLimDesc = nqElementVtx.<String>property("property-limit-desc").orElse(null);
if( (propLimDesc != null) && !propLimDesc.equals("") ){
if (propLimDesc.equalsIgnoreCase("show-all")) {
@@ -1463,13 +1487,13 @@ public class ModelBasedProcessing {
Map<String,Object> tmpPropertyOverRideHash = getNamedQueryPropOverRide(transId, fromAppId, nqElementVtx, thisLevelElemVtx, apiVer);
//System.out.println(" DEBUG --- USING this propertyOverride data set on ResSet [" + tmpPropertyOverRideHash.toString() + "]");
rs.setPropertyOverRideHash(tmpPropertyOverRideHash);
-
+
// See if we need to look up any "unconnected" data that needs to be associated with this result set
Map<String,Object> tmpExtraPropHash = getNamedQueryExtraDataLookup(transId, fromAppId, nqElementVtx, thisLevelElemVtx, apiVer);
//System.out.println(" DEBUG --- ADDING this EXTRA Lookup data to the ResSet [" + tmpExtraPropHash.toString() + "]");
rs.setExtraPropertyHash(tmpExtraPropHash);
}
-
+
rs.setVert(thisLevelElemVtx);
rs.setLocationInModelSubGraph(thisVertsTrail);
if( delKeyHash.containsKey(thisVertsTrail) && delKeyHash.get(thisVertsTrail).equals("T") ){
@@ -1478,10 +1502,10 @@ public class ModelBasedProcessing {
else {
rs.setNewDataDelFlag("F");
}
-
+
// Use Gremlin-pipeline to just look for edges that go to a valid "next-steps"
Collection <String> validNextStepColl = validNextStepMap.get(thisVertsTrail);
-
+
// Because of how we process linkage-points, we may have duplicate node-types in our next-stepMap (for one step)
// So, to keep from looking (and bringing back) the same data twice, we need to make sure our next-steps are unique
Set<String> validNextStepHashSet = new HashSet<>();
@@ -1490,20 +1514,20 @@ public class ModelBasedProcessing {
String targetStepStr = ntcItr.next();
validNextStepHashSet.add(targetStepStr);
}
-
+
List<String> tmpVidsTraversedList = new ArrayList<>();
tmpVidsTraversedList.addAll(vidsTraversed);
tmpVidsTraversedList.add(thisElemVid);
-
+
Iterator <String> ntItr = validNextStepHashSet.iterator();
while( ntItr.hasNext() ){
String targetStep = ntItr.next();
// NOTE: NextSteps can either be just a nodeType, or can be a nodeType plus
- // model-invariant-id-local and model-version-id-local (the two persona properties)
+ // model-invariant-id-local and model-version-id-local (the two persona properties)
// if those need to be checked also.
// When the persona stuff is part of the step, it is a comma separated string.
// Ie. "nodeType,model-inv-id-local,model-version-id-local" (the two "persona" props)
- //
+ //
String targetNodeType = "";
String pmid = "";
String pmv = "";
@@ -1525,7 +1549,7 @@ public class ModelBasedProcessing {
// It's just the nodeType with no other info
targetNodeType = targetStep;
}
-
+
GraphTraversal<Vertex, Vertex> modPipe = null;
if( stepIsJustNT ){
modPipe = this.engine.asAdmin().getReadOnlyTraversalSource().V(thisLevelElemVtx).both().has(AAIProperties.NODE_TYPE, targetNodeType);
@@ -1533,29 +1557,31 @@ public class ModelBasedProcessing {
else {
modPipe = this.engine.asAdmin().getReadOnlyTraversalSource().V(thisLevelElemVtx).both().has(AAIProperties.NODE_TYPE, targetNodeType).has(addDBAliasedSuffix("model-invariant-id"),pmid).has(addDBAliasedSuffix("model-version-id"),pmv);
}
-
+
if( modPipe == null || !modPipe.hasNext() ){
//System.out.println("DEBUG - didn't find any [" + targetStep + "] connected to this guy (which is ok)");
}
else {
while( modPipe.hasNext() ){
- Vertex tmpVert = modPipe.next();
+ Vertex tmpVert = (Vertex) modPipe.next();
String tmpVid = tmpVert.id().toString();
String tmpTrail = thisVertsTrail + "|" + targetStep;
if( !vidsTraversed.contains(tmpVid) ){
- // This is one we would like to use - so we'll include the result set we get for it
+ // This is one we would like to use - so we'll include the result set we get for it
ResultSet tmpResSet = collectInstanceData( transId, fromAppId,
- tmpVert, tmpTrail,
- validNextStepMap, tmpVidsTraversedList,
+ tmpVert, tmpTrail,
+ validNextStepMap, tmpVidsTraversedList,
levelCounter, delKeyHash, namedQueryElementHash, apiVer );
-
+
rs.getSubResultSet().add(tmpResSet);
}
}
}
}
+
return rs;
- }
+
+ } // End of collectInstanceData()
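
As the comment inside the traversal loop above notes, a next-step entry is either a bare nodeType or a comma-separated triple that also carries the two persona properties. A small hypothetical parser for that convention (names and sample values are illustrative only):

    public class NextStepSketch {

        // Hypothetical holder for one parsed next-step entry.
        static class NextStep {
            String nodeType;
            String personaModelInvariantId = "";   // empty when the step is just a nodeType
            String personaModelVersionId = "";     // empty when the step is just a nodeType
            boolean hasPersona;
        }

        static NextStep parse(String targetStep) {
            NextStep step = new NextStep();
            String[] parts = targetStep.split(",");
            step.nodeType = parts[0];
            if (parts.length == 3) {
                // "nodeType,model-invariant-id-local,model-version-id-local"
                step.personaModelInvariantId = parts[1];
                step.personaModelVersionId = parts[2];
                step.hasPersona = true;
            }
            return step;
        }

        public static void main(String[] args) {
            NextStep plain = parse("l-interface");
            NextStep withPersona = parse("generic-vnf,invId-123,verId-456");
            System.out.println(plain.nodeType + " hasPersona=" + plain.hasPersona);
            System.out.println(withPersona.nodeType + " hasPersona=" + withPersona.hasPersona);
        }
    }
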
/**
@@ -1572,17 +1598,17 @@ public class ModelBasedProcessing {
public Multimap<String, String> genTopoMap4ModelVer(String transId, String fromAppId,
Vertex modelVerVertex, String modelVerId)
throws AAIException {
-
+
if( modelVerVertex == null ){
throw new AAIException("AAI_6114", "null modelVerVertex passed to genTopoMap4ModelVer()");
}
-
+
Multimap<String, String> initialEmptyMap = ArrayListMultimap.create();
List<String> vidsTraversed = new ArrayList<>();
String modelType = getModelTypeFromModelVer( modelVerVertex, "" );
if( modelType.equals("widget") ){
			// A widget model by itself does not have a topology.  That is - it has no "model-elements" which
- // define how it is connected to other things. All it has is a name which ties it to
+ // define how it is connected to other things. All it has is a name which ties it to
// an aai-node-type
Iterator<Vertex> vertI= this.traverseIncidentEdges(EdgeType.TREE, modelVerVertex, "model-element");
if( vertI != null && vertI.hasNext() ){
@@ -1593,39 +1619,42 @@ public class ModelBasedProcessing {
return initialEmptyMap;
}
}
-
+
String firstModelVerId = modelVerVertex.<String>property("model-version-id").orElse(null);
String firstModelVersion = modelVerVertex.<String>property("model-version").orElse(null);
if( firstModelVerId == null || firstModelVerId.equals("") || firstModelVersion == null || firstModelVersion.equals("") ){
throw new AAIException("AAI_6132", "Bad Model Definition: Bad model-version-id or model-version. model-version-id = "
+ modelVerId);
}
-
+
Vertex firstElementVertex = getTopElementForSvcOrResModelVer( modelVerVertex, "" );
Vertex firstEleModVerVtx = getModelVerThatElementRepresents( firstElementVertex, "" );
- String firstElemModelType = getModelTypeFromModelVer( firstEleModVerVtx, "" );
+ String firstElemModelType = getModelTypeFromModelVer( firstEleModVerVtx, "" );
if( ! firstElemModelType.equals("widget") ){
throw new AAIException("AAI_6132", "Bad Model Definition: First element must correspond to a widget type model. Model UUID = "
+ modelVerId);
}
-
+
Vertex firstModVtx = getModelGivenModelVer( modelVerVertex, "" );
String firstModelInvId = firstModVtx.<String>property("model-invariant-id").orElse(null);
if( firstModelInvId == null || firstModelInvId.equals("") ){
throw new AAIException("AAI_6132", "Bad Model Definition: Could not find model.model-invariant-id given model-ver.model-version-id = "
+ modelVerId);
}
-
- return collectTopology4ModelVer( transId, fromAppId,
+
+ Multimap<String, String> collectedMap = collectTopology4ModelVer( transId, fromAppId,
firstElementVertex, "", initialEmptyMap, vidsTraversed, 0, null, firstModelInvId, firstModelVersion );
- }
+
+ return collectedMap;
+
+ } // End of genTopoMap4ModelVer()
public List<String> makeSureItsAnArrayList( String listStringVal ){
// We're sometimes getting a String back on db properties that should be ArrayList<String>
		// Seems to be how they're defined in OXM - whether they use an "xml-wrapper" or not
// Need to translate them into ArrayLists sometimes...
-
+
List<String> retArrList = new ArrayList<String>();
String listString = listStringVal;
listString = listString.replace(" ", "");
@@ -1638,6 +1667,7 @@ public class ModelBasedProcessing {
retArrList.add(pieces[i]);
}
}
+
return retArrList;
}
@@ -1652,33 +1682,33 @@ public class ModelBasedProcessing {
*/
public Map<String, Vertex> getModConstraintHash(Vertex modelElementVtx, Map<String, Vertex> currentHash )
throws AAIException {
-
- // For a given model-element vertex, look to see if there are any "model-constraint" elements that is has
+
+		// For a given model-element vertex, look to see if there are any "model-constraint" elements that it has
// an OUT "uses" edge to. If it does, then get any "constrained-element-set" nodes that are pointed to
// by the "model-constraint". That will be the replacement "constrained-element-set". The UUID of the
// "constrained-element-set" that it is supposed to replace is found in the property:
- // model-constraint.constrained-element-set-uuid-to-replace
+ // model-constraint.constrained-element-set-uuid-to-replace
//
- // For now, that is the only type of model-constraint allowed, so that is all we will look for.
- // Pass back any of these "constrained-element-set" nodes along with any that were passed in by
+ // For now, that is the only type of model-constraint allowed, so that is all we will look for.
+ // Pass back any of these "constrained-element-set" nodes along with any that were passed in by
// the "currentHash" parameter.
-
+
if( modelElementVtx == null ){
String msg = " null modelElementVtx passed to getModConstraintHash() ";
throw new AAIException("AAI_6114", msg);
}
-
+
String modelType = modelElementVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
if( modelType == null || (!modelType.equals("model-element")) ){
String msg = " getModConstraintHash() called with wrong type model: [" + modelType + "]. ";
throw new AAIException("AAI_6114", msg);
}
-
+
Map<String, Vertex> thisHash = new HashMap<>();
if( currentHash != null ){
thisHash.putAll(currentHash);
}
-
+
int count = 0;
List<Vertex> modelConstraintArray = new ArrayList<>();
Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, modelElementVtx, "model-constraint");
@@ -1691,7 +1721,7 @@ public class ModelBasedProcessing {
count++;
}
}
-
+
if( count > 0 ) {
for( int i = 0; i < count; i++ ){
Vertex vtxOfModelConstraint = modelConstraintArray.get(i);
@@ -1700,7 +1730,7 @@ public class ModelBasedProcessing {
// constrained-element-set to use in its place
Iterator<Vertex> mvertI = this.traverseIncidentEdges(EdgeType.TREE, vtxOfModelConstraint, "constrained-element-set");
while( mvertI != null && mvertI.hasNext() ){
- // There better only be one...
+ // There better only be one...
Vertex tmpVert = mvertI.next();
String connectToType = tmpVert.<String>property(AAIProperties.NODE_TYPE).orElse(null);
if( (connectToType != null) && connectToType.equals("constrained-element-set") ){
@@ -1715,9 +1745,10 @@ public class ModelBasedProcessing {
// Didn't find anything to add, so just return what they passed in.
return currentHash;
}
- }
-
-
+
+ } // End of getModConstraintHash()
+
+
/**
* Gets the top element vertex for service or resource model.
*
@@ -1727,48 +1758,50 @@ public class ModelBasedProcessing {
*/
public Vertex getTopElementForSvcOrResModelVer(Vertex modelVerVtx, String trail )
throws AAIException {
-
+
// For a "resource" or "service" type model, return the "top" element in that model
if( modelVerVtx == null ){
String msg = " null modelVertex passed to getTopoElementForSvcOrResModelVer() at [" + trail + "]. ";
throw new AAIException("AAI_6114", msg);
}
-
+
String modelVerId = modelVerVtx.<String>property("model-version-id").orElse(null);
if( modelVerId == null ){
String nt = modelVerVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
if( nt != null && !nt.equals("model-ver") ){
- String msg = "Illegal model defined: model element pointing to nodeType: ["
+ String msg = "Illegal model defined: model element pointing to nodeType: ["
+ nt + "], should be pointing to: [model-ver] at [" + trail + "]. ";
throw new AAIException("AAI_6132", msg);
}
}
-
+
Vertex firstElementVertex = null;
-
+
Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, modelVerVtx, "model-element");
int elCount = 0;
while( vertI != null && vertI.hasNext() ){
elCount++;
firstElementVertex = vertI.next();
}
-
+
if( elCount > 1 ){
- String msg = "Illegal model defined: More than one first element defined for model-ver-id = " +
+ String msg = "Illegal model defined: More than one first element defined for model-ver-id = " +
modelVerId + " at [" + trail + "]. ";
throw new AAIException("AAI_6132", msg);
}
-
+
if( firstElementVertex == null ){
- String msg = "Could not find first model element for model-ver-id = "
+ String msg = "Could not find first model element for model-ver-id = "
+ modelVerId + " at [" + trail + "]. ";
throw new AAIException("AAI_6132", msg);
}
+
return firstElementVertex;
- }
-
-
+
+ } // End of getTopElementForSvcOrResModelVer()
+
+
/**
* Gets the named query prop over ride.
*
@@ -1783,32 +1816,34 @@ public class ModelBasedProcessing {
public Map<String,Object> getNamedQueryPropOverRide(String transId, String fromAppId,
Vertex namedQueryElementVertex, Vertex instanceVertex, String apiVer )
throws AAIException {
-
+
// If this model-element says that they want an alternative set of properties returned, then pull that
// data out of the instance vertex.
-
+
Map<String,Object> altPropHash = new HashMap<>();
-
+
if( namedQueryElementVertex == null ){
String msg = " null namedQueryElementVertex passed to getNamedQueryPropOverRide() ";
throw new AAIException("AAI_6114", msg);
}
-
+
List<String> propCollectList = new ArrayList<>();
Iterator <VertexProperty<Object>> vpI = namedQueryElementVertex.properties("property-collect-list");
while( vpI.hasNext() ){
propCollectList.add((String)vpI.next().value());
}
-
+
for( int i = 0; i < propCollectList.size(); i++ ){
String thisPropName = propCollectList.get(i);
Object instanceVal = instanceVertex.<Object>property(thisPropName).orElse(null);
altPropHash.put(thisPropName, instanceVal);
}
+
return altPropHash;
- }
-
+
+ } // End of getNamedQueryPropOverRide()
+
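
The property-collect-list read above uses the TinkerPop multi-valued property iterator rather than a single property() call. A minimal helper capturing that idiom against the plain org.apache.tinkerpop Vertex interface; the helper name is ours, the property key is the one used above:

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    import org.apache.tinkerpop.gremlin.structure.Vertex;
    import org.apache.tinkerpop.gremlin.structure.VertexProperty;

    public class MultiValuedPropertySketch {

        // Collects every value stored under a multi-valued vertex property,
        // e.g. "property-collect-list" on a named-query-element vertex.
        static List<String> readAllValues(Vertex vertex, String propertyKey) {
            List<String> values = new ArrayList<>();
            Iterator<VertexProperty<Object>> it = vertex.properties(propertyKey);
            while (it.hasNext()) {
                values.add(String.valueOf(it.next().value()));
            }
            return values;
        }
    }
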
/**
* Named query constraint says stop.
*
@@ -1823,11 +1858,11 @@ public class ModelBasedProcessing {
public Boolean namedQueryConstraintSaysStop(String transId, String fromAppId,
Vertex namedQueryElementVertex, Vertex instanceVertex, String apiVer )
throws AAIException {
-
+
// For each (if any) property-constraint defined for this named-query-element, we will evaluate if
		// the constraint is met or not-met.  If there are constraints and any are not-met, then
// we return "true".
-
+
if( namedQueryElementVertex == null ){
String msg = " null namedQueryElementVertex passed to namedQueryConstraintSaysStop() ";
throw new AAIException("AAI_6114", msg);
@@ -1836,15 +1871,15 @@ public class ModelBasedProcessing {
String msg = " null instanceVertex passed to namedQueryConstraintSaysStop() ";
throw new AAIException("AAI_6114", msg);
}
-
+
Iterator<Vertex> constrPipe = this.traverseIncidentEdges(EdgeType.TREE, namedQueryElementVertex, "property-constraint");
if( constrPipe == null || !constrPipe.hasNext() ){
// There's no "property-constraint" defined for this named-query-element. No problem.
return false;
}
-
+
while( constrPipe.hasNext() ){
- Vertex constrVtx = constrPipe.next();
+ Vertex constrVtx = (Vertex) constrPipe.next();
// We found a property constraint that we will need to check
String conType = constrVtx.<String>property("constraint-type").orElse(null);
if( (conType == null) || conType.equals("")){
@@ -1861,13 +1896,13 @@ public class ModelBasedProcessing {
String msg = " Bad property-constraint (propVal) found in Named Query definition. ";
throw new AAIException("AAI_6133", msg);
}
-
+
// See if that constraint is met or not
String val = instanceVertex.<String>property(propName).orElse(null);
if( val == null ){
val = "";
}
-
+
if( conType.equals("EQUALS") ){
if( !val.equals(propVal) ){
// This constraint was not met
@@ -1885,10 +1920,12 @@ public class ModelBasedProcessing {
throw new AAIException("AAI_6133", msg);
}
}
- return false;
- }
-
+
+ return false;
+
+ } // End of namedQueryConstraintSaysStop()
+
/**
* Gets the named query extra data lookup.
*
@@ -1903,10 +1940,10 @@ public class ModelBasedProcessing {
public Map<String,Object> getNamedQueryExtraDataLookup(String transId, String fromAppId,
Vertex namedQueryElementVertex, Vertex instanceVertex, String apiVer )
throws AAIException {
-
+
// For each (if any) related-lookup defined for this named-query-element, we will go and
		// try to find it.  All the related-lookup data will get put in a hash and returned.
-
+
if( namedQueryElementVertex == null ){
String msg = " null namedQueryElementVertex passed to getNamedQueryExtraDataLookup() ";
throw new AAIException("AAI_6114", msg);
@@ -1915,22 +1952,22 @@ public class ModelBasedProcessing {
String msg = " null instanceVertex passed to getNamedQueryExtraDataLookup() ";
throw new AAIException("AAI_6114", msg);
}
-
+
Map<String,Object> retHash = new HashMap<>();
-
+
Iterator<Vertex> lookPipe = this.traverseIncidentEdges(EdgeType.TREE, namedQueryElementVertex, "related-lookup");
if( lookPipe == null || !lookPipe.hasNext() ){
// There's no "related-lookup" defined for this named-query-element. No problem.
return retHash;
}
-
+
while( lookPipe.hasNext() ){
- Vertex relLookupVtx = lookPipe.next();
+ Vertex relLookupVtx = (Vertex) lookPipe.next();
// We found a related-lookup record to try and use
String srcProp = relLookupVtx.<String>property("source-node-property").orElse(null);
String srcNodeType = relLookupVtx.<String>property("source-node-type").orElse(null);
srcProp = getPropNameWithAliasIfNeeded(srcNodeType, srcProp);
-
+
if( (srcProp == null) || srcProp.equals("")){
String msg = " Bad related-lookup (source-node-property) found in Named Query definition. ";
throw new AAIException("AAI_6133", msg);
@@ -1942,38 +1979,38 @@ public class ModelBasedProcessing {
}
String targetProp = relLookupVtx.<String>property("target-node-property").orElse(null);
targetProp = getPropNameWithAliasIfNeeded(targetNodeType, targetProp);
-
+
if( (targetProp == null) || targetProp.equals("")){
String msg = " Bad related-lookup (target-node-property) found in Named Query definition. ";
throw new AAIException("AAI_6133", msg);
}
-
+
List<String> propCollectList = new ArrayList<>();
Iterator <VertexProperty<Object>> vpI = relLookupVtx.properties("property-collect-list");
while( vpI.hasNext() ){
propCollectList.add((String)vpI.next().value());
}
- // Use the value from the source to see if we can find ONE target record using the
+ // Use the value from the source to see if we can find ONE target record using the
// value from the source
String valFromInstance = instanceVertex.<String>property(srcProp).orElse(null);
if( valFromInstance == null ){
// if there is no key to use to go look up something, we should end it here and just
- // note what happened - no need to try to look something up by an empty key
- LOGGER.debug("WARNING - the instance data node of type [" + srcNodeType
- + "] did not have a value for property [" + srcProp
+ // note what happened - no need to try to look something up by an empty key
+ LOGGER.debug("WARNING - the instance data node of type [" + srcNodeType
+ + "] did not have a value for property [" + srcProp
+ "], so related-lookup is being abandoned.");
return retHash;
}
-
+
Map<String,Object> propHash = new HashMap<String,Object>();
propHash.put(targetProp, valFromInstance);
-
+
Optional<Vertex> result = dbMethHelper.locateUniqueVertex(targetNodeType, propHash);
if (!result.isPresent()) {
// If it can't find the lookup node, don't fail, just log that it couldn't be found ---
- LOGGER.debug("WARNING - Could not find lookup node that corresponds to nodeType ["
- + targetNodeType + "] propertyName = [" + srcProp
+ LOGGER.debug("WARNING - Could not find lookup node that corresponds to nodeType ["
+ + targetNodeType + "] propertyName = [" + srcProp
+ "], propVal = [" + valFromInstance
+ "] so related-lookup is being abandoned.");
return retHash;
@@ -1987,20 +2024,21 @@ public class ModelBasedProcessing {
Object valObj = tmpVtx.<Object>property(tmpPropName).orElse(null);
String lookupKey = targetNodeType + "." + tmpPropName;
retHash.put(lookupKey, valObj);
-
+
}
}
}
-
+
return retHash;
- }
+
+ } // End of getNamedQueryExtraDataLookup()
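
The lookup above deliberately treats a missing target node as a soft failure: it logs, returns what it has, and never aborts the whole named query. A tiny sketch of that Optional-based pattern with a hypothetical finder standing in for dbMethHelper.locateUniqueVertex():

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Optional;

    public class RelatedLookupSketch {

        // Hypothetical finder standing in for dbMethHelper.locateUniqueVertex(...).
        static Optional<Map<String, Object>> locateUniqueNode(String nodeType, Map<String, Object> keyProps) {
            return Optional.empty();   // pretend nothing was found
        }

        static Map<String, Object> lookup(String targetNodeType, String targetProp, Object valFromInstance) {
            Map<String, Object> retHash = new HashMap<>();
            Map<String, Object> keyProps = new HashMap<>();
            keyProps.put(targetProp, valFromInstance);

            Optional<Map<String, Object>> result = locateUniqueNode(targetNodeType, keyProps);
            if (!result.isPresent()) {
                // Soft failure: log-and-continue rather than aborting the whole named query.
                System.out.println("WARNING - could not find " + targetNodeType
                        + " with " + targetProp + " = [" + valFromInstance + "]");
                return retHash;
            }
            // Keys in the returned hash are qualified as "<nodeType>.<propertyName>".
            for (Map.Entry<String, Object> e : result.get().entrySet()) {
                retHash.put(targetNodeType + "." + e.getKey(), e.getValue());
            }
            return retHash;
        }
    }
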
/**
* Collect NQ element hash.
*
* @param transId the trans id
* @param fromAppId the from app id
- * @param thisLevelElemVtx the element verrtx for this level
+	 * @param thisLevelElemVtx the element vertex for this level
* @param incomingTrail the incoming trail -- trail of nodeTypes that got us here (this nq-element vertex) from the top
* @param currentHash the current hash
	 * @param currentHash the map that got us to this point (that we will use as the base of the map we will return)
@@ -2013,21 +2051,21 @@ public class ModelBasedProcessing {
Vertex thisLevelElemVtx, String incomingTrail,
Map<String,String> currentHash, ArrayList <String> vidsTraversed,
int levelCounter ) throws AAIException {
-
+
levelCounter++;
Map<String, String> thisHash = new HashMap<>();
thisHash.putAll(currentHash);
-
+
if( levelCounter > MAX_LEVELS ) {
throw new AAIException("AAI_6125", "collectNQElementHash() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
}
String thisGuysTrail = "";
String thisElemVid = thisLevelElemVtx.id().toString();
-
+
// Find out what widget (and thereby what aai-node-type) this element represents.
String thisElementNodeType = getNqElementWidgetType( transId, fromAppId, thisLevelElemVtx, incomingTrail );
-
+
if( incomingTrail == null || incomingTrail.equals("") ){
// This is the first one
thisGuysTrail = thisElementNodeType;
@@ -2036,28 +2074,28 @@ public class ModelBasedProcessing {
thisGuysTrail = incomingTrail + "|" + thisElementNodeType;
}
vidsTraversed.add(thisElemVid);
-
+
String nqElementUuid = thisLevelElemVtx.<String>property("named-query-element-uuid").orElse(null);
if( nqElementUuid == null || nqElementUuid.equals("") ){
String msg = " named-query element UUID not found at trail = [" + incomingTrail + "].";
throw new AAIException("AAI_6133", msg);
}
- thisHash.put(thisGuysTrail, nqElementUuid );
-
+ thisHash.put(thisGuysTrail, nqElementUuid );
+
// Now go "down" and look at the sub-elements pointed to so we can get their data.
Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, thisLevelElemVtx, "named-query-element");
while( vertI != null && vertI.hasNext() ){
Vertex tmpVert = vertI.next();
String vid = tmpVert.id().toString();
Map<String,Object> elementHash = new HashMap<String, Object>();
-
+
String connectToType = tmpVert.<String>property(AAIProperties.NODE_TYPE).orElse(null);
if( connectToType != null && connectToType.equals("named-query-element") ){
// This is what we would expect
elementHash.put(vid, tmpVert);
}
else {
- String msg = " named query element has [connectedTo] edge to improper nodeType= ["
+ String msg = " named query element has [connectedTo] edge to improper nodeType= ["
+ connectToType + "] trail = [" + incomingTrail + "].";
throw new AAIException("AAI_6133", msg);
}
@@ -2066,15 +2104,16 @@ public class ModelBasedProcessing {
String tmpElVid = elVert.id().toString();
if( !vidsTraversed.contains(tmpElVid) ){
				// This is one we would like to use - so we'll recursively get its result set to add to ours
- Map<String, String> tmpHash = collectNQElementHash( transId, fromAppId,
+ Map<String, String> tmpHash = collectNQElementHash( transId, fromAppId,
elVert, thisGuysTrail, currentHash, vidsTraversed, levelCounter);
thisHash.putAll(tmpHash);
}
- }
+ }
}
return thisHash;
- }
-
+
+ } // End of collectNQElementHash()
+
/**
* Collect delete key hash.
@@ -2100,47 +2139,47 @@ public class ModelBasedProcessing {
int levelCounter, Map<String, Vertex> modConstraintHash,
String overRideModelId, String overRideModelVersionId )
throws AAIException {
-
+
levelCounter++;
Map<String, String> thisHash = new HashMap<>();
thisHash.putAll(currentHash);
-
+
if( levelCounter > MAX_LEVELS ) {
throw new AAIException("AAI_6125", "collectDeleteKeyHash() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
}
String thisGuysTrail = "";
String thisElemVid = thisLevelElemVtx.id().toString();
Map<String, Vertex> modConstraintHash2Use = null;
-
- // If this element represents a resource or service model, then we will replace this element with
+
+ // If this element represents a resource or service model, then we will replace this element with
// the "top" element of that resource or service model. That model-element already points to its
- // topology, so it will graft in that model's topology.
+ // topology, so it will graft in that model's topology.
// EXCEPT - if this element has "linkage-points" defined, then we need to do some extra
// processing for how we join to that model and will not try to go any "deeper".
List<String> linkagePtList = new ArrayList<>();
Iterator <VertexProperty<Object>> vpI = thisLevelElemVtx.properties("linkage-points");
- // I am not sure why, but since "linkage-points" is an xml-element-wrapper in the OXM definition,
+ // I am not sure why, but since "linkage-points" is an xml-element-wrapper in the OXM definition,
// we get back the whole array of Strings in one String - but still use the "vtx.properties()" to
// get it - but only look at the first thing returned by the iterator.
if( vpI.hasNext() ){
String tmpLinkageThing = (String)vpI.next().value();
linkagePtList = makeSureItsAnArrayList( tmpLinkageThing );
- }
-
+ }
+
if( linkagePtList != null && !linkagePtList.isEmpty() ){
// Whatever this element is - we are connecting to it via a linkage-point
// We will figure out what to do and then return without going any deeper
String elemFlag = thisLevelElemVtx.<String>property("new-data-del-flag").orElse(null);
-
+
Set<String> linkageConnectNodeTypes = getLinkageConnectNodeTypes( linkagePtList );
Iterator <?> linkNtIter = linkageConnectNodeTypes.iterator();
String incTrail = "";
if( incomingTrail != null && !incomingTrail.equals("") ){
incTrail = incomingTrail + "|";
}
-
+
while( linkNtIter.hasNext() ){
			// The 'trail' (or trails) for this element should just be to the first-contact on the linkage point
String linkTrail = incTrail + linkNtIter.next();
@@ -2161,14 +2200,14 @@ public class ModelBasedProcessing {
}
return thisHash;
}
-
+
// ----------------------------------------------------------------------------
- // If we got to here, then this was not an element that used a linkage-point
+ // If we got to here, then this was not an element that used a linkage-point
// ----------------------------------------------------------------------------
-
+
// Find out what widget-model (and thereby what aai-node-type) this element represents.
// Even if this element is pointing to a service or resource model, it must have a
- // first element which is a single widget-type model.
+ // first element which is a single widget-type model.
String thisElementNodeType = getModElementWidgetType( thisLevelElemVtx, incomingTrail );
String firstElementModelInfo = "";
@@ -2186,11 +2225,11 @@ public class ModelBasedProcessing {
// could be a resource or service model.
firstElementModelInfo = "," + overRideModelId + "," + overRideModelVersionId;
}
- }
+ }
else if( nodeTypeSupportsPersona(thisElementNodeType) ){
firstElementModelInfo = "," + subModelFirstModInvId + "," + subModelFirstVerId;
}
-
+
if( incomingTrail.equals("") ){
// This is the first one
thisGuysTrail = thisElementNodeType + firstElementModelInfo;
@@ -2198,27 +2237,27 @@ public class ModelBasedProcessing {
else {
thisGuysTrail = incomingTrail + "|" + thisElementNodeType + firstElementModelInfo;
}
-
+
String tmpFlag = "F";
Boolean stoppedByASvcOrResourceModelElement = false;
if( modType.equals("widget") ){
elementVtxForThisLevel = thisLevelElemVtx;
- // For the element-model for the widget at this level, record it's delete flag
+			// For the element-model for the widget at this level, record its delete flag
tmpFlag = elementVtxForThisLevel.<String>property("new-data-del-flag").orElse(null);
}
else {
- // For an element that is referring to a resource or service model, we replace
+ // For an element that is referring to a resource or service model, we replace
// this element with the "top" element for that resource/service model so that the
// topology of that resource/service model will be included in this topology.
String modelVerId = thisElementsModelVerVtx.<String>property("model-version-id").orElse(null);
- if( subModelFirstModInvId == null || subModelFirstModInvId.equals("")
+ if( subModelFirstModInvId == null || subModelFirstModInvId.equals("")
|| subModelFirstVerId == null || subModelFirstVerId.equals("") ){
throw new AAIException("AAI_6132", "Bad Model Definition: Bad model-invariant-id or model-version-id. Model-version-id = " +
modelVerId + ", at [" + incomingTrail + "]");
}
-
- // BUT -- if the model-element HERE at the resource/service level does NOT have
- // it's new-data-del-flag set to "T", then we do not need to go down into the
+
+ // BUT -- if the model-element HERE at the resource/service level does NOT have
+			// its new-data-del-flag set to "T", then we do not need to go down into the
// sub-model looking for delete-able things.
tmpFlag = thisLevelElemVtx.<String>property("new-data-del-flag").orElse(null);
@@ -2229,10 +2268,10 @@ public class ModelBasedProcessing {
else {
stoppedByASvcOrResourceModelElement = true;
}
- // For the element-model for the widget at this level, record it's delete flag
+			// For the element-model for the widget at this level, record its delete flag
tmpFlag = elementVtxForThisLevel.<String>property("new-data-del-flag").orElse(null);
}
-
+
String flag2Use = "F"; // by default we'll use "F" for the delete flag
if( ! stoppedByASvcOrResourceModelElement ){
// Since we haven't been stopped by a resource/service level "F", we can look at the lower level flag
@@ -2254,8 +2293,8 @@ public class ModelBasedProcessing {
flag2Use = "T";
}
}
-
- thisHash.put(thisGuysTrail, flag2Use);
+
+ thisHash.put(thisGuysTrail, flag2Use);
if( ! stoppedByASvcOrResourceModelElement ){
// Since we haven't been stopped by a resource/service level "F", we will continue to
// go "down" and look at the elements pointed to so we can get their data.
@@ -2264,7 +2303,7 @@ public class ModelBasedProcessing {
Vertex tmpVert = vertI.next();
String vid = tmpVert.id().toString();
Map<String,Object> elementHash = new HashMap<String, Object>();
-
+
String connectToType = tmpVert.<String>property(AAIProperties.NODE_TYPE).orElse(null);
if( connectToType != null && connectToType.equals("model-element") ){
// A nice, regular old model-element
@@ -2281,15 +2320,15 @@ public class ModelBasedProcessing {
modConstraintHash.remove(constrainedElementSetUuid);
}
else {
- elementHash = getNextStepElementsFromSet( tmpVert );
+ elementHash = getNextStepElementsFromSet( tmpVert );
}
}
else {
- String msg = " model-element has [connectedTo] edge to improper nodeType= ["
+ String msg = " model-element has [connectedTo] edge to improper nodeType= ["
+ connectToType + "] trail = [" + incomingTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
+
for( Map.Entry<String, Object> entry : elementHash.entrySet() ){
Vertex elVert = (Vertex)(entry.getValue());
String tmpElVid = elVert.id().toString();
@@ -2297,19 +2336,20 @@ public class ModelBasedProcessing {
check4EdgeRule(tmpElNT, thisElementNodeType);
if( !vidsTraversed.contains(tmpElVid) ){
				// This is one we would like to use - so we'll recursively get its result set to add to ours
- Map<String, String> tmpHash = collectDeleteKeyHash( transId, fromAppId,
- elVert, thisGuysTrail,
+ Map<String, String> tmpHash = collectDeleteKeyHash( transId, fromAppId,
+ elVert, thisGuysTrail,
currentHash, vidsTraversed, levelCounter, modConstraintHash2Use,
"", "" );
thisHash.putAll(tmpHash);
}
- }
+ }
}
}
return thisHash;
- }
-
-
+
+ } // End of collectDeleteKeyHash()
+
+
/**
* Gets the linkage connect node types.
*
@@ -2319,18 +2359,18 @@ public class ModelBasedProcessing {
*/
public Set<String> getLinkageConnectNodeTypes(List<String> linkagePtList )
throws AAIException {
- // linkage points are a path from the top of a model to where we link in.
- // This method wants to just bring back a list of distinct last items.
+ // linkage points are a path from the top of a model to where we link in.
+ // This method wants to just bring back a list of distinct last items.
// Ie: for the input with these two: "pserver|lag-link|l-interface" and "pserver|p-interface|l-interface"
// it would just return a single item, "l-interface" since both linkage points end in that same node-type.
-
+
Set<String> linkPtSet = new HashSet<>();
-
+
if( linkagePtList == null ){
String detail = " Bad (null) linkagePtList passed to getLinkageConnectNodeTypes() ";
throw new AAIException("AAI_6125", detail);
}
-
+
for( int i = 0; i < linkagePtList.size(); i++ ){
String [] trailSteps = linkagePtList.get(i).split("\\|");
if( trailSteps == null || trailSteps.length == 0 ){
@@ -2340,11 +2380,12 @@ public class ModelBasedProcessing {
String lastStepNT = trailSteps[trailSteps.length - 1];
linkPtSet.add(lastStepNT);
}
-
+
return linkPtSet;
- }
-
-
+
+ }// End getLinkageConnectNodeTypes()
+
+
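
The example in the comment above ("pserver|lag-link|l-interface" and "pserver|p-interface|l-interface" both reducing to "l-interface") is easy to reproduce in isolation; a short sketch of just that last-step extraction:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class LinkagePointSketch {

        // Keep only the distinct final node-type of each pipe-delimited linkage trail.
        static Set<String> lastStepNodeTypes(List<String> linkagePtList) {
            Set<String> linkPtSet = new HashSet<>();
            for (String trail : linkagePtList) {
                String[] steps = trail.split("\\|");
                linkPtSet.add(steps[steps.length - 1]);
            }
            return linkPtSet;
        }

        public static void main(String[] args) {
            List<String> trails = Arrays.asList(
                    "pserver|lag-link|l-interface",
                    "pserver|p-interface|l-interface");
            System.out.println(lastStepNodeTypes(trails));   // [l-interface]
        }
    }
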
/**
* Collect topology for model-ver.
*
@@ -2368,34 +2409,34 @@ public class ModelBasedProcessing {
int levelCounter, Map<String, Vertex> modConstraintHash,
String overRideModelInvId, String overRideModelVersionId )
throws AAIException {
-
+
levelCounter++;
Multimap<String, String> thisMap = ArrayListMultimap.create();
thisMap.putAll(currentMap);
-
+
if( levelCounter > MAX_LEVELS ) {
throw new AAIException("AAI_6125", "collectTopology4ModelVer() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
}
String thisGuysTrail = "";
String thisElemVid = thisLevelElemVtx.id().toString();
Map<String, Vertex> modConstraintHash2Use = null;
-
- // If this element represents a resource or service model, then we will replace this element with
+
+ // If this element represents a resource or service model, then we will replace this element with
// the "top" element of that resource or service model. That model-element already points to its
- // topology, so it will graft in that model's topology.
+ // topology, so it will graft in that model's topology.
		// EXCEPT - if this element has "linkage-points" defined, then we need to do some extra
// processing for how we join to that model.
-
+
// Find out what widget-model (and thereby what aai-node-type) this element represents.
// Even if this element is pointing to a service or resource model, it must have a
- // first element which is a single widget-type model.
+ // first element which is a single widget-type model.
String firstElementModelInfo = "";
String thisElementNodeType = getModElementWidgetType( thisLevelElemVtx, incomingTrail );
if( nodeTypeSupportsPersona(thisElementNodeType) && overRideModelInvId != null && !overRideModelInvId.equals("") ){
firstElementModelInfo = "," + overRideModelInvId + "," + overRideModelVersionId;
}
-
+
Vertex elementVtxForThisLevel = null;
Vertex thisElementsModelVerVtx = getModelVerThatElementRepresents( thisLevelElemVtx, incomingTrail );
String subModelFirstModInvId = "";
@@ -2406,12 +2447,12 @@ public class ModelBasedProcessing {
// For an element that is referring to a resource or service model, we replace this
		// element with the "top" element for that resource/service model so that the
// topology of that resource/service model gets included in this topology.
- // -- Note - since that top element of a service or resource model will point to a widget model,
+ // -- Note - since that top element of a service or resource model will point to a widget model,
// we have to track what modelId/version it really maps so we can make our recursive call
Vertex thisElementsModelVtx = getModelGivenModelVer(thisElementsModelVerVtx, incomingTrail);
subModelFirstModInvId = thisElementsModelVtx.<String>property("model-invariant-id").orElse(null);
subModelFirstModVerId = thisElementsModelVerVtx.<String>property("model-version-id").orElse(null);
-
+
if( nodeTypeSupportsPersona(thisElementNodeType) ){
modInfo4Trail = "," + subModelFirstModInvId + "," + subModelFirstModVerId;
}
@@ -2420,13 +2461,13 @@ public class ModelBasedProcessing {
throw new AAIException("AAI_6132", "Bad Model Definition: Bad model-invariant-id or model-version-id. Model-ver-id = " + modelVerId);
}
- elementVtxForThisLevel = getTopElementForSvcOrResModelVer(thisElementsModelVerVtx, incomingTrail);
+ elementVtxForThisLevel = getTopElementForSvcOrResModelVer(thisElementsModelVerVtx, incomingTrail);
modConstraintHash2Use = getModConstraintHash( thisLevelElemVtx, modConstraintHash );
}
else {
elementVtxForThisLevel = thisLevelElemVtx;
}
-
+
if( incomingTrail.equals("") ){
// This is the first one
thisGuysTrail = thisElementNodeType + firstElementModelInfo;
@@ -2434,7 +2475,7 @@ public class ModelBasedProcessing {
else {
thisGuysTrail = incomingTrail + "|" + thisElementNodeType + modInfo4Trail;
}
-
+
// We only want to ensure that a particular element does not repeat on a single "branch".
// It could show up on other branches in the case where it is a sub-model which is being
// used in more than one place.
@@ -2442,7 +2483,7 @@ public class ModelBasedProcessing {
List<String> thisTrailsVidsTraversed = new ArrayList <String>();
thisTrailsVidsTraversed.addAll(vidsTraversed);
thisTrailsVidsTraversed.add(thisElemVid);
-
+
// Look at the elements pointed to at this level and add on their data
Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, elementVtxForThisLevel, "model-element", "constrained-element-set");
@@ -2466,35 +2507,35 @@ public class ModelBasedProcessing {
modConstraintHash.remove(constrainedElementSetUuid);
}
else {
- elementHash = getNextStepElementsFromSet( tmpVert );
+ elementHash = getNextStepElementsFromSet( tmpVert );
}
}
else {
- String msg = " model element has [connectedTo] edge to improper nodeType= ["
+ String msg = " model element has [connectedTo] edge to improper nodeType= ["
+ connectToType + "] trail = [" + incomingTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
+
for( Map.Entry<String, Object> entry : elementHash.entrySet() ){
Vertex elVert = (Vertex)(entry.getValue());
String tmpElVid = elVert.id().toString();
String tmpElNT = getModElementWidgetType( elVert, thisGuysTrail );
String tmpElStepName = getModelElementStepName( elVert, thisGuysTrail);
-
+
List<String> linkagePtList = new ArrayList <String>();
Iterator <VertexProperty<Object>> vpI = elVert.properties("linkage-points");
-
- // I am not sure why, but since "linkage-points" is an xml-element-wrapper in the OXM definition,
+
+ // I am not sure why, but since "linkage-points" is an xml-element-wrapper in the OXM definition,
// we get back the whole array of Strings in one String - but still use the "vtx.properties()" to
// get it - but only look at the first thing returned by the iterator.
if( vpI.hasNext() ){
String tmpLinkageThing = (String)vpI.next().value();
linkagePtList = makeSureItsAnArrayList( tmpLinkageThing );
- }
-
+ }
+
if( linkagePtList != null && !linkagePtList.isEmpty() ){
- // This is as far as we can go, we will use the linkage point info to define the
- // rest of this "trail"
+ // This is as far as we can go, we will use the linkage point info to define the
+ // rest of this "trail"
for( int i = 0; i < linkagePtList.size(); i++ ){
Multimap<String, String> tmpMap = collectTopology4LinkagePoint( transId, fromAppId,
linkagePtList.get(i), thisGuysTrail, currentMap);
@@ -2507,8 +2548,8 @@ public class ModelBasedProcessing {
if( !thisTrailsVidsTraversed.contains(tmpElVid) ){
					// This is one we would like to use - so we'll recursively get its result set to add to ours
Multimap<String, String> tmpMap = collectTopology4ModelVer( transId, fromAppId,
- elVert, thisGuysTrail,
- currentMap, thisTrailsVidsTraversed, levelCounter,
+ elVert, thisGuysTrail,
+ currentMap, thisTrailsVidsTraversed, levelCounter,
modConstraintHash2Use, subModelFirstModInvId, subModelFirstModVerId );
thisMap.putAll(tmpMap);
}
@@ -2519,14 +2560,16 @@ public class ModelBasedProcessing {
" on trail = [" + thisGuysTrail + "]. ";
System.out.println( msg );
throw new AAIException("AAI_6132", msg);
- }
- }
- }
+ }
+ }
+ }
}
+
return thisMap;
- }
-
-
+
+ } // End of collectTopology4ModelVer()
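For readers tracing the recursion above, here is a minimal illustrative sketch (hypothetical values, not part of this change set; vidsTraversed and thisElemVid are the method's own variables) of the two pieces of bookkeeping collectTopology4ModelVer() relies on: the pipe-delimited "trail" and the per-branch list of visited vertex ids.

    // The trail grows one "nodeType[,model-invariant-id,model-version-id]" step per level.
    String incomingTrail = "service-instance";                  // assumed example trail built so far
    String thisElementNodeType = "l3-network";                  // assumed widget type of the current element
    String modInfo4Trail = ",exampleModInvId,exampleModVerId";  // appended only when the type supports persona fields
    String thisGuysTrail = incomingTrail.equals("")
            ? thisElementNodeType
            : incomingTrail + "|" + thisElementNodeType + modInfo4Trail;
    // -> "service-instance|l3-network,exampleModInvId,exampleModVerId"

    // Repeat-protection is per branch: copy the vids seen so far and add this element's vid,
    // so the same sub-model may still show up on a different branch of the topology.
    List<String> thisTrailsVidsTraversed = new ArrayList<>(vidsTraversed);
    thisTrailsVidsTraversed.add(thisElemVid);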
+
+
/**
* Check 4 edge rule.
*
@@ -2537,11 +2580,16 @@ public class ModelBasedProcessing {
*/
public void check4EdgeRule( String nodeTypeA, String nodeTypeB) throws AAIException {
// Throw an exception if there is no defined edge rule for this combination of nodeTypes in DbEdgeRules.
-
- final EdgeRules edgeRules = EdgeRules.getInstance();
-
- if( !edgeRules.hasEdgeRule(nodeTypeA, nodeTypeB)
- && !edgeRules.hasEdgeRule(nodeTypeB, nodeTypeA) ){
+
+ final EdgeIngestor edgeRules = SpringContextAware.getApplicationContext().getBean(EdgeIngestor.class);
+ //final EdgeRules edgeRules = EdgeRules.getInstance();
+
+ EdgeRuleQuery.Builder baseQ = new EdgeRuleQuery.Builder(nodeTypeA, nodeTypeB);
+ if (!edgeRules.hasRule(baseQ.build())) {
+
+
+ /* if( !edgeRules.hasEdgeRule(nodeTypeA, nodeTypeB)
+ && !edgeRules.hasEdgeRule(nodeTypeB, nodeTypeA) ){*/
// There's no EdgeRule for this -- find out if one of the nodeTypes is invalid or if
// they are valid, but there's just no edgeRule for them.
try {
@@ -2556,14 +2604,16 @@ public class ModelBasedProcessing {
String emsg = " Unrecognized nodeType bb [" + nodeTypeB + "]\n";
throw new AAIException("AAI_6115", emsg);
}
-
- String msg = " No Edge Rule found for this pair of nodeTypes (order does not matter) ["
+
+ String msg = " No Edge Rule found for this pair of nodeTypes (order does not matter) ["
+ nodeTypeA + "], [" + nodeTypeB + "].";
throw new AAIException("AAI_6120", msg);
}
+
+
}
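For quick reference, a minimal sketch of the schema-ingest calls the reworked check4EdgeRule() above now uses in place of the old EdgeRules singleton. The bean lookup and the error text simply mirror the code in the hunk; this is an illustration, not additional production code.

    // Resolve the EdgeIngestor bean and ask whether any edge rule exists for this pair of node types.
    EdgeIngestor edgeRules = SpringContextAware.getApplicationContext().getBean(EdgeIngestor.class);
    EdgeRuleQuery ruleQuery = new EdgeRuleQuery.Builder(nodeTypeA, nodeTypeB).build();
    if (!edgeRules.hasRule(ruleQuery)) {
        // The old code checked both (A,B) and (B,A); the single query-based check replaces that pair
        // of calls, and the error text still notes that order does not matter.
        throw new AAIException("AAI_6120",
                " No Edge Rule found for this pair of nodeTypes (order does not matter) ["
                + nodeTypeA + "], [" + nodeTypeB + "].");
    }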
-
-
+
+
/**
* Collect topology 4 linkage point.
*
@@ -2583,11 +2633,11 @@ public class ModelBasedProcessing {
Multimap<String, String> thisMap = ArrayListMultimap.create();
thisMap.putAll(currentMap);
String thisGuysTrail = incomingTrail;
-
+
// NOTE - "trails" can have multiple parts now since we track persona info for some.
// We just want to look at the node type info - which would be the piece
// before any commas (if there are any).
-
+
String [] trailSteps = thisGuysTrail.split("\\|");
if( trailSteps == null || trailSteps.length == 0 ){
throw new AAIException("AAI_6125", "Bad incomingTrail passed to collectTopology4LinkagePoint(): [" + incomingTrail + "] ");
@@ -2595,23 +2645,23 @@ public class ModelBasedProcessing {
String lastStepString = trailSteps[trailSteps.length - 1];
String [] stepPieces = lastStepString.split(",");
String lastStepNT = stepPieces[0];
-
+
// It is assumed that the linkagePoint string will be a pipe-delimited string where each
// piece is an AAIProperties.NODE_TYPE. For now, the first thing to connect to is what is on the farthest right.
// Example: linkagePoint = "pserver|p-interface|l-interface" would mean that we're connecting to the l-interface
// but that after that, we connect to a p-interface followed by a pserver.
// It might have been more clear to define it in the other direction, but for now, that is it. (16-07)
String linkagePointStr = linkagePointStrVal;
-
-	// We are getting these with more than one linkage thing in one string.
+
+	// We are getting these with more than one linkage thing in one string.
// Ie. "pserver|lag-interface|l-interface, pserver|p-interface|l-interface, vlan|l-interface"
linkagePointStr = linkagePointStr.replace("[", "");
linkagePointStr = linkagePointStr.replace("]", "");
linkagePointStr = linkagePointStr.replace(" ", "");
-
+
String [] linkage = linkagePointStr.split("\\,");
for( int x = 0; x < linkage.length; x++ ){
- lastStepNT = stepPieces[0];
+ lastStepNT = stepPieces[0];
String thisStepNT = "";
String [] linkageSteps = linkage[x].split("\\|");
if( linkageSteps == null || linkageSteps.length == 0 ){
@@ -2626,10 +2676,10 @@ public class ModelBasedProcessing {
}
}
return thisMap;
-
- }
-
-
+
+ } // End of collectTopology4LinkagePoint()
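To make the string handling above concrete, a short sketch (the sample value is taken from the comment inside collectTopology4LinkagePoint()) of how one raw linkage-points property is normalized and split:

    // Raw value as read from the vertex property: several linkage points packed into one string.
    String linkagePointStr = "[pserver|lag-interface|l-interface, pserver|p-interface|l-interface, vlan|l-interface]";

    // Strip the wrapper characters, then split into the individual linkage points.
    linkagePointStr = linkagePointStr.replace("[", "").replace("]", "").replace(" ", "");
    String[] linkage = linkagePointStr.split("\\,");

    // Each linkage point is pipe-delimited and is read from the right:
    // "pserver|p-interface|l-interface" connects first to an l-interface, then a p-interface, then a pserver.
    for (String point : linkage) {
        String[] linkageSteps = point.split("\\|");
        String firstConnectType = linkageSteps[linkageSteps.length - 1];   // e.g. "l-interface"
    }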
+
+
/**
* Gets the next step elements from set.
*
@@ -2641,21 +2691,21 @@ public class ModelBasedProcessing {
throws AAIException {
// Take a constrained-element-set and figure out the total set of all the possible elements that it
// represents and return them as a Hash.
-
+
Map<String,Object> retElementHash = new HashMap<String, Object>();
-
+
if( constrElemSetVtx == null ){
String msg = " getNextStepElementsFromSet() called with null constrElemSetVtx ";
throw new AAIException("AAI_6125", msg);
}
-
+
String constrNodeType = constrElemSetVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
String constrElemSetUuid = constrElemSetVtx.<String>property("constrained-element-set-uuid").orElse(null);
if( constrNodeType == null || !constrNodeType.equals("constrained-element-set") ){
String msg = " getNextStepElementsFromSet() called with wrong type model: [" + constrNodeType + "]. ";
throw new AAIException("AAI_6125", msg);
}
-
+
ArrayList <Vertex> choiceSetVertArray = new ArrayList<Vertex>();
Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, constrElemSetVtx, "element-choice-set");
int setCount = 0;
@@ -2667,12 +2717,12 @@ public class ModelBasedProcessing {
setCount++;
}
}
-
+
if( setCount == 0 ){
String msg = "No element-choice-set found under constrained-element-set-uuid = " + constrElemSetUuid;
throw new AAIException("AAI_6132", msg);
}
-
+
// Loop through each choice-set and grab the model-elements
for( int i = 0; i < setCount; i++ ){
Vertex choiceSetVert = choiceSetVertArray.get(i);
@@ -2688,24 +2738,24 @@ public class ModelBasedProcessing {
}
else {
// unsupported node type found for this choice-set
- String msg = "Unsupported nodeType (" + elNodeType
+ String msg = "Unsupported nodeType (" + elNodeType
+ ") found under choice-set under constrained-element-set-uuid = " + constrElemSetUuid;
throw new AAIException("AAI_6132", msg);
}
}
-
+
if( elCount == 0 ){
String msg = "No model-elements found in choice-set under constrained-element-set-uuid = " + constrElemSetUuid;
throw new AAIException("AAI_6132", msg);
}
-
+
}
return retElementHash;
-
- }
-
-
-
+
+ } // End of getNextStepElementsFromSet()
+
+
+
/**
* Gen topo map 4 named Q.
*
@@ -2719,14 +2769,14 @@ public class ModelBasedProcessing {
public Multimap<String, String> genTopoMap4NamedQ(String transId, String fromAppId,
Vertex queryVertex, String namedQueryUuid )
throws AAIException {
-
+
if( queryVertex == null ){
throw new AAIException("AAI_6125", "null queryVertex passed to genTopoMap4NamedQ()");
}
-
+
Multimap<String, String> initialEmptyMap = ArrayListMultimap.create();
List<String> vidsTraversed = new ArrayList<>();
-
+
Vertex firstElementVertex = null;
Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, queryVertex, "named-query-element");
int elCount = 0;
@@ -2734,29 +2784,32 @@ public class ModelBasedProcessing {
elCount++;
firstElementVertex = vertI.next();
}
-
+
if( elCount > 1 ){
throw new AAIException("AAI_6133", "Illegal query defined: More than one first element defined for = " + namedQueryUuid);
}
-
+
if( firstElementVertex == null ){
throw new AAIException("AAI_6114", "Could not find first query element = " + namedQueryUuid);
}
-
+
Vertex modVtx = getModelThatNqElementRepresents( firstElementVertex, "" );
- String modelType = getModelTypeFromModel( modVtx, "" );
+ String modelType = getModelTypeFromModel( modVtx, "" );
if( ! modelType.equals("widget") ){
throw new AAIException("AAI_6133", "Bad Named Query Definition: First element must correspond to a widget type model. Named Query UUID = "
+ namedQueryUuid);
}
-
- return collectTopology4NamedQ( transId, fromAppId,
- firstElementVertex, "",
+
+ Multimap<String, String> collectedMap = collectTopology4NamedQ( transId, fromAppId,
+ firstElementVertex, "",
initialEmptyMap, vidsTraversed, 0);
- }
-
-
+
+ return collectedMap;
+
+ } // End of genTopoMap4NamedQ()
+
+
/**
* Collect topology 4 named Q.
*
@@ -2771,21 +2824,21 @@ public class ModelBasedProcessing {
Vertex thisLevelElemVtx, String incomingTrail,
Multimap<String,String> currentMap, List<String> vidsTraversed, int levelCounter )
throws AAIException {
-
+
levelCounter++;
Multimap<String, String> thisMap = ArrayListMultimap.create();
thisMap.putAll(currentMap);
-
+
String thisElemVid = thisLevelElemVtx.id().toString();
if( levelCounter > MAX_LEVELS ) {
throw new AAIException("AAI_6125", "collectModelStructure() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
}
String thisGuysTrail = "";
-
+
// find out what widget (and thereby what aai-node-type) this element represents
String thisElementNodeType = getNqElementWidgetType( transId, fromAppId, thisLevelElemVtx, incomingTrail );
-
+
if( incomingTrail.equals("") ){
// This is the first one
thisGuysTrail = thisElementNodeType;
@@ -2793,9 +2846,9 @@ public class ModelBasedProcessing {
else {
thisGuysTrail = incomingTrail + "|" + thisElementNodeType;
}
-
+
vidsTraversed.add(thisElemVid);
-
+
// Look at the elements pointed to at this level and add on their data
Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, thisLevelElemVtx, "named-query-element");
while( vertI != null && vertI.hasNext() ){
@@ -2806,15 +2859,15 @@ public class ModelBasedProcessing {
if( !vidsTraversed.contains(tmpVid) ){
				// This is one we would like to use - so we'll recursively get its result set to add to ours
Multimap<String, String> tmpMap = collectTopology4NamedQ( transId, fromAppId,
- tmpVert, thisGuysTrail,
+ tmpVert, thisGuysTrail,
currentMap, vidsTraversed, levelCounter);
thisMap.putAll(tmpMap);
}
}
-
+
return thisMap;
-
- }
+
+ } // End of collectTopology4NamedQ()
/**
@@ -2827,7 +2880,7 @@ public class ModelBasedProcessing {
*/
public Vertex getModelThatNqElementRepresents(Vertex elementVtx, String elementTrail )
throws AAIException {
-
+
// Get the model that a named-query element represents
Vertex modVtx = null;
Iterator<Vertex> mvertI = this.traverseIncidentEdges(EdgeType.COUSIN, elementVtx, "model");
@@ -2836,13 +2889,13 @@ public class ModelBasedProcessing {
modCount++;
modVtx = mvertI.next();
}
-
+
if( modCount > 1 ){
String msg = "Illegal element defined: More than one model pointed to by a single named-query-element at [" +
elementTrail + "].";
throw new AAIException("AAI_6125", msg);
}
-
+
if( modVtx == null ){
String msg = "Bad named-query definition: Could not find model for element. ";
if( !elementTrail.equals("") ){
@@ -2850,20 +2903,20 @@ public class ModelBasedProcessing {
}
throw new AAIException("AAI_6132", msg);
}
-
+
String nodeType = modVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
if( (nodeType != null) && nodeType.equals("model") ){
return modVtx;
}
else {
- String msg = "Illegal Named Query element defined: expecting a 'model', but found 'isA' edge pointing to nodeType = " +
+ String msg = "Illegal Named Query element defined: expecting a 'model', but found 'isA' edge pointing to nodeType = " +
nodeType + "] at [" + elementTrail + "].";
throw new AAIException("AAI_6125", msg);
- }
-
- }
-
-
+ }
+
+ }// getModelThatNqElementRepresents()
+
+
/**
* Gets the model-ver that element represents.
*
@@ -2874,7 +2927,7 @@ public class ModelBasedProcessing {
*/
public Vertex getModelVerThatElementRepresents(Vertex elementVtx, String elementTrail )
throws AAIException {
-
+
// Get the model-ver that an element represents
Vertex modVerVtx = null;
Iterator<Vertex> mvertI = this.traverseIncidentEdges(EdgeType.COUSIN, elementVtx, "model-ver");
@@ -2883,13 +2936,13 @@ public class ModelBasedProcessing {
modCount++;
modVerVtx = mvertI.next();
}
-
+
if( modCount > 1 ){
String msg = "Illegal element defined: More than one model pointed to by a single element at [" +
elementTrail + "].";
throw new AAIException("AAI_6125", msg);
}
-
+
if( modVerVtx == null ){
String msg = "Bad model definition: Could not find model-ver for model-element. ";
if( !elementTrail.equals("") ){
@@ -2897,21 +2950,21 @@ public class ModelBasedProcessing {
}
throw new AAIException("AAI_6132", msg);
}
-
+
String nodeType = modVerVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
if( (nodeType != null) && nodeType.equals("model-ver") ){
return modVerVtx;
}
else {
- String msg = "Illegal model-element defined: expecting a 'model-ver', but found 'isA' edge pointing to nodeType = " +
+ String msg = "Illegal model-element defined: expecting a 'model-ver', but found 'isA' edge pointing to nodeType = " +
nodeType + "] at [" + elementTrail + "].";
throw new AAIException("AAI_6125", msg);
}
-
- }
-
-
-
+
+ }// getModelVerThatElementRepresents()
+
+
+
/**
* Gets the model that is parent to model-ver node.
*
@@ -2922,7 +2975,7 @@ public class ModelBasedProcessing {
*/
public Vertex getModelGivenModelVer(Vertex modVerVtx, String elementTrail )
throws AAIException {
-
+
// Get the parent model for this "model-ver" node
Vertex modVtx = null;
Iterator<Vertex> mvertI = this.traverseIncidentEdges(EdgeType.TREE, modVerVtx, "model");
@@ -2931,13 +2984,13 @@ public class ModelBasedProcessing {
modCount++;
modVtx = mvertI.next();
}
-
+
if( modCount > 1 ){
String msg = "Illegal model-ver node defined: More than one model points to it with a 'has' edge [" +
elementTrail + "].";
throw new AAIException("AAI_6125", msg);
}
-
+
if( modVtx == null ){
String msg = "Bad model-ver node: Could not find parent model. ";
if( !elementTrail.equals("") ){
@@ -2945,7 +2998,7 @@ public class ModelBasedProcessing {
}
throw new AAIException("AAI_6132", msg);
}
-
+
		String nodeType = modVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
if( (nodeType != null) && nodeType.equals("model") ){
// Found what we were looking for.
@@ -2957,12 +3010,12 @@ public class ModelBasedProcessing {
elementTrail + "].";
throw new AAIException("AAI_6125", msg);
}
-
-
- }
-
-
-
+
+
+ }// getModelGivenModelVer()
+
+
+
/**
* Gets the model type.
*
@@ -2973,30 +3026,30 @@ public class ModelBasedProcessing {
*/
public String getModelTypeFromModel(Vertex modelVtx, String elementTrail )
throws AAIException {
-
+
// Get the model-type from a model vertex
if( modelVtx == null ){
String msg = " null modelVtx passed to getModelTypeFromModel() ";
throw new AAIException("AAI_6114", msg);
- }
-
+ }
+
String modelType = modelVtx.<String>property("model-type").orElse(null);
if( (modelType == null) || modelType.equals("") ){
String msg = "Could not find model-type for model encountered at [" + elementTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
+
if( !modelType.equals("widget") && !modelType.equals("resource") && !modelType.equals("service") ){
String msg = "Unrecognized model-type, [" + modelType + "] for model pointed to by element at [" + elementTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
- return modelType;
-
- }
-
-
-
+
+ return modelType;
+
+ }// getModelTypeFromModel()
+
+
+
/**
* Gets the model type given model-ver
*
@@ -3007,29 +3060,30 @@ public class ModelBasedProcessing {
*/
public String getModelTypeFromModelVer(Vertex modelVerVtx, String elementTrail )
throws AAIException {
-
+
// Get the model-type given a model-ver vertex
if( modelVerVtx == null ){
String msg = " null modelVerVtx passed to getModelTypeFromModelVer() ";
throw new AAIException("AAI_6114", msg);
- }
-
+ }
+
Vertex modVtx = getModelGivenModelVer( modelVerVtx, elementTrail );
String modelType = modVtx.<String>property("model-type").orElse(null);
if( (modelType == null) || modelType.equals("") ){
String msg = "Could not find model-type for model encountered at [" + elementTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
+
if( !modelType.equals("widget") && !modelType.equals("resource") && !modelType.equals("service") ){
String msg = "Unrecognized model-type, [" + modelType + "] for model pointed to by element at [" + elementTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
- return modelType;
- }
-
-
+
+ return modelType;
+
+ }// getModelTypeFromModelVer()
+
+
/**
* Gets the model-element step name.
@@ -3042,8 +3096,8 @@ public class ModelBasedProcessing {
*/
public String getModelElementStepName(Vertex elementVtx, String elementTrail)
throws AAIException {
-
- // Get the "step name" for a model-element
+
+ // Get the "step name" for a model-element
// Step names look like this for widget-models: AAIProperties.NODE_TYPE
// Step names look like this for resource/service models: "aai-node-type,model-invariant-id,model-version-id"
// NOTE -- if the element points to a resource or service model, then we'll return the
@@ -3051,14 +3105,14 @@ public class ModelBasedProcessing {
String thisElementNodeType = "?";
Vertex modVerVtx = getModelVerThatElementRepresents( elementVtx, elementTrail );
String modelType = getModelTypeFromModelVer( modVerVtx, elementTrail );
-
+
if( modelType == null ){
String msg = " could not determine modelType in getModelElementStepName(). elementTrail = [" + elementTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
+
if( modelType.equals("widget") ){
- // NOTE: for models that have model-type = "widget", their "model-name" maps directly to aai-node-type
+ // NOTE: for models that have model-type = "widget", their "model-name" maps directly to aai-node-type
thisElementNodeType = modVerVtx.<String>property("model-name").orElse(null);
if( (thisElementNodeType == null) || thisElementNodeType.equals("") ){
String msg = "Could not find model-name for the widget model pointed to by element at [" + elementTrail + "].";
@@ -3068,17 +3122,17 @@ public class ModelBasedProcessing {
}
else if( modelType.equals("resource") || modelType.equals("service") ){
Vertex modVtx = getModelGivenModelVer( modVerVtx, elementTrail );
- String modInvId = modVtx.<String>property("model-invariant-id").orElse(null);
+ String modInvId = modVtx.<String>property("model-invariant-id").orElse(null);
String modVerId = modVerVtx.<String>property("model-version-id").orElse(null);
Vertex relatedTopElementModelVtx = getTopElementForSvcOrResModelVer( modVerVtx, elementTrail );
Vertex relatedModelVtx = getModelVerThatElementRepresents( relatedTopElementModelVtx, elementTrail );
thisElementNodeType = relatedModelVtx.<String>property("model-name").orElse(null);
-
+
if( (thisElementNodeType == null) || thisElementNodeType.equals("") ){
String msg = "Could not find model-name for the widget model pointed to by element at [" + elementTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
+
String stepName = "";
if( nodeTypeSupportsPersona(thisElementNodeType) ){
// This nodeType that this resource or service model refers to does support persona-related fields, so
@@ -3094,10 +3148,11 @@ public class ModelBasedProcessing {
String msg = " Unrecognized model-type = [" + modelType + "] pointed to by element at [" + elementTrail + "].";
throw new AAIException("AAI_6132", msg);
}
- }
-
-
-
+
+ }// getModelElementStepName()
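A tiny illustration (hypothetical identifiers) of the two step-name shapes produced by getModelElementStepName() above:

    // Widget model: the step name is just the aai-node-type of the widget.
    String widgetStepName = "generic-vnf";

    // Resource/service model whose crown widget supports the persona fields:
    // "aai-node-type,model-invariant-id,model-version-id"
    String personaStepName = "generic-vnf" + "," + "exampleModInvId" + "," + "exampleModVerId";

    // Resource/service model whose crown widget does not support persona fields:
    // the step name falls back to the crown widget's aai-node-type alone.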
+
+
+
/**
* Node type supports persona.
*
@@ -3108,7 +3163,7 @@ public class ModelBasedProcessing {
*/
public Boolean nodeTypeSupportsPersona(String nodeType)
throws AAIException {
-
+
if( nodeType == null || nodeType.equals("") ){
return false;
}
@@ -3119,13 +3174,18 @@ public class ModelBasedProcessing {
String emsg = " Unrecognized nodeType [" + nodeType + "]\n";
throw new AAIException("AAI_6115", emsg);
}
-
+
Collection <String> props4ThisNT = loader.introspectorFromName(nodeType).getProperties();
- return props4ThisNT.contains(addDBAliasedSuffix("model-invariant-id")) &&
- props4ThisNT.contains(addDBAliasedSuffix("model-version-id"));
- }
-
-
+ if( !props4ThisNT.contains(addDBAliasedSuffix("model-invariant-id")) || !props4ThisNT.contains(addDBAliasedSuffix("model-version-id")) ){
+ return false;
+ }
+ else {
+ return true;
+ }
+
+ }// nodeTypeSupportsPersona()
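For clarity, a compact equivalent of the check above (a sketch only; it leans on the same loader call and the same DB-alias suffix helper used elsewhere in this class):

    // A node type "supports persona" when its schema exposes both aliased model pointers.
    Collection<String> props = loader.introspectorFromName(nodeType).getProperties();
    boolean supportsPersona =
            props.contains("model-invariant-id" + AAIProperties.DB_ALIAS_SUFFIX)
            && props.contains("model-version-id" + AAIProperties.DB_ALIAS_SUFFIX);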
+
+
/**
* Gets a Named Query element's widget type.
*
@@ -3137,16 +3197,16 @@ public class ModelBasedProcessing {
public String getNqElementWidgetType(String transId, String fromAppId,
Vertex elementVtx, String elementTrail )
throws AAIException {
-
+
String thisNqElementWidgetType = "";
// Get the associated node-type for the model pointed to by a named-query-element.
// NOTE -- if the element points to a resource or service model, then we'll return the
// widget-type of the first element (crown widget) for that model.
Vertex modVtx = getModelThatNqElementRepresents( elementVtx, elementTrail );
String modelType = getModelTypeFromModel( modVtx, elementTrail );
-
+
if( modelType == null || !modelType.equals("widget") ){
- String emsg = " Model Type must be 'widget' for NamedQuery elements. Found [" + modelType + "] at [" +
+ String emsg = " Model Type must be 'widget' for NamedQuery elements. Found [" + modelType + "] at [" +
elementTrail + "]\n";
throw new AAIException("AAI_6132", emsg);
}
@@ -3164,9 +3224,11 @@ public class ModelBasedProcessing {
return thisNqElementWidgetType;
}
}
- }
-
-
+
+
+ }// End getNqElementWidgetType()
+
+
/**
* Gets a model-element's top widget type.
*
@@ -3177,21 +3239,23 @@ public class ModelBasedProcessing {
*/
public String getModElementWidgetType(Vertex elementVtx, String elementTrail )
throws AAIException {
-
+
// Get the associated node-type for the model-ver pointed to by a model-element.
// NOTE -- if the element points to a resource or service model, then we'll return the
// widget-type of the first element (crown widget) for that model.
Vertex modVerVtx = getModelVerThatElementRepresents( elementVtx, elementTrail );
- return getModelVerTopWidgetType( modVerVtx, elementTrail );
- }
-
-
+ String thisElementNodeType = getModelVerTopWidgetType( modVerVtx, elementTrail );
+ return thisElementNodeType;
+
+ }// End getModElementWidgetType()
+
+
/**
* Gets the node using unique identifier
*
* @param transId the trans id
* @param fromAppId the from app id
- * @param nodeType the nodeType
+ * @param nodeType the nodeType
* @param idPropertyName the property name of the unique identifier
* @param uniqueIdVal the UUID value
* @return unique vertex found using UUID
@@ -3200,41 +3264,41 @@ public class ModelBasedProcessing {
public Vertex getNodeUsingUniqueId(String transId, String fromAppId,
String nodeType, String idPropertyName, String uniqueIdVal )
throws AAIException {
-
- // Given a unique identifier, get the Vertex
+
+ // Given a unique identifier, get the Vertex
if( uniqueIdVal == null || uniqueIdVal.equals("") ){
- String emsg = " Bad uniqueIdVal passed to getNodeUsingUniqueId(): ["
+ String emsg = " Bad uniqueIdVal passed to getNodeUsingUniqueId(): ["
+ uniqueIdVal + "]\n";
throw new AAIException("AAI_6118", emsg);
}
-
+
if( idPropertyName == null || idPropertyName.equals("") ){
- String emsg = " Bad idPropertyName passed to getNodeUsingUniqueId(): ["
+ String emsg = " Bad idPropertyName passed to getNodeUsingUniqueId(): ["
+ idPropertyName + "]\n";
throw new AAIException("AAI_6118", emsg);
- }
-
+ }
+
if( nodeType == null || nodeType.equals("") ){
- String emsg = " Bad nodeType passed to getNodeUsingUniqueId(): ["
+ String emsg = " Bad nodeType passed to getNodeUsingUniqueId(): ["
+ nodeType + "]\n";
throw new AAIException("AAI_6118", emsg);
- }
-
+ }
+
Vertex uniqVtx = null;
Iterable <?> uniqVerts = null;
uniqVerts = engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE,nodeType).has(idPropertyName,uniqueIdVal).toList();
if( uniqVerts == null ){
- String emsg = "Node could not be found for nodeType = [" + nodeType
- + "], propertyName = [" + idPropertyName
+ String emsg = "Node could not be found for nodeType = [" + nodeType
+ + "], propertyName = [" + idPropertyName
+ "], propertyValue = [" + uniqueIdVal + "]\n";
throw new AAIException("AAI_6114", emsg);
}
- else {
+ else {
int count = 0;
Iterator <?> uniqVertsIter = uniqVerts.iterator();
if( !uniqVertsIter.hasNext() ){
- String emsg = "Node could not be found for nodeType = [" + nodeType
- + "], propertyName = [" + idPropertyName
+ String emsg = "Node could not be found for nodeType = [" + nodeType
+ + "], propertyName = [" + idPropertyName
+ "], propertyValue = [" + uniqueIdVal + "]\n";
throw new AAIException("AAI_6114", emsg);
}
@@ -3243,18 +3307,19 @@ public class ModelBasedProcessing {
count++;
uniqVtx = (Vertex) uniqVertsIter.next();
if( count > 1 ){
- String emsg = "More than one node found for nodeType = [" + nodeType
- + "], propertyName = [" + idPropertyName
+ String emsg = "More than one node found for nodeType = [" + nodeType
+ + "], propertyName = [" + idPropertyName
+ "], propertyValue = [" + uniqueIdVal + "]\n";
throw new AAIException("AAI_6132", emsg);
}
}
}
}
+
return uniqVtx;
- }
-
-
+ }// End getNodeUsingUniqueId()
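To illustrate the lookup above, a minimal sketch (the same read-only traversal the method issues; the node type and property values are examples) of fetching a model-ver by its unique id:

    // Exactly one vertex is expected; zero hits or more than one hit are raised as AAI_6114 / AAI_6132 above.
    List<Vertex> hits = engine.asAdmin().getReadOnlyTraversalSource()
            .V()
            .has(AAIProperties.NODE_TYPE, "model-ver")           // nodeType argument in the general case
            .has("model-version-id", "exampleModVerId")          // idPropertyName / uniqueIdVal in the general case
            .toList();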
+
+
/**
* Gets the model-ver nodes using name.
*
@@ -3267,97 +3332,102 @@ public class ModelBasedProcessing {
public List<Vertex> getModelVersUsingName(String transId, String fromAppId,
String modelName )
throws AAIException {
-
+
// Given a "model-name", find the model-ver vertices that this maps to
if( modelName == null || modelName.equals("") ){
- String emsg = " Bad modelName passed to getModelVersUsingName(): ["
+ String emsg = " Bad modelName passed to getModelVersUsingName(): ["
+ modelName + "]\n";
throw new AAIException("AAI_6118", emsg);
}
-
+
List<Vertex> retVtxArr = new ArrayList<>();
Iterator<Vertex> modVertsIter = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE,"model-ver").has("model-name",modelName);
if( !modVertsIter.hasNext() ){
- String emsg = "Model-ver record(s) could not be found for model-ver data passed. model-name = [" +
+ String emsg = "Model-ver record(s) could not be found for model-ver data passed. model-name = [" +
modelName + "]\n";
throw new AAIException("AAI_6132", emsg);
}
- else {
+ else {
while( modVertsIter.hasNext() ){
- Vertex tmpModelVerVtx = modVertsIter.next();
+ Vertex tmpModelVerVtx = (Vertex) modVertsIter.next();
retVtxArr.add(tmpModelVerVtx);
}
}
+
return retVtxArr;
- }
-
-
+
+ }// End getModelVersUsingName()
+
+
/**
* Gets the model-ver nodes using model-invariant-id.
*
* @param transId the trans id
* @param fromAppId the from app id
* @param model-invariant-id (uniquely identifies a model)
- * @return the model-ver's defined for the corresponding model
+ * @return the model-ver's defined for the corresponding model
* @throws AAIException the AAI exception
*/
public Iterator<Vertex> getModVersUsingModelInvId(String transId, String fromAppId,
String modelInvId )
throws AAIException {
-
+
// Given a "model-invariant-id", find the model-ver nodes that this maps to
if( modelInvId == null || modelInvId.equals("") ){
- String emsg = " Bad model-invariant-id passed to getModVersUsingModelInvId(): ["
+ String emsg = " Bad model-invariant-id passed to getModVersUsingModelInvId(): ["
+ modelInvId + "]\n";
throw new AAIException("AAI_6118", emsg);
}
-
+
Vertex modVtx = getNodeUsingUniqueId(transId, fromAppId, "model", "model-invariant-id", modelInvId);
List<Vertex> retVtxArr = getModVersUsingModel(transId, fromAppId, modVtx);
if( retVtxArr == null || retVtxArr.isEmpty() ){
- String emsg = " Model-ver record(s) could not be found attached to model with model-invariant-id = [" +
+ String emsg = " Model-ver record(s) could not be found attached to model with model-invariant-id = [" +
modelInvId + "]\n";
throw new AAIException("AAI_6132", emsg);
}
+
return retVtxArr.iterator();
- }
-
-
+ }// End getModVersUsingModelInvId()
+
+
/**
* Gets the model-ver nodes using a model node.
*
* @param transId the trans id
* @param fromAppId the from app id
* @param model vertex
- * @return the model-ver's defined for the corresponding model
+ * @return the model-ver's defined for the corresponding model
* @throws AAIException the AAI exception
*/
public List<Vertex> getModVersUsingModel(String transId, String fromAppId,
Vertex modVtx )
throws AAIException {
-
+
if( modVtx == null ){
String emsg = " Null model vertex passed to getModVersUsingModel(): ";
throw new AAIException("AAI_6118", emsg);
}
-
+
List<Vertex> retVtxArr = new ArrayList<>();
Iterator<Vertex> modVerVertsIter = this.traverseIncidentEdges(EdgeType.TREE, modVtx, "model-ver");
if(!modVerVertsIter.hasNext()){
String modelInvId = modVtx.<String>property("model-invariant-id").orElse(null);
- String emsg = "Model-ver record(s) could not be found attached to model with model-invariant-id = [" +
+ String emsg = "Model-ver record(s) could not be found attached to model with model-invariant-id = [" +
modelInvId + "]\n";
throw new AAIException("AAI_6132", emsg);
}
- else {
+ else {
while( modVerVertsIter.hasNext() ){
- Vertex tmpModelVtx = modVerVertsIter.next();
+ Vertex tmpModelVtx = (Vertex) modVerVertsIter.next();
retVtxArr.add(tmpModelVtx);
}
}
+
return retVtxArr;
- }
-
+
+ }// End getModVersUsingModel()
+
/**
* Gets the model-version-ids using model-name.
*
@@ -3370,40 +3440,41 @@ public class ModelBasedProcessing {
public List<String> getModelVerIdsUsingName( String transId, String fromAppId,
String modelName )
throws AAIException {
-
+
// Given a model-name find the model-ver nodes that it maps to
if( modelName == null || modelName.equals("") ){
- String emsg = " Bad modelName passed to getModelVerIdsUsingName(): ["
+ String emsg = " Bad modelName passed to getModelVerIdsUsingName(): ["
+ modelName + "]\n";
throw new AAIException("AAI_6118", emsg);
}
-
+
List<String> retArr = new ArrayList<>();
Iterator<Vertex> modVerVertsIter = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE,"model-ver").has("model-name",modelName);
if( !modVerVertsIter.hasNext() ){
- String emsg = " model-ver record(s) could not be found for model data passed. model-name = [" +
+ String emsg = " model-ver record(s) could not be found for model data passed. model-name = [" +
modelName + "]\n";
throw new AAIException("AAI_6114", emsg);
}
- else {
+ else {
while( modVerVertsIter.hasNext() ){
- Vertex modelVerVtx = modVerVertsIter.next();
+ Vertex modelVerVtx = (Vertex) modVerVertsIter.next();
String tmpUuid = modelVerVtx.<String>property("model-version-id").orElse(null);
if( (tmpUuid != null) && !tmpUuid.equals("") && !retArr.contains(tmpUuid) ){
retArr.add(tmpUuid);
}
}
}
-
+
if( retArr.isEmpty() ){
- String emsg = "No model-ver record found for model-name = ["
+ String emsg = "No model-ver record found for model-name = ["
+ modelName + "]\n";
throw new AAIException("AAI_6132", emsg);
}
+
return retArr;
- }
-
-
+ }// End getModelVerIdsUsingName()
+
+
/**
* Gets the model top widget type.
*
@@ -3418,14 +3489,14 @@ public class ModelBasedProcessing {
public String getModelVerTopWidgetType( String transId, String fromAppId,
String modelVersionId, String modelInvId, String modelName )
throws AAIException {
-
+
// Could be given a model-ver's key info (model-version-id), OR, just a (non-unique) model-name,
// Or just a model-invariant-id (which could have multiple model-ver records under it).
- // In any case, they should only map to one single "top" node-type for the first element.
-
+ // In any case, they should only map to one single "top" node-type for the first element.
+
String nodeType = "?";
Iterator<Vertex> modVerVertsIter;
-
+
if( modelVersionId != null && !modelVersionId.equals("") ){
// this would be the best - we can just look up the model-ver records directly
modVerVertsIter = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE,"model-ver").has("model-version-id",modelVersionId);
@@ -3440,13 +3511,13 @@ public class ModelBasedProcessing {
String msg = "Neither modelVersionId, modelInvariantId, nor modelName passed to: getModelVerTopWidgetType() ";
throw new AAIException("AAI_6120", msg);
}
-
+
if( !modVerVertsIter.hasNext() ){
String emsg = "model-ver record(s) could not be found for model data passed: modelInvariantId = [" + modelInvId +
"], modeVersionId = [" + modelVersionId + "], modelName = [" + modelName + "]\n";
throw new AAIException("AAI_6114", emsg);
}
- else {
+ else {
String lastNT = "";
if( !modVerVertsIter.hasNext() ){
String emsg = "model-ver record(s) could not be found for model data passed: modelInvariantId = [" + modelInvId +
@@ -3454,13 +3525,13 @@ public class ModelBasedProcessing {
throw new AAIException("AAI_6114", emsg);
}
while( modVerVertsIter.hasNext() ){
- Vertex tmpModVerVtx = modVerVertsIter.next();
+ Vertex tmpModVerVtx = (Vertex) modVerVertsIter.next();
String tmpNT = getModelVerTopWidgetType( tmpModVerVtx, "" );
if( lastNT != null && !lastNT.equals("") ){
if( !lastNT.equals(tmpNT) ){
- String emsg = "Different top-node-types (" + tmpNT + ", " + lastNT
+ String emsg = "Different top-node-types (" + tmpNT + ", " + lastNT
+ ") found for model data passed. (" +
- " modelVersionId = [" + modelVersionId +
+ " modelVersionId = [" + modelVersionId +
"], modelId = [" + modelInvId +
"], modelName = [" + modelName +
"])\n";
@@ -3471,10 +3542,12 @@ public class ModelBasedProcessing {
nodeType = tmpNT;
}
}
+
return nodeType;
- }
-
-
+
+ }// End getModelVerTopWidgetType()
+
+
/**
* Gets the widget type that this model-ver starts with.
*
@@ -3493,7 +3566,7 @@ public class ModelBasedProcessing {
String msg = " Could not determine modelType in getModelVerTopWidgetType(). elementTrail = [" + elementTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
+
String thisElementNodeType = "?";
if( modelType.equals("widget") ){
// NOTE: for models that have model-type = "widget", their child model-ver nodes will
@@ -3518,11 +3591,12 @@ public class ModelBasedProcessing {
String msg = " Unrecognized model-type = [" + modelType + "] pointed to by element at [" + elementTrail + "].";
throw new AAIException("AAI_6132", msg);
}
-
+
return thisElementNodeType;
- }
-
-
+
+ }// getModelVerTopWidgetType()
+
+
/**
* Validate model.
*
@@ -3532,10 +3606,10 @@ public class ModelBasedProcessing {
* @param apiVersion the api version
* @throws AAIException the AAI exception
*/
- public void validateModel(String transId, String fromAppId, String modelVersionIdVal, String apiVersion )
+ public void validateModel(String transId, String fromAppId, String modelVersionIdVal, String apiVersion )
throws AAIException {
-
- // Note - this will throw an exception if the model either can't be found, or if
+
+ // Note - this will throw an exception if the model either can't be found, or if
// we can't figure out its topology map.
Vertex modelVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
"model-version-id", modelVersionIdVal);
@@ -3550,9 +3624,10 @@ public class ModelBasedProcessing {
System.out.println("INFO -- " + msg );
}
return;
- }
-
-
+
+ }// End validateModel()
+
+
/**
* Validate named query.
*
@@ -3562,28 +3637,29 @@ public class ModelBasedProcessing {
* @param apiVersion the api version
* @throws AAIException the AAI exception
*/
- public void validateNamedQuery(String transId, String fromAppId, String namedQueryUuid, String apiVersion )
+ public void validateNamedQuery(String transId, String fromAppId, String namedQueryUuid, String apiVersion )
throws AAIException {
-
- // Note - this will throw an exception if the named query either can't be found, or if
+
+ // Note - this will throw an exception if the named query either can't be found, or if
// we can't figure out its topology map.
Vertex nqVtx = getNodeUsingUniqueId(transId, fromAppId, "named-query",
"named-query-uuid", namedQueryUuid);
-
+
if( nqVtx == null ){
String msg = " Could not find named-query with namedQueryUuid = [" + namedQueryUuid + "].";
throw new AAIException("AAI_6114", msg);
}
else {
- //Multimap<String, String> topoMap = genTopoMap4NamedQ( "junkTransId", "junkFromAppId",
+ //Multimap<String, String> topoMap = genTopoMap4NamedQ( "junkTransId", "junkFromAppId",
// graph, nqVtx, namedQueryUuid );
//System.out.println("DEBUG -- for test only : --- ");
//System.out.println("DEBUG -- topomap = [" + topoMap + "]");
}
return;
- }
-
-
+
+ }// End validateNamedQuery()
+
+
/**
* Show result set.
*
@@ -3591,7 +3667,7 @@ public class ModelBasedProcessing {
* @param levelCount the level count
*/
public void showResultSet(ResultSet resSet, int levelCount ) {
-
+
levelCount++;
String propsStr = "";
for( int i= 1; i <= levelCount; i++ ){
@@ -3602,11 +3678,11 @@ public class ModelBasedProcessing {
}
String nt = resSet.getVert().<String>property(AAIProperties.NODE_TYPE).orElse(null);
propsStr = propsStr + "[" + nt + "] ";
-
+
//propsStr = propsStr + " newDataDelFlag = " + resSet.getNewDataDelFlag() + ", trail = " + resSet.getLocationInModelSubGraph();
//propsStr = propsStr + "limitDesc = [" + resSet.getPropertyLimitDesc() + "]";
propsStr = propsStr + " trail = " + resSet.getLocationInModelSubGraph();
-
+
Map<String,Object> overrideHash = resSet.getPropertyOverRideHash();
if( overrideHash != null && !overrideHash.isEmpty() ){
for( Map.Entry<String, Object> entry : overrideHash.entrySet() ){
@@ -3619,7 +3695,7 @@ public class ModelBasedProcessing {
Iterator<VertexProperty<Object>> pI = resSet.getVert().properties();
while( pI.hasNext() ){
VertexProperty<Object> tp = pI.next();
- if( ! tp.key().startsWith("aai")
+ if( ! tp.key().startsWith("aai")
&& ! tp.key().equals("source-of-truth")
//&& ! tp.key().equals("resource-version")
&& ! tp.key().startsWith("last-mod")
@@ -3638,42 +3714,45 @@ public class ModelBasedProcessing {
propsStr = propsStr + " [" + propName + " = " + propVal.toString() + "]";
}
}
-
+
System.out.println( propsStr );
LOGGER.info(propsStr);
-
+
if( !resSet.getSubResultSet().isEmpty() ){
ListIterator<ResultSet> listItr = resSet.getSubResultSet().listIterator();
while( listItr.hasNext() ){
showResultSet( listItr.next(), levelCount );
}
}
- }
-
- private Iterator<Vertex> traverseIncidentEdges(EdgeType treeType, Vertex startV, String connectedNodeType) throws AAIException {
- return this.engine.getQueryBuilder(startV).createEdgeTraversal(treeType, startV, loader.introspectorFromName(connectedNodeType));
+
+ }// end of showResultSet()
+
+ private Iterator<Vertex> traverseIncidentEdges(EdgeType treeType, Vertex startV, String connectedNodeType) throws AAIUnknownObjectException, AAIException {
+ QueryBuilder builder = this.engine.getQueryBuilder(startV).createEdgeTraversal(treeType, startV, loader.introspectorFromName(connectedNodeType));
+ return builder;
}
-
- private Iterator<Vertex> traverseIncidentEdges(EdgeType treeType, Vertex startV, String... connectedNodeType) throws AAIException {
+
+ private Iterator<Vertex> traverseIncidentEdges(EdgeType treeType, Vertex startV, String... connectedNodeType) throws AAIUnknownObjectException, AAIException {
QueryBuilder[] builders = new QueryBuilder[connectedNodeType.length];
for (int i = 0; i < connectedNodeType.length; i++) {
builders[i] = this.engine.getQueryBuilder(startV).createEdgeTraversal(EdgeType.TREE, startV, loader.introspectorFromName(connectedNodeType[i]));
}
- return this.engine.getQueryBuilder(startV).union(builders);
+ QueryBuilder builder = this.engine.getQueryBuilder(startV).union(builders);
+ return builder;
}
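A brief sketch (same QueryBuilder calls as the helper above; the child types shown are the ones collectTopology4ModelVer() asks for) of how the varargs variant unions one edge traversal per requested child type. Note that the loop hard-codes EdgeType.TREE rather than using its treeType parameter, so only TREE edges are ever unioned by this variant.

    // Build one TREE-edge traversal per connected node type, then union them into a single iterator.
    QueryBuilder[] builders = new QueryBuilder[] {
            engine.getQueryBuilder(startV).createEdgeTraversal(EdgeType.TREE, startV, loader.introspectorFromName("model-element")),
            engine.getQueryBuilder(startV).createEdgeTraversal(EdgeType.TREE, startV, loader.introspectorFromName("constrained-element-set"))
    };
    Iterator<Vertex> children = engine.getQueryBuilder(startV).union(builders);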
private String addDBAliasedSuffix(String propName) {
return propName + AAIProperties.DB_ALIAS_SUFFIX;
}
-
+
protected String getPropNameWithAliasIfNeeded(String nodeType, String propName) throws AAIUnknownObjectException {
-
+
String retPropName = propName;
if( loader.introspectorFromName(nodeType).getPropertyMetadata(propName, PropertyMetadata.DB_ALIAS).isPresent() ){
return propName + AAIProperties.DB_ALIAS_SUFFIX;
}
return retPropName;
}
-
+
}
diff --git a/aai-traversal/src/main/java/org/onap/aai/dbgraphmap/SearchGraph.java b/aai-traversal/src/main/java/org/onap/aai/dbgraphmap/SearchGraph.java
index 40d3027..0725802 100644
--- a/aai-traversal/src/main/java/org/onap/aai/dbgraphmap/SearchGraph.java
+++ b/aai-traversal/src/main/java/org/onap/aai/dbgraphmap/SearchGraph.java
@@ -51,6 +51,10 @@ import org.onap.aai.dbgen.PropertyLimitDesc;
import org.onap.aai.dbgraphgen.ModelBasedProcessing;
import org.onap.aai.dbgraphgen.ResultSet;
import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.EdgeRule;
+import org.onap.aai.edges.EdgeRuleQuery;
+import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.extensions.AAIExtensionMap;
import org.onap.aai.introspection.Introspector;
@@ -64,14 +68,14 @@ import org.onap.aai.query.builder.QueryBuilder;
import org.onap.aai.schema.enums.ObjectMetadata;
import org.onap.aai.schema.enums.PropertyMetadata;
import org.onap.aai.serialization.db.DBSerializer;
-import org.onap.aai.serialization.db.EdgeRule;
-import org.onap.aai.serialization.db.EdgeRules;
import org.onap.aai.serialization.engines.QueryStyle;
import org.onap.aai.serialization.engines.JanusGraphDBEngine;
import org.onap.aai.serialization.engines.TransactionalGraphEngine;
import org.onap.aai.serialization.queryformats.exceptions.AAIFormatVertexException;
import org.onap.aai.serialization.queryformats.utils.UrlBuilder;
+import org.onap.aai.setup.SchemaVersions;
import org.onap.aai.util.StoreNotificationEvent;
+import org.springframework.beans.factory.annotation.Autowired;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
@@ -79,6 +83,10 @@ import com.google.common.base.CaseFormat;
import edu.emory.mathcs.backport.java.util.Collections;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
+import com.jcabi.log.Logger;
+
/**
* Database Mapping class which acts as the middle man between the REST interface objects
* for the Search namespace
@@ -86,9 +94,20 @@ import edu.emory.mathcs.backport.java.util.Collections;
*/
public class SearchGraph {
- private final String COMPONENT = "aaidbmap";
- private AAIExtensionMap aaiExtMap;
private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(SearchGraph.class);
+
+ private LoaderFactory loaderFactory;
+
+ private EdgeIngestor edgeIngestor;
+
+ private SchemaVersions schemaVersions;
+
+ @Autowired
+ public SearchGraph(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, SchemaVersions schemaVersions){
+ this.loaderFactory = loaderFactory;
+ this.edgeIngestor = edgeIngestor;
+ this.schemaVersions = schemaVersions;
+ }
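SearchGraph now receives its collaborators through the constructor instead of reaching for static singletons. A one-line sketch (a hypothetical caller such as a unit test; inside the container Spring supplies the beans via @Autowired) of constructing it directly:

    // Outside the Spring container the three schema beans can simply be passed in.
    SearchGraph searchGraph = new SearchGraph(loaderFactory, edgeIngestor, schemaVersions);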
/**
* Get the search result based on the includeNodeType and depth provided.
*
@@ -227,7 +246,7 @@ public class SearchGraph {
return response;
}
- private URI craftUriFromQueryParams(Loader loader, String startNodeType, List<String> startNodeKeyParams) throws UnsupportedEncodingException, AAIException {
+ private URI craftUriFromQueryParams(Loader loader, String startNodeType, List<String> startNodeKeyParams) throws UnsupportedEncodingException, IllegalArgumentException, UriBuilderException, AAIException {
Introspector relationship = loader.introspectorFromName("relationship");
relationship.setValue("related-to", startNodeType);
@@ -491,10 +510,18 @@ public class SearchGraph {
* @param nodeType the node type
* @return the edge label
* @throws AAIException the AAI exception
+ * @throws EdgeRuleNotFoundException
*/
- public static String[] getEdgeLabel(String targetNodeType, String nodeType) {
- Map<String, EdgeRule> rules = EdgeRules.getInstance().getEdgeRules(targetNodeType, nodeType);
- return rules.keySet().toArray(new String[0]);
+ public String[] getEdgeLabel(String targetNodeType, String nodeType) throws AAIException, EdgeRuleNotFoundException{
+
+
+ EdgeRuleQuery query = new EdgeRuleQuery.Builder(targetNodeType, nodeType).build();
+ Multimap<String, EdgeRule> edgeRules = ArrayListMultimap.create();
+ edgeRules = edgeIngestor.getRules(query);
+
+ //Map<String, EdgeRule> rules = EdgeRules.getInstance().getEdgeRules(targetNodeType, nodeType);
+ String[] results = edgeRules.keySet().toArray(new String[0]);
+ return results;
}
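A small usage sketch of the ingestor call in the new getEdgeLabel() above (the node-type pair is a hypothetical example; the new method signature declares EdgeRuleNotFoundException for the case where no rule matches):

    // Rules come back keyed by edge label, so the key set is exactly what getEdgeLabel() returns.
    EdgeRuleQuery query = new EdgeRuleQuery.Builder("l-interface", "vlan").build();
    Multimap<String, EdgeRule> rules = edgeIngestor.getRules(query);
    String[] labels = rules.keySet().toArray(new String[0]);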
@@ -511,20 +538,20 @@ public class SearchGraph {
*/
public Response runNamedQuery(String fromAppId, String transId, String queryParameters,
DBConnectionType connectionType,
- AAIExtensionMap aaiExtMap) throws AAIException {
+ AAIExtensionMap aaiExtMap) throws JAXBException, AAIException {
Introspector inventoryItems;
boolean success = true;
TransactionalGraphEngine dbEngine = null;
try {
- MoxyLoader loader = (MoxyLoader)LoaderFactory.createLoaderForVersion(ModelType.MOXY, AAIProperties.LATEST);
+ MoxyLoader loader = (MoxyLoader)loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
DynamicJAXBContext jaxbContext = loader.getJAXBContext();
dbEngine = new JanusGraphDBEngine(
QueryStyle.TRAVERSAL,
connectionType,
loader);
- DBSerializer serializer = new DBSerializer(AAIProperties.LATEST, dbEngine, ModelType.MOXY, fromAppId);
+ DBSerializer serializer = new DBSerializer(schemaVersions.getDefaultVersion(), dbEngine, ModelType.MOXY, fromAppId);
ModelBasedProcessing processor = new ModelBasedProcessing(loader, dbEngine, serializer);
dbEngine.startTransaction();
@@ -547,7 +574,7 @@ public class SearchGraph {
DynamicEntity qp = modelAndNamedQuerySearch.get("queryParameters");
String namedQueryUuid = null;
if ((qp != null) && qp.isSet("namedQuery")) {
- DynamicEntity namedQuery = qp.get("namedQuery");
+ DynamicEntity namedQuery = (DynamicEntity) qp.get("namedQuery");
if (namedQuery.isSet("namedQueryUuid")) {
namedQueryUuid = namedQuery.get("namedQueryUuid");
@@ -632,19 +659,19 @@ public class SearchGraph {
public Response executeModelOperation(String fromAppId, String transId, String queryParameters,
DBConnectionType connectionType,
boolean isDelete,
- AAIExtensionMap aaiExtMap) throws AAIException, UnsupportedEncodingException {
+ AAIExtensionMap aaiExtMap) throws JAXBException, AAIException, DynamicException, UnsupportedEncodingException {
Response response;
boolean success = true;
TransactionalGraphEngine dbEngine = null;
try {
- MoxyLoader loader = (MoxyLoader) LoaderFactory.createLoaderForVersion(ModelType.MOXY, AAIProperties.LATEST);
+ MoxyLoader loader = (MoxyLoader) loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
DynamicJAXBContext jaxbContext = loader.getJAXBContext();
dbEngine = new JanusGraphDBEngine(
QueryStyle.TRAVERSAL,
connectionType,
loader);
- DBSerializer serializer = new DBSerializer(AAIProperties.LATEST, dbEngine, ModelType.MOXY, fromAppId);
+ DBSerializer serializer = new DBSerializer(schemaVersions.getDefaultVersion(), dbEngine, ModelType.MOXY, fromAppId);
ModelBasedProcessing processor = new ModelBasedProcessing(loader, dbEngine, serializer);
dbEngine.startTransaction();
@@ -688,7 +715,7 @@ public class SearchGraph {
DynamicEntity qp = modelAndNamedQuerySearch.get("queryParameters");
if (qp.isSet("model")) {
- DynamicEntity model = qp.get("model");
+ DynamicEntity model = (DynamicEntity) qp.get("model");
// on an old-style model object, the following 4 attrs were all present
if (model.isSet("modelNameVersionId")) {
@@ -713,7 +740,7 @@ public class SearchGraph {
if (model.isSet("modelVers")) {
// we know that this is new style, because modelVers was not an option
// before v9
- DynamicEntity modelVers = model.get("modelVers");
+ DynamicEntity modelVers = (DynamicEntity) model.get("modelVers");
if (modelVers.isSet("modelVer")) {
List<DynamicEntity> modelVerList = modelVers.get("modelVer");
// if they send more than one, too bad, they get the first one
@@ -734,7 +761,7 @@ public class SearchGraph {
List<Map<String,Object>> startNodeFilterHash = new ArrayList<>();
- String resourceVersion = mapInstanceFilters(modelAndNamedQuerySearch.get("instanceFilters"),
+ String resourceVersion = mapInstanceFilters((DynamicEntity)modelAndNamedQuerySearch.get("instanceFilters"),
startNodeFilterHash, jaxbContext);
if (isDelete) {
@@ -749,13 +776,7 @@ public class SearchGraph {
Vertex firstVert = rs.getVert();
String restURI = serializer.getURIForVertex(firstVert).toString();
- String notificationVersion = AAIProperties.LATEST.toString();
- if (restURI.startsWith("/")) {
- restURI = "/aai/" + notificationVersion + restURI;
- } else {
- restURI = "/aai/" + notificationVersion + "/" + restURI;
- }
-
+ String notificationVersion = schemaVersions.getDefaultVersion().toString();
Map<String,String> delResult = processor.runDeleteByModel( transId, fromAppId,
modelVersionId, topNodeType, startNodeFilterHash.get(0), aaiExtMap.getApiVersion(), resourceVersion );
@@ -836,7 +857,7 @@ public class SearchGraph {
return null;
}
@SuppressWarnings("unchecked")
- List<DynamicEntity> instanceFilter = instanceFilters.get("instanceFilter");
+ List<DynamicEntity> instanceFilter = (ArrayList<DynamicEntity>)instanceFilters.get("instanceFilter");
String resourceVersion = null;
for (DynamicEntity instFilt : instanceFilter) {
@@ -855,7 +876,7 @@ public class SearchGraph {
if (anyEnt.isSet(propName)) {
thisNodeFilterHash.put(nodeType + "." + CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_HYPHEN, propName), anyEnt.get(propName));
if (propName.equals("resourceVersion") && resourceVersion == null) {
- resourceVersion = anyEnt.get(propName);
+ resourceVersion = (String)anyEnt.get(propName);
}
}
}
@@ -879,7 +900,7 @@ public class SearchGraph {
return;
}
@SuppressWarnings("unchecked")
- List<DynamicEntity> secondaryFilter = secondaryFilts.get("secondaryFilt");
+ List<DynamicEntity> secondaryFilter = (ArrayList<DynamicEntity>)secondaryFilts.get("secondaryFilt");
for (DynamicEntity secondaryFilt : secondaryFilter) {
List<DynamicEntity> any = secondaryFilt.get("any");
@@ -1061,8 +1082,8 @@ public class SearchGraph {
String modelName = null;
try {
// Try to get the modelName if we can. Otherwise, do not fail, just return what we have already.
- String modelInvariantIdLocal = vert.<String>property("model-invariant-id-local").orElse(null); // this one points at a model
- String modelVersionIdLocal = vert.<String>property("model-version-id-local").orElse(null); // this one points at a model-ver
+ String modelInvariantIdLocal = (String)vert.<String>property("model-invariant-id-local").orElse(null); // this one points at a model
+ String modelVersionIdLocal = (String)vert.<String>property("model-version-id-local").orElse(null); // this one points at a model-ver
if ( (modelInvariantIdLocal != null && modelVersionIdLocal != null)
&& (modelInvariantIdLocal.length() > 0 && modelVersionIdLocal.length() > 0) ) {
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java
index 9029366..6801aee 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java
@@ -8,7 +8,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -20,21 +20,20 @@
package org.onap.aai.interceptors;
public final class AAIHeaderProperties {
-
- public static final String REQUEST_CONTEXT = "aai-request-context";
-
- public static final String HTTP_METHOD_OVERRIDE = "X-HTTP-Method-Override";
-
- public static final String TRANSACTION_ID = "X-TransactionId";
-
- public static final String FROM_APP_ID = "X-FromAppId";
-
- public static final String AAI_TX_ID = "X-AAI-TXID";
-
- public static final String AAI_REQUEST = "X-REQUEST";
-
- public static final String AAI_REQUEST_TS = "X-REQUEST-TS";
-
- private AAIHeaderProperties() {
- }
+
+ private AAIHeaderProperties(){}
+
+ public static final String REQUEST_CONTEXT = "aai-request-context";
+
+ public static final String HTTP_METHOD_OVERRIDE = "X-HTTP-Method-Override";
+
+ public static final String TRANSACTION_ID = "X-TransactionId";
+
+ public static final String FROM_APP_ID = "X-FromAppId";
+
+ public static final String AAI_TX_ID = "X-AAI-TXID";
+
+ public static final String AAI_REQUEST = "X-REQUEST";
+
+ public static final String AAI_REQUEST_TS = "X-REQUEST-TS";
}
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java
index 90ab047..146f847 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java
@@ -8,7 +8,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -19,14 +19,22 @@
*/
package org.onap.aai.interceptors.post;
+/**
+ * Response filters run in reverse (descending) priority order,
+ * so with the values below they execute in this order:
+ * HEADER_MANIPULATION, RESPONSE_TRANS_LOGGING, RESET_LOGGING_CONTEXT,
+ * and finally INVALID_RESPONSE_STATUS.
+ */
public final class AAIResponseFilterPriority {
+
+ private AAIResponseFilterPriority() {}
+
+ public static final int INVALID_RESPONSE_STATUS = 1000;
- public static final int HEADER_MANIPULATION = 1000;
+ public static final int RESET_LOGGING_CONTEXT = 2000;
- public static final int RESPONSE_TRANS_LOGGING = 2000;
+ public static final int RESPONSE_TRANS_LOGGING = 3000;
- public static final int RESET_LOGGING_CONTEXT = 3000;
+ public static final int HEADER_MANIPULATION = 4000;
- private AAIResponseFilterPriority() {
- }
}
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/post/InvalidResponseStatus.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/InvalidResponseStatus.java
new file mode 100644
index 0000000..7fd0b9c
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/InvalidResponseStatus.java
@@ -0,0 +1,65 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.post;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.logging.ErrorLogHelper;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import javax.ws.rs.core.MediaType;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+@Priority(AAIResponseFilterPriority.INVALID_RESPONSE_STATUS)
+public class InvalidResponseStatus extends AAIContainerFilter implements ContainerResponseFilter {
+
+ @Override
+ public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
+ throws IOException {
+
+ if(responseContext.getStatus() == 405){
+
+ responseContext.setStatus(400);
+ AAIException e = new AAIException("AAI_3012");
+ ArrayList<String> templateVars = new ArrayList<>();
+
+ List<MediaType> mediaTypeList = new ArrayList<>();
+
+ String contentType = responseContext.getHeaderString("Content-Type");
+
+ if (contentType == null) {
+ mediaTypeList.add(MediaType.APPLICATION_XML_TYPE);
+ } else {
+ mediaTypeList.add(MediaType.valueOf(contentType));
+ }
+
+ String message = ErrorLogHelper.getRESTAPIErrorResponse(mediaTypeList, e, templateVars);
+
+ responseContext.setEntity(message);
+ }
+
+ }
+
+}
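
For illustration only, a sketch of the decision the new InvalidResponseStatus filter makes: a 405 is rewritten to 400 with an AAI_3012 error body, and the error media type falls back to XML when the response carries no Content-Type. MediaType constants are replaced with plain strings here.

    import java.util.Optional;

    public class InvalidResponseStatusSketch {

        // Mirrors the filter's media-type fallback: use the response Content-Type
        // when present, otherwise default to application/xml.
        static String pickErrorMediaType(String responseContentType) {
            return Optional.ofNullable(responseContentType).orElse("application/xml");
        }

        public static void main(String[] args) {
            int status = 405;
            if (status == 405) {
                status = 400; // the filter rewrites 405 to 400 and attaches an AAI_3012 error entity
            }
            System.out.println(status + " " + pickErrorMediaType(null));               // 400 application/xml
            System.out.println(status + " " + pickErrorMediaType("application/json")); // 400 application/json
        }
    }
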
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java
index f8c5644..baf28ad 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java
@@ -19,21 +19,21 @@
*/
package org.onap.aai.interceptors.post;
-import java.io.IOException;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Priority;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerResponseContext;
import javax.ws.rs.container.ContainerResponseFilter;
-
-import org.onap.aai.interceptors.AAIContainerFilter;
-import org.onap.aai.logging.LoggingContext;
-import org.onap.aai.logging.LoggingContext.StatusCode;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
+import javax.ws.rs.core.Response.Status;
+import javax.ws.rs.core.Response.StatusType;
+import java.io.IOException;
@Priority(AAIResponseFilterPriority.RESET_LOGGING_CONTEXT)
public class ResetLoggingContext extends AAIContainerFilter implements ContainerResponseFilter {
@@ -47,23 +47,52 @@ public class ResetLoggingContext extends AAIContainerFilter implements Container
public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
throws IOException {
- this.cleanLoggingContext();
+ this.cleanLoggingContext(responseContext);
}
- private void cleanLoggingContext() {
- final String responseCode = LoggingContext.responseCode();
- String url = httpServletRequest.getRequestURL().toString();
+ private void cleanLoggingContext(ContainerResponseContext responseContext) {
+ //String url = httpServletRequest.getRequestURL().toString();
+ boolean success = true;
+ String uri = httpServletRequest.getRequestURI();
+ String queryString = httpServletRequest.getQueryString();
- if (responseCode != null && responseCode.startsWith("ERR.")) {
- LoggingContext.statusCode(StatusCode.ERROR);
- LOGGER.error(url + " call failed with responseCode=" + responseCode);
- } else {
- LoggingContext.statusCode(StatusCode.COMPLETE);
- LOGGER.info(url + " call succeeded");
+ if(queryString != null && !queryString.isEmpty()){
+ uri = uri + "?" + queryString;
}
+ // For now, we use the HTTP status code;
+ // this may change once the requirements for response codes are defined.
+ int httpStatusCode = responseContext.getStatus();
+ if ( httpStatusCode < 100 || httpStatusCode > 599 ) {
+ httpStatusCode = Status.INTERNAL_SERVER_ERROR.getStatusCode();
+ }
+ LoggingContext.responseCode(Integer.toString(httpStatusCode));
+
+ StatusType sType = responseContext.getStatusInfo();
+ if ( sType != null ) {
+ Status.Family sFamily = sType.getFamily();
+ if ( ! ( Status.Family.SUCCESSFUL.equals(sFamily) ||
+ ( Status.NOT_FOUND.equals(Status.fromStatusCode(httpStatusCode)) ) ) ) {
+ success = false;
+ }
+ }
+ else {
+ if ( (httpStatusCode < 200 || httpStatusCode > 299) && ( ! ( Status.NOT_FOUND.equals(Status.fromStatusCode(httpStatusCode) ) ) ) ) {
+ success = false;
+ }
+ }
+ if (success) {
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ LOGGER.info(uri + " call succeeded");
+ }
+ else {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LOGGER.error(uri + " call failed with responseCode=" + httpStatusCode);
+ }
LoggingContext.clear();
+
+
}
}
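
A small sketch (assumptions: plain ints instead of javax.ws.rs Status objects) of the success rule the reworked cleanLoggingContext() now applies before choosing COMPLETE vs ERROR.

    public class ResponseStatusSketch {

        // Mirrors the filter: 2xx is a success, 404 is also treated as a success,
        // everything else (including out-of-range codes, which are clamped to 500)
        // is logged as an error.
        static boolean isSuccess(int httpStatusCode) {
            if (httpStatusCode < 100 || httpStatusCode > 599) {
                httpStatusCode = 500;
            }
            boolean successfulFamily = httpStatusCode >= 200 && httpStatusCode <= 299;
            return successfulFamily || httpStatusCode == 404;
        }

        public static void main(String[] args) {
            System.out.println(isSuccess(200)); // true
            System.out.println(isSuccess(404)); // true  (logged as COMPLETE, not ERROR)
            System.out.println(isSuccess(500)); // false
            System.out.println(isSuccess(42));  // false (clamped to 500)
        }
    }
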
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java
index 6d6dbd8..9d4efe7 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java
@@ -19,19 +19,20 @@
*/
package org.onap.aai.interceptors.post;
-import java.io.IOException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
import javax.annotation.Priority;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerResponseContext;
import javax.ws.rs.container.ContainerResponseFilter;
-
-import org.onap.aai.interceptors.AAIContainerFilter;
-import org.onap.aai.interceptors.AAIHeaderProperties;
+import javax.ws.rs.core.MediaType;
+import java.io.IOException;
@Priority(AAIResponseFilterPriority.HEADER_MANIPULATION)
public class ResponseHeaderManipulation extends AAIContainerFilter implements ContainerResponseFilter {
+ private static final String DEFAULT_XML_TYPE = MediaType.APPLICATION_XML;
@Override
public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
@@ -43,7 +44,21 @@ public class ResponseHeaderManipulation extends AAIContainerFilter implements Co
private void updateResponseHeaders(ContainerRequestContext requestContext,
ContainerResponseContext responseContext) {
+
responseContext.getHeaders().add(AAIHeaderProperties.AAI_TX_ID, requestContext.getProperty(AAIHeaderProperties.AAI_TX_ID));
+
+ String responseContentType = responseContext.getHeaderString("Content-Type");
+
+ if(responseContentType == null){
+ String acceptType = requestContext.getHeaderString("Accept");
+
+ if(acceptType == null || "*/*".equals(acceptType)){
+ responseContext.getHeaders().putSingle("Content-Type", DEFAULT_XML_TYPE);
+ } else {
+ responseContext.getHeaders().putSingle("Content-Type", acceptType);
+ }
+ }
+
}
}
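
For reference, a standalone sketch of the Content-Type defaulting added to updateResponseHeaders(), with the JAX-RS header maps reduced to plain strings.

    public class ContentTypeDefaultSketch {

        // When the response has no Content-Type, echo the request's Accept header
        // unless it is absent or "*/*", in which case fall back to application/xml.
        static String resolveContentType(String responseContentType, String acceptHeader) {
            if (responseContentType != null) {
                return responseContentType;
            }
            if (acceptHeader == null || "*/*".equals(acceptHeader)) {
                return "application/xml";
            }
            return acceptHeader;
        }

        public static void main(String[] args) {
            System.out.println(resolveContentType(null, null));                       // application/xml
            System.out.println(resolveContentType(null, "*/*"));                      // application/xml
            System.out.println(resolveContentType(null, "application/json"));         // application/json
            System.out.println(resolveContentType("text/plain", "application/json")); // text/plain
        }
    }
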
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java
index a9592c4..547a7c8 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java
@@ -19,16 +19,9 @@
*/
package org.onap.aai.interceptors.post;
-import java.io.IOException;
-import java.util.Objects;
-import java.util.Optional;
-
-import javax.annotation.Priority;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerResponseContext;
-import javax.ws.rs.container.ContainerResponseFilter;
-
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.google.gson.JsonObject;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.interceptors.AAIContainerFilter;
import org.onap.aai.interceptors.AAIHeaderProperties;
@@ -36,9 +29,14 @@ import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.util.AAIConfig;
import org.springframework.beans.factory.annotation.Autowired;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.google.gson.JsonObject;
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import java.io.IOException;
+import java.util.Objects;
+import java.util.Optional;
@Priority(AAIResponseFilterPriority.RESPONSE_TRANS_LOGGING)
public class ResponseTransactionLogging extends AAIContainerFilter implements ContainerResponseFilter {
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java
index ae75776..c3d9d3b 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java
@@ -8,7 +8,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -20,19 +20,27 @@
package org.onap.aai.interceptors.pre;
public final class AAIRequestFilterPriority {
+
+ private AAIRequestFilterPriority() {}
+
+ public static final int REQUEST_TRANS_LOGGING = 1000;
+
+ public static final int HEADER_VALIDATION = 2000;
- public static final int REQUEST_TRANS_LOGGING = 1000;
+ public static final int SET_LOGGING_CONTEXT = 3000;
- public static final int HEADER_VALIDATION = 2000;
+ public static final int HTTP_HEADER = 4000;
- public static final int SET_LOGGING_CONTEXT = 3000;
+ public static final int LATEST = 4250;
- public static final int AUTHORIZATION = 4000;
+ public static final int AUTHORIZATION = 4500;
- public static final int HEADER_MANIPULATION = 5000;
+ public static final int RETIRED_SERVICE = 5000;
- public static final int REQUEST_MODIFICATION = 6000;
+ public static final int VERSION = 5500;
+
+ public static final int HEADER_MANIPULATION = 6000;
+
+ public static final int REQUEST_MODIFICATION = 7000;
- private AAIRequestFilterPriority() {
- }
}
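
For context, a trivial sketch listing the renumbered request-filter chain. Request filters run in ascending @Priority order, lowest value first, which is why the new LATEST, RETIRED_SERVICE and VERSION slots sit between HTTP_HEADER and HEADER_MANIPULATION.

    import java.util.Map;
    import java.util.TreeMap;

    public class RequestFilterOrderSketch {

        public static void main(String[] args) {
            // Values mirror AAIRequestFilterPriority after this change.
            Map<Integer, String> priorities = new TreeMap<>();
            priorities.put(1000, "REQUEST_TRANS_LOGGING");
            priorities.put(2000, "HEADER_VALIDATION");
            priorities.put(3000, "SET_LOGGING_CONTEXT");
            priorities.put(4000, "HTTP_HEADER");
            priorities.put(4250, "LATEST");
            priorities.put(4500, "AUTHORIZATION");
            priorities.put(5000, "RETIRED_SERVICE");
            priorities.put(5500, "VERSION");
            priorities.put(6000, "HEADER_MANIPULATION");
            priorities.put(7000, "REQUEST_MODIFICATION");

            // TreeMap iterates in ascending key order, i.e. execution order.
            priorities.forEach((priority, name) -> System.out.println(priority + " " + name));
        }
    }
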
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java
index dfc4376..d6b6080 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java
@@ -19,11 +19,10 @@
*/
package org.onap.aai.interceptors.pre;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-import java.util.UUID;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.ErrorLogHelper;
import javax.annotation.Priority;
import javax.ws.rs.container.ContainerRequestContext;
@@ -31,11 +30,11 @@ import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
-
-import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.interceptors.AAIContainerFilter;
-import org.onap.aai.interceptors.AAIHeaderProperties;
-import org.onap.aai.logging.ErrorLogHelper;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.UUID;
@PreMatching
@Priority(AAIRequestFilterPriority.HEADER_VALIDATION)
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/HttpHeaderInterceptor.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/HttpHeaderInterceptor.java
new file mode 100644
index 0000000..ee04453
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/HttpHeaderInterceptor.java
@@ -0,0 +1,50 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+
+import javax.annotation.Priority;
+import javax.ws.rs.HttpMethod;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import java.io.IOException;
+
+/**
+ * The Class HttpHeaderInterceptor
+ */
+@PreMatching
+@Priority(AAIRequestFilterPriority.HTTP_HEADER)
+public class HttpHeaderInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+ public static final String patchMethod = "PATCH";
+
+ @Override
+ public void filter(ContainerRequestContext containerRequestContext) throws IOException {
+ String overrideMethod = containerRequestContext.getHeaderString(AAIHeaderProperties.HTTP_METHOD_OVERRIDE);
+ String httpMethod = containerRequestContext.getMethod();
+
+ if (HttpMethod.POST.equalsIgnoreCase(httpMethod) && patchMethod.equalsIgnoreCase(overrideMethod)) {
+ containerRequestContext.setMethod(patchMethod);
+ }
+ }
+
+}
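
A hedged client-side sketch of how the new HttpHeaderInterceptor is exercised: a POST carrying a method-override header is promoted to PATCH on the server. The endpoint URL, the request body, and the literal header name "X-HTTP-Method-Override" are assumptions; the actual constant behind AAIHeaderProperties.HTTP_METHOD_OVERRIDE is not shown in this diff.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class MethodOverrideClientSketch {

        public static void main(String[] args) throws Exception {
            // Hypothetical resource URL.
            URL url = new URL("https://aai.example.org:8446/aai/v14/network/generic-vnfs/generic-vnf/example-vnf");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");                               // HttpURLConnection cannot send PATCH directly
            conn.setRequestProperty("X-HTTP-Method-Override", "PATCH");  // interceptor rewrites POST to PATCH
            conn.setRequestProperty("Content-Type", "application/merge-patch+json");
            conn.setDoOutput(true);
            byte[] body = "{\"in-maint\": true}".getBytes(StandardCharsets.UTF_8);
            try (OutputStream out = conn.getOutputStream()) {
                out.write(body);
            }
            System.out.println(conn.getResponseCode());
        }
    }
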
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java
index 8fe2d6e..1f8a6ec 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java
@@ -19,11 +19,13 @@
*/
package org.onap.aai.interceptors.pre;
+import org.onap.aai.Profiles;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.interceptors.AAIContainerFilter;
import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.service.AuthorizationService;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Profile;
import javax.annotation.Priority;
import javax.ws.rs.container.ContainerRequestContext;
@@ -36,6 +38,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
+@Profile(Profiles.ONE_WAY_SSL)
@PreMatching
@Priority(AAIRequestFilterPriority.AUTHORIZATION)
public class OneWaySslAuthorization extends AAIContainerFilter implements ContainerRequestFilter {
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java
index 7f74f1e..d2dcc0b 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java
@@ -19,35 +19,25 @@
*/
package org.onap.aai.interceptors.pre;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
import javax.annotation.Priority;
-import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import javax.ws.rs.core.MultivaluedMap;
-
-import org.onap.aai.interceptors.AAIContainerFilter;
-import org.onap.aai.interceptors.AAIHeaderProperties;
-import org.springframework.beans.factory.annotation.Autowired;
+import java.util.Collections;
+import java.util.regex.Matcher;
@PreMatching
@Priority(AAIRequestFilterPriority.HEADER_MANIPULATION)
public class RequestHeaderManipulation extends AAIContainerFilter implements ContainerRequestFilter {
- @Autowired
- private HttpServletRequest httpServletRequest;
-
- private static final Pattern versionedEndpoint = Pattern.compile("^/aai/(v\\d+)");
-
@Override
- public void filter(ContainerRequestContext requestContext) throws IOException {
+ public void filter(ContainerRequestContext requestContext) {
- String uri = httpServletRequest.getRequestURI();
+ String uri = requestContext.getUriInfo().getPath();
this.addRequestContext(uri, requestContext.getHeaders());
}
@@ -56,7 +46,7 @@ public class RequestHeaderManipulation extends AAIContainerFilter implements Con
String rc = "";
- Matcher match = versionedEndpoint.matcher(uri);
+ Matcher match = VersionInterceptor.EXTRACT_VERSION_PATTERN.matcher(uri);
if (match.find()) {
rc = match.group(1);
}
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java
index 50e87b0..acd1305 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java
@@ -19,12 +19,7 @@
*/
package org.onap.aai.interceptors.pre;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
+import org.onap.aai.interceptors.AAIContainerFilter;
import javax.annotation.Priority;
import javax.ws.rs.container.ContainerRequestContext;
@@ -32,11 +27,15 @@ import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriBuilder;
-
-import org.onap.aai.interceptors.AAIContainerFilter;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
@PreMatching
-@Priority(AAIRequestFilterPriority.HEADER_VALIDATION)
+@Priority(AAIRequestFilterPriority.REQUEST_MODIFICATION)
public class RequestModification extends AAIContainerFilter implements ContainerRequestFilter {
@Override
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java
index 011a041..6c86f19 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java
@@ -19,20 +19,7 @@
*/
package org.onap.aai.interceptors.pre;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Random;
-import java.util.UUID;
-
-import javax.annotation.Priority;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.container.ContainerRequestContext;
-import javax.ws.rs.container.ContainerRequestFilter;
-import javax.ws.rs.container.PreMatching;
-import javax.ws.rs.core.MediaType;
-
+import com.google.gson.JsonObject;
import org.glassfish.jersey.message.internal.ReaderWriter;
import org.glassfish.jersey.server.ContainerException;
import org.onap.aai.exceptions.AAIException;
@@ -42,8 +29,21 @@ import org.onap.aai.util.AAIConfig;
import org.onap.aai.util.AAIConstants;
import org.onap.aai.util.HbaseSaltPrefixer;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.util.StringUtils;
-import com.google.gson.JsonObject;
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MediaType;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.SecureRandom;
+import java.util.Random;
+import java.util.UUID;
@PreMatching
@Priority(AAIRequestFilterPriority.REQUEST_TRANS_LOGGING)
@@ -52,6 +52,13 @@ public class RequestTransactionLogging extends AAIContainerFilter implements Con
@Autowired
private HttpServletRequest httpServletRequest;
+ private static final String DEFAULT_CONTENT_TYPE = MediaType.APPLICATION_JSON;
+ private static final String DEFAULT_RESPONSE_TYPE = MediaType.APPLICATION_XML;
+
+ private static final String CONTENT_TYPE = "Content-Type";
+ private static final String ACCEPT = "Accept";
+ private static final String TEXT_PLAIN = "text/plain";
+
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
@@ -60,17 +67,34 @@ public class RequestTransactionLogging extends AAIContainerFilter implements Con
this.addToRequestContext(requestContext, AAIHeaderProperties.AAI_TX_ID, fullId);
this.addToRequestContext(requestContext, AAIHeaderProperties.AAI_REQUEST, this.getRequest(requestContext, fullId));
this.addToRequestContext(requestContext, AAIHeaderProperties.AAI_REQUEST_TS, currentTimeStamp);
+ this.addDefaultContentType(requestContext);
}
private void addToRequestContext(ContainerRequestContext requestContext, String name, String aaiTxIdToHeader) {
requestContext.setProperty(name, aaiTxIdToHeader);
}
+ private void addDefaultContentType(ContainerRequestContext requestContext) {
+
+ String contentType = requestContext.getHeaderString(CONTENT_TYPE);
+ String acceptType = requestContext.getHeaderString(ACCEPT);
+
+ if(contentType == null || contentType.contains(TEXT_PLAIN)){
+ requestContext.getHeaders().putSingle(CONTENT_TYPE, DEFAULT_CONTENT_TYPE);
+ }
+
+ if(StringUtils.isEmpty(acceptType) || acceptType.contains(TEXT_PLAIN)){
+ requestContext.getHeaders().putSingle(ACCEPT, DEFAULT_RESPONSE_TYPE);
+ }
+ }
+
private String getAAITxIdToHeader(String currentTimeStamp) {
String txId = UUID.randomUUID().toString();
try {
+ Random rand = new SecureRandom();
+ int number = rand.nextInt(99999);
txId = HbaseSaltPrefixer.getInstance().prependSalt(AAIConfig.get(AAIConstants.AAI_NODENAME) + "-"
- + currentTimeStamp + "-" + new Random(System.currentTimeMillis()).nextInt(99999));
+ + currentTimeStamp + "-" + number);
} catch (AAIException e) {
}
@@ -82,14 +106,7 @@ public class RequestTransactionLogging extends AAIContainerFilter implements Con
JsonObject request = new JsonObject();
request.addProperty("ID", fullId);
request.addProperty("Http-Method", requestContext.getMethod());
- String contentType = httpServletRequest.getContentType();
-
- if(contentType == null){
- contentType = MediaType.APPLICATION_JSON;
- requestContext.getHeaders().add("Content-Type", contentType);
- }
-
- request.addProperty("Content-Type", contentType);
+ request.addProperty(CONTENT_TYPE, httpServletRequest.getContentType());
request.addProperty("Headers", requestContext.getHeaders().toString());
ByteArrayOutputStream out = new ByteArrayOutputStream();
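
A minimal sketch of the transaction-id construction after this change, which draws the random suffix from SecureRandom instead of a clock-seeded Random. HbaseSaltPrefixer and AAIConfig are left out; the node name and timestamp format are placeholders.

    import java.security.SecureRandom;
    import java.util.UUID;

    public class TransactionIdSketch {

        // nodename-timestamp-random, as assembled in getAAITxIdToHeader().
        static String buildTxId(String nodeName, String currentTimeStamp) {
            SecureRandom rand = new SecureRandom();
            int number = rand.nextInt(99999);
            return nodeName + "-" + currentTimeStamp + "-" + number;
        }

        public static void main(String[] args) {
            String fallback = UUID.randomUUID().toString(); // used when the config lookup throws AAIException
            System.out.println(buildTxId("aai-node-1", "20180813-120000"));
            System.out.println(fallback);
        }
    }
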
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RetiredInterceptor.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RetiredInterceptor.java
new file mode 100644
index 0000000..643793d
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/RetiredInterceptor.java
@@ -0,0 +1,148 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.service.RetiredService;
+import org.onap.aai.util.AAIConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+// The retired check could be cached per URI so the pattern matching runs only
+// the first time a given URI is seen, instead of on every request
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.RETIRED_SERVICE)
+public class RetiredInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+
+ private static final Pattern VERSION_PATTERN = Pattern.compile("v\\d+|latest");
+
+ private RetiredService retiredService;
+
+ private String basePath;
+
+ @Autowired
+ public RetiredInterceptor(RetiredService retiredService, @Value("${schema.uri.base.path}") String basePath){
+ this.retiredService = retiredService;
+ this.basePath = basePath;
+ if(!basePath.endsWith("/")){
+ this.basePath = basePath + "/";
+ }
+ }
+ @Override
+ public void filter(ContainerRequestContext containerRequestContext) throws IOException {
+
+ String requestURI = containerRequestContext.getUriInfo().getAbsolutePath().getPath();
+
+ String version = extractVersionFromPath(requestURI);
+
+ List<Pattern> retiredAllVersionList = retiredService.getRetiredAllVersionList();
+
+
+ if(checkIfUriRetired(containerRequestContext, retiredAllVersionList, version, requestURI, "")){
+ return;
+ }
+
+ List<Pattern> retiredVersionList = retiredService.getRetiredPatterns();
+
+ checkIfUriRetired(containerRequestContext, retiredVersionList, version, requestURI);
+ }
+
+ public boolean checkIfUriRetired(ContainerRequestContext containerRequestContext,
+ List<Pattern> retiredPatterns,
+ String version,
+ String requestURI,
+ String message){
+
+
+ for(Pattern retiredPattern : retiredPatterns){
+ if(retiredPattern.matcher(requestURI).matches()){
+ AAIException e;
+
+ if(message == null){
+ e = new AAIException("AAI_3007");
+ } else {
+ e = new AAIException("AAI_3015");
+ }
+
+ ArrayList<String> templateVars = new ArrayList<>();
+
+ if (templateVars.isEmpty()) {
+ templateVars.add("PUT");
+ if(requestURI != null){
+ requestURI = requestURI.replaceAll(basePath, "");
+ }
+ templateVars.add(requestURI);
+ if(message == null){
+ templateVars.add(version);
+ templateVars.add(AAIConfig.get("aai.default.api.version", ""));
+ }
+ }
+
+ Response response = Response
+ .status(e.getErrorObject().getHTTPResponseCode())
+ .entity(
+ ErrorLogHelper
+ .getRESTAPIErrorResponse(
+ containerRequestContext.getAcceptableMediaTypes(), e, templateVars
+ )
+ )
+ .build();
+
+ containerRequestContext.abortWith(response);
+
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ public boolean checkIfUriRetired(ContainerRequestContext containerRequestContext,
+ List<Pattern> retiredPatterns,
+ String version,
+ String requestURI){
+ return checkIfUriRetired(containerRequestContext, retiredPatterns, version, requestURI, null);
+ }
+
+ protected String extractVersionFromPath(String requestURI) {
+ Matcher versionMatcher = VERSION_PATTERN.matcher(requestURI);
+ String version = null;
+
+ if(versionMatcher.find()){
+ version = versionMatcher.group(0);
+ }
+ return version;
+ }
+
+}
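
For illustration, a sketch of the core check the new RetiredInterceptor performs: the request URI is tested against a list of retired patterns, and a match causes the request to be aborted with an AAI_3007 or AAI_3015 error. The example patterns below are hypothetical; real ones come from RetiredService.

    import java.util.Arrays;
    import java.util.List;
    import java.util.regex.Pattern;

    public class RetiredUriSketch {

        // True when the URI matches a retired pattern, i.e. when the interceptor aborts.
        static boolean isRetired(String requestUri, List<Pattern> retiredPatterns) {
            return retiredPatterns.stream().anyMatch(p -> p.matcher(requestUri).matches());
        }

        public static void main(String[] args) {
            List<Pattern> retired = Arrays.asList(
                    Pattern.compile("^/aai/v[2-6]/.*$"),                                          // version-specific retirement
                    Pattern.compile("^/aai/v\\d+/service-design-and-creation/named-queries.*$")); // retired for all versions
            System.out.println(isRetired("/aai/v4/network/vces/vce/abc", retired));  // true
            System.out.println(isRetired("/aai/v14/network/generic-vnfs", retired)); // false
        }
    }
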
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java
index c991017..4b0f18a 100644
--- a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java
@@ -19,19 +19,18 @@
*/
package org.onap.aai.interceptors.pre;
-import java.io.IOException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.LoggingContext;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.env.Environment;
import javax.annotation.Priority;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
-
-import org.onap.aai.interceptors.AAIContainerFilter;
-import org.onap.aai.interceptors.AAIHeaderProperties;
-import org.onap.aai.logging.LoggingContext;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.core.env.Environment;
+import java.io.IOException;
@PreMatching
@Priority(AAIRequestFilterPriority.SET_LOGGING_CONTEXT)
@@ -64,6 +63,7 @@ public class SetLoggingContext extends AAIContainerFilter implements ContainerRe
LoggingContext.component(fromAppId);
LoggingContext.serviceName(httpMethod + " " + uri);
LoggingContext.targetServiceName(httpMethod + " " + uri);
+ LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
}
}
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/TwoWaySslAuthorization.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/TwoWaySslAuthorization.java
new file mode 100644
index 0000000..bc03082
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/TwoWaySslAuthorization.java
@@ -0,0 +1,185 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.auth.AAIAuthCore;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.restcore.HttpMethod;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Profile;
+
+import javax.annotation.Priority;
+import javax.security.auth.x500.X500Principal;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.security.cert.X509Certificate;
+import java.util.*;
+import java.util.stream.Collectors;
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.AUTHORIZATION)
+@Profile("two-way-ssl")
+public class TwoWaySslAuthorization extends AAIContainerFilter implements ContainerRequestFilter {
+
+ @Autowired
+ private HttpServletRequest httpServletRequest;
+
+ @Autowired
+ private AAIAuthCore aaiAuthCore;
+
+ @Override
+ public void filter(ContainerRequestContext requestContext) {
+
+ Optional<Response> oResp;
+
+ String uri = requestContext.getUriInfo().getAbsolutePath().getPath();
+ String httpMethod = getHttpMethod(requestContext);
+
+ List<MediaType> acceptHeaderValues = requestContext.getAcceptableMediaTypes();
+
+ Optional<String> authUser = getUser(this.httpServletRequest);
+
+ if (authUser.isPresent()) {
+ oResp = this.authorize(uri, httpMethod, acceptHeaderValues, authUser.get(),
+ this.getHaProxyUser(this.httpServletRequest), getCertIssuer(this.httpServletRequest));
+ if (oResp.isPresent()) {
+ requestContext.abortWith(oResp.get());
+ return;
+ }
+ } else {
+ AAIException aaie = new AAIException("AAI_9107");
+ requestContext
+ .abortWith(Response
+ .status(aaie.getErrorObject().getHTTPResponseCode()).entity(ErrorLogHelper
+ .getRESTAPIErrorResponseWithLogging(acceptHeaderValues, aaie, new ArrayList<>()))
+ .build());
+ }
+
+ }
+
+ private String getCertIssuer(HttpServletRequest hsr) {
+ String issuer = hsr.getHeader("X-AAI-SSL-Issuer");
+ if (issuer != null && !issuer.isEmpty()) {
+ // the haproxy header replaces ', ' with '/' and reverses the order on '/'; undo that here.
+ List<String> broken = Arrays.asList(issuer.split("/"));
+ broken = broken.stream().filter(s -> !s.isEmpty()).collect(Collectors.toList());
+ Collections.reverse(broken);
+ issuer = String.join(", ", broken);
+ } else {
+ if (hsr.getAttribute("javax.servlet.request.cipher_suite") != null) {
+ X509Certificate[] certChain = (X509Certificate[]) hsr.getAttribute("javax.servlet.request.X509Certificate");
+ if (certChain != null && certChain.length > 0) {
+ X509Certificate clientCert = certChain[0];
+ issuer = clientCert.getIssuerX500Principal().getName();
+ }
+ }
+ }
+ return issuer;
+ }
+
+ private String getHttpMethod(ContainerRequestContext requestContext) {
+ String httpMethod = requestContext.getMethod();
+ if ("POST".equalsIgnoreCase(httpMethod)
+ && "PATCH".equals(requestContext.getHeaderString(AAIHeaderProperties.HTTP_METHOD_OVERRIDE))) {
+ httpMethod = HttpMethod.MERGE_PATCH.toString();
+ }
+ if (httpMethod.equalsIgnoreCase(HttpMethod.MERGE_PATCH.toString()) || "patch".equalsIgnoreCase(httpMethod)) {
+ httpMethod = HttpMethod.PUT.toString();
+ }
+ return httpMethod;
+ }
+
+ private Optional<String> getUser(HttpServletRequest hsr) {
+ String authUser = null;
+ if (hsr.getAttribute("javax.servlet.request.cipher_suite") != null) {
+ X509Certificate[] certChain = (X509Certificate[]) hsr.getAttribute("javax.servlet.request.X509Certificate");
+
+ /*
+ * If the certificate chain is null or empty, fall back to the
+ * Authorization request header: check that it is not null and that it
+ * starts with "Basic ", then strip that prefix to get the base64
+ * credentials. If those credentials are known to the AAIBasicAuth
+ * singleton, use the username associated with them as authUser.
+ * Otherwise, take the principal from the client certificate and use
+ * that as authUser.
+ */
+
+ if (certChain == null || certChain.length == 0) {
+
+ String authorization = hsr.getHeader("Authorization");
+
+ if (authorization != null && authorization.startsWith("Basic ")) {
+ authUser = authorization.replace("Basic ", "");
+ }
+
+ } else {
+ X509Certificate clientCert = certChain[0];
+ X500Principal subjectDN = clientCert.getSubjectX500Principal();
+ authUser = subjectDN.toString().toLowerCase();
+ }
+ }
+
+ return Optional.ofNullable(authUser);
+ }
+
+ private String getHaProxyUser(HttpServletRequest hsr) {
+ String haProxyUser;
+ if (Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-CN"))
+ || Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-OU"))
+ || Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-O"))
+ || Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-L"))
+ || Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-ST"))
+ || Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-C"))) {
+ haProxyUser = "";
+ } else {
+ haProxyUser = String.format("CN=%s, OU=%s, O=\"%s\", L=%s, ST=%s, C=%s",
+ Objects.toString(hsr.getHeader("X-AAI-SSL-Client-CN"), ""),
+ Objects.toString(hsr.getHeader("X-AAI-SSL-Client-OU"), ""),
+ Objects.toString(hsr.getHeader("X-AAI-SSL-Client-O"), ""),
+ Objects.toString(hsr.getHeader("X-AAI-SSL-Client-L"), ""),
+ Objects.toString(hsr.getHeader("X-AAI-SSL-Client-ST"), ""),
+ Objects.toString(hsr.getHeader("X-AAI-SSL-Client-C"), "")).toLowerCase();
+ }
+ return haProxyUser;
+ }
+
+ private Optional<Response> authorize(String uri, String httpMethod, List<MediaType> acceptHeaderValues,
+ String authUser, String haProxyUser, String issuer) {
+ Response response = null;
+ try {
+ if (!aaiAuthCore.authorize(authUser, uri, httpMethod, haProxyUser, issuer)) {
+ throw new AAIException("AAI_9101", "Request on " + httpMethod + " " + uri + " status is not OK");
+ }
+ } catch (AAIException e) {
+ response = Response.status(e.getErrorObject().getHTTPResponseCode())
+ .entity(ErrorLogHelper.getRESTAPIErrorResponseWithLogging(acceptHeaderValues, e, new ArrayList<>()))
+ .build();
+ }
+ return Optional.ofNullable(response);
+ }
+
+}
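
A self-contained sketch of the issuer normalization done in getCertIssuer() above: haproxy flattens the issuer DN by replacing ", " with "/" and reversing the component order, so the filter splits on "/", drops empties, reverses, and re-joins. The header value shown is made up.

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import java.util.stream.Collectors;

    public class IssuerHeaderSketch {

        static String normalizeIssuer(String haproxyIssuer) {
            List<String> parts = Arrays.stream(haproxyIssuer.split("/"))
                    .filter(s -> !s.isEmpty())
                    .collect(Collectors.toList());
            Collections.reverse(parts);
            return String.join(", ", parts);
        }

        public static void main(String[] args) {
            System.out.println(normalizeIssuer("/C=US/ST=NJ/O=example/OU=aai/CN=example-ca"));
            // CN=example-ca, OU=aai, O=example, ST=NJ, C=US
        }
    }
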
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java
new file mode 100644
index 0000000..df9807c
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java
@@ -0,0 +1,101 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.Response;
+import java.util.ArrayList;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.VERSION)
+public class VersionInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+
+ public static final Pattern EXTRACT_VERSION_PATTERN = Pattern.compile("^(v[1-9][0-9]*).*$");
+
+ private final Set<String> allowedVersions;
+
+ private final SchemaVersions schemaVersions;
+
+ @Autowired
+ public VersionInterceptor(SchemaVersions schemaVersions){
+ this.schemaVersions = schemaVersions;
+ allowedVersions = schemaVersions.getVersions()
+ .stream()
+ .map(SchemaVersion::toString)
+ .collect(Collectors.toSet());
+
+ }
+
+ @Override
+ public void filter(ContainerRequestContext requestContext) {
+
+ String uri = requestContext.getUriInfo().getPath();
+
+ if (uri.startsWith("search") || uri.startsWith("util/echo") || uri.startsWith("tools") || uri.startsWith("recents")) {
+ return;
+ }
+
+ Matcher matcher = EXTRACT_VERSION_PATTERN.matcher(uri);
+
+ String version = null;
+ if(matcher.matches()){
+ version = matcher.group(1);
+ } else {
+ requestContext.abortWith(createInvalidVersionResponse("AAI_3017", requestContext, version));
+ return;
+ }
+
+ if(!allowedVersions.contains(version)){
+ requestContext.abortWith(createInvalidVersionResponse("AAI_3016", requestContext, version));
+ }
+ }
+
+ private Response createInvalidVersionResponse(String errorCode, ContainerRequestContext context, String version) {
+ AAIException e = new AAIException(errorCode);
+ ArrayList<String> templateVars = new ArrayList<>();
+
+ if (templateVars.isEmpty()) {
+ templateVars.add(context.getMethod());
+ templateVars.add(context.getUriInfo().getPath());
+ templateVars.add(version);
+ }
+
+ String entity = ErrorLogHelper.getRESTAPIErrorResponse(context.getAcceptableMediaTypes(), e, templateVars);
+
+ return Response
+ .status(e.getErrorObject().getHTTPResponseCode())
+ .entity(entity)
+ .build();
+ }
+}
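
A sketch of the version check performed by the new VersionInterceptor, reusing its EXTRACT_VERSION_PATTERN. The allowed-version set is a placeholder for what SchemaVersions actually supplies at runtime.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class VersionCheckSketch {

        static final Pattern EXTRACT_VERSION_PATTERN = Pattern.compile("^(v[1-9][0-9]*).*$");

        // Extract the leading version token from the relative path and validate it.
        static String check(String path, Set<String> allowedVersions) {
            Matcher matcher = EXTRACT_VERSION_PATTERN.matcher(path);
            if (!matcher.matches()) {
                return "AAI_3017"; // no parseable version in the URI
            }
            String version = matcher.group(1);
            return allowedVersions.contains(version) ? version : "AAI_3016"; // unsupported version
        }

        public static void main(String[] args) {
            Set<String> allowed = new HashSet<>(Arrays.asList("v11", "v12", "v13", "v14"));
            System.out.println(check("v14/query", allowed)); // v14
            System.out.println(check("v2/query", allowed));  // AAI_3016
            System.out.println(check("query", allowed));     // AAI_3017
        }
    }
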
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/VersionLatestInterceptor.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/VersionLatestInterceptor.java
new file mode 100644
index 0000000..124bc1a
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/pre/VersionLatestInterceptor.java
@@ -0,0 +1,57 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import java.net.URI;
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.LATEST)
+public class VersionLatestInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+
+ private final SchemaVersions schemaVersions;
+
+ @Autowired
+ public VersionLatestInterceptor(SchemaVersions schemaVersions){
+ this.schemaVersions = schemaVersions;
+ }
+
+ @Override
+ public void filter(ContainerRequestContext requestContext) {
+
+ String uri = requestContext.getUriInfo().getPath();
+
+ if(uri.startsWith("latest")){
+ String rawPath = requestContext.getUriInfo().getRequestUri().getRawPath();
+ String updatedPath = rawPath.replaceFirst("latest",schemaVersions.getDefaultVersion().toString());
+ URI latest = requestContext.getUriInfo().getRequestUriBuilder().replacePath(updatedPath).build();
+ requestContext.setRequestUri(latest);
+ return;
+ }
+
+ }
+}
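
A simplified sketch of the "latest" rewrite: the VersionLatestInterceptor replaces the first "latest" segment of the raw path with the default schema version. Here "v14" stands in for schemaVersions.getDefaultVersion(), and the JAX-RS UriBuilder plumbing is omitted.

    public class LatestRewriteSketch {

        static String rewrite(String rawPath, String defaultVersion) {
            if (rawPath.startsWith("latest")) {
                return rawPath.replaceFirst("latest", defaultVersion);
            }
            return rawPath;
        }

        public static void main(String[] args) {
            System.out.println(rewrite("latest/query", "v14")); // v14/query
            System.out.println(rewrite("v12/query", "v14"));    // unchanged
        }
    }
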
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/DslConsumer.java b/aai-traversal/src/main/java/org/onap/aai/rest/DslConsumer.java
new file mode 100644
index 0000000..bdca63b
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/DslConsumer.java
@@ -0,0 +1,187 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest;
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.Encoded;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import javax.ws.rs.core.Response.Status;
+
+import org.onap.aai.concurrent.AaiCallable;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.StopWatch;
+import org.onap.aai.rest.db.HttpEntry;
+import org.onap.aai.rest.dsl.DslQueryProcessor;
+import org.onap.aai.rest.search.GenericQueryProcessor;
+import org.onap.aai.rest.search.GremlinServerSingleton;
+import org.onap.aai.rest.search.QueryProcessorType;
+import org.onap.aai.restcore.HttpMethod;
+import org.onap.aai.restcore.RESTAPI;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.queryformats.Format;
+import org.onap.aai.serialization.queryformats.FormatFactory;
+import org.onap.aai.serialization.queryformats.Formatter;
+import org.onap.aai.serialization.queryformats.SubGraphStyle;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.TraversalConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+
+@Path("{version: v[1-9][0-9]*|latest}/dsl")
+public class DslConsumer extends RESTAPI {
+
+ private HttpEntry traversalUriHttpEntry;
+
+ private QueryProcessorType processorType = QueryProcessorType.LOCAL_GROOVY;
+
+ private static final String TARGET_ENTITY = "DB";
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DslConsumer.class);
+
+ private DslQueryProcessor dslQueryProcessor;
+
+ private SchemaVersions schemaVersions;
+
+ private String basePath;
+
+ private GremlinServerSingleton gremlinServerSingleton;
+
+ @Autowired
+ public DslConsumer(HttpEntry traversalUriHttpEntry, DslQueryProcessor dslQueryProcessor,
+ SchemaVersions schemaVersions, GremlinServerSingleton gremlinServerSingleton,
+ @Value("${schema.uri.base.path}") String basePath) {
+ this.traversalUriHttpEntry = traversalUriHttpEntry;
+ this.dslQueryProcessor = dslQueryProcessor;
+ this.schemaVersions = schemaVersions;
+ this.gremlinServerSingleton = gremlinServerSingleton;
+ this.basePath = basePath;
+ }
+
+ @PUT
+ @Consumes({ MediaType.APPLICATION_JSON })
+ @Produces({ MediaType.APPLICATION_JSON })
+ public Response executeQuery(String content, @PathParam("version") String versionParam,
+ @PathParam("uri") @Encoded String uri, @DefaultValue("graphson") @QueryParam("format") String queryFormat,
+ @DefaultValue("no_op") @QueryParam("subgraph") String subgraph, @Context HttpHeaders headers,
+ @Context UriInfo info, @Context HttpServletRequest req) {
+ return runner(TraversalConstants.AAI_TRAVERSAL_DSL_TIMEOUT_ENABLED,
+ TraversalConstants.AAI_TRAVERSAL_DSL_TIMEOUT_APP, TraversalConstants.AAI_TRAVERSAL_DSL_TIMEOUT_LIMIT,
+ headers, info, HttpMethod.PUT, new AaiCallable<Response>() {
+ @Override
+ public Response process() {
+ return processExecuteQuery(content, versionParam, uri, queryFormat, subgraph, headers, info,
+ req);
+ }
+ });
+ }
+
+ public Response processExecuteQuery(String content, @PathParam("version") String versionParam,
+ @PathParam("uri") @Encoded String uri, @DefaultValue("graphson") @QueryParam("format") String queryFormat,
+ @DefaultValue("no_op") @QueryParam("subgraph") String subgraph, @Context HttpHeaders headers,
+ @Context UriInfo info, @Context HttpServletRequest req) {
+
+ String methodName = "executeDslQuery";
+ String sourceOfTruth = headers.getRequestHeaders().getFirst("X-FromAppId");
+ String realTime = headers.getRequestHeaders().getFirst("Real-Time");
+ Response response;
+ SchemaVersion version = new SchemaVersion(versionParam);
+
+ TransactionalGraphEngine dbEngine = null;
+ try {
+ LoggingContext.save();
+ DBConnectionType type = this.determineConnectionType(sourceOfTruth, realTime);
+ traversalUriHttpEntry.setHttpEntryProperties(version, type);
+ dbEngine = traversalUriHttpEntry.getDbEngine();
+ JsonObject input = new JsonParser().parse(content).getAsJsonObject();
+ JsonElement dslElement = input.get("dsl");
+ String dsl = "";
+ if (dslElement != null) {
+ dsl = dslElement.getAsString();
+ }
+
+ LoggingContext.targetEntity(TARGET_ENTITY);
+ LoggingContext.targetServiceName(methodName);
+ LoggingContext.startTime();
+ StopWatch.conditionalStart();
+
+ GenericQueryProcessor processor = new GenericQueryProcessor.Builder(dbEngine, gremlinServerSingleton)
+ .queryFrom(dsl, "dsl").queryProcessor(dslQueryProcessor).processWith(processorType).create();
+
+ String result = "";
+ SubGraphStyle subGraphStyle = SubGraphStyle.valueOf(subgraph);
+ List<Object> vertices = processor.execute(subGraphStyle);
+
+ DBSerializer serializer = new DBSerializer(version, dbEngine, ModelType.MOXY, sourceOfTruth);
+ Format format = Format.getFormat(queryFormat);
+ FormatFactory ff = new FormatFactory(traversalUriHttpEntry.getLoader(), serializer, schemaVersions,
+ this.basePath);
+
+ Formatter formater = ff.get(format, info.getQueryParameters());
+
+ result = formater.output(vertices).toString();
+
+ double msecs = StopWatch.stopIfStarted();
+ LoggingContext.elapsedTime((long) msecs, TimeUnit.MILLISECONDS);
+ LoggingContext.successStatusFields();
+ LOGGER.info("Completed");
+
+ response = Response.status(Status.OK).type(MediaType.APPLICATION_JSON).entity(result).build();
+
+ } catch (AAIException e) {
+ response = consumerExceptionResponseGenerator(headers, info, HttpMethod.PUT, e);
+ } catch (Exception e) {
+ AAIException ex = new AAIException("AAI_4000", e);
+ response = consumerExceptionResponseGenerator(headers, info, HttpMethod.PUT, ex);
+ } finally {
+ LoggingContext.restoreIfPossible();
+ LoggingContext.successStatusFields();
+ if (dbEngine != null) {
+ dbEngine.rollback();
+ }
+
+ }
+
+ return response;
+ }
+}
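
For illustration, a sketch of the request body the new DslConsumer endpoint expects: a JSON object with a "dsl" member, sent via PUT to {basePath}/{version}/dsl. The DSL string below is invented; the real syntax is defined by DslQueryProcessor.

    import com.google.gson.JsonObject;

    public class DslRequestSketch {

        public static void main(String[] args) {
            JsonObject body = new JsonObject();
            body.addProperty("dsl", "generic-vnf('vnf-id','example-vnf') > vserver*");

            // PUT this to e.g. /aai/v14/dsl?format=graphson with
            // Content-Type and Accept set to application/json.
            System.out.println(body.toString());
        }
    }
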
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/QueryConsumer.java b/aai-traversal/src/main/java/org/onap/aai/rest/QueryConsumer.java
index 6f6abd1..7ce61e3 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/QueryConsumer.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/QueryConsumer.java
@@ -25,7 +25,6 @@ import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.Callable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
@@ -45,10 +44,10 @@ import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.concurrent.AaiCallable;
import org.onap.aai.dbmap.DBConnectionType;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.introspection.ModelType;
-import org.onap.aai.introspection.Version;
import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.parsers.query.QueryParser;
import org.onap.aai.rest.db.HttpEntry;
@@ -69,18 +68,20 @@ import org.onap.aai.serialization.queryformats.SubGraphStyle;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import org.onap.aai.logging.LoggingContext;
-import org.onap.aai.logging.LoggingContext.StatusCode;
import org.onap.aai.logging.StopWatch;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.TraversalConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
-@Path("{version: v9|v1[01234]}/query")
+@Path("{version: v[1-9][0-9]*|latest}/query")
public class QueryConsumer extends RESTAPI {
-
- private static final String DEPTH = "depth";
/** The introspector factory type. */
private ModelType introspectorFactoryType = ModelType.MOXY;
@@ -91,20 +92,42 @@ public class QueryConsumer extends RESTAPI {
private static final String TARGET_ENTITY = "DB";
private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(QueryConsumer.class);
+
+ private HttpEntry traversalUriHttpEntry;
+
+ private SchemaVersions schemaVersions;
+
+ private String basePath;
+
+ private GremlinServerSingleton gremlinServerSingleton;
+
+ @Autowired
+ public QueryConsumer(
+ HttpEntry traversalUriHttpEntry,
+ SchemaVersions schemaVersions,
+ GremlinServerSingleton gremlinServerSingleton,
+ @Value("${schema.uri.base.path}") String basePath
+ ){
+ this.traversalUriHttpEntry = traversalUriHttpEntry;
+ this.schemaVersions = schemaVersions;
+ this.gremlinServerSingleton = gremlinServerSingleton;
+ this.basePath = basePath;
+ }
+
@PUT
@Consumes({ MediaType.APPLICATION_JSON})
@Produces({ MediaType.APPLICATION_JSON})
public Response executeQuery(String content, @PathParam("version")String versionParam, @PathParam("uri") @Encoded String uri, @DefaultValue("graphson") @QueryParam("format") String queryFormat,@DefaultValue("no_op") @QueryParam("subgraph") String subgraph, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req){
- return runner(AAIConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_APP,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
+ return runner(TraversalConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_APP,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
headers,
info,
HttpMethod.GET,
- new Callable<Response>() {
+ new AaiCallable<Response>() {
@Override
- public Response call() {
+ public Response process() {
return processExecuteQuery(content, versionParam, uri, queryFormat, subgraph, headers, info, req);
}
}
@@ -123,7 +146,7 @@ public class QueryConsumer extends RESTAPI {
try {
LoggingContext.save();
this.checkQueryParams(info.getQueryParameters());
- Format format = Format.valueOf(queryFormat);
+ Format format = Format.getFormat(queryFormat);
if (queryProcessor != null) {
processorType = QueryProcessorType.valueOf(queryProcessor);
}
@@ -135,19 +158,17 @@ public class QueryConsumer extends RESTAPI {
JsonElement startElement = input.get("start");
JsonElement queryElement = input.get("query");
JsonElement gremlinElement = input.get("gremlin");
- JsonElement dslElement = input.get("dsl");
List<URI> startURIs = new ArrayList<>();
String queryURI = "";
String gremlin = "";
- String dsl = "";
- Version version = Version.valueOf(versionParam);
+ SchemaVersion version = new SchemaVersion(versionParam);
DBConnectionType type = this.determineConnectionType(sourceOfTruth, realTime);
- HttpEntry httpEntry = new HttpEntry(version, introspectorFactoryType, queryStyle, type);
- dbEngine = httpEntry.getDbEngine();
+ traversalUriHttpEntry.setHttpEntryProperties(version, type);
+ dbEngine = traversalUriHttpEntry.getDbEngine();
if (startElement != null) {
-
+
if (startElement.isJsonArray()) {
for (JsonElement element : startElement.getAsJsonArray()) {
startURIs.add(new URI(element.getAsString()));
@@ -162,9 +183,6 @@ public class QueryConsumer extends RESTAPI {
if (gremlinElement != null) {
gremlin = gremlinElement.getAsString();
}
- if (dslElement != null) {
- dsl = dslElement.getAsString();
- }
URI queryURIObj = new URI(queryURI);
CustomQueryConfig customQueryConfig = getCustomQueryConfig(queryURIObj);
@@ -184,7 +202,7 @@ public class QueryConsumer extends RESTAPI {
LoggingContext.targetServiceName(methodName);
LoggingContext.startTime();
StopWatch.conditionalStart();
-
+
if (!startURIs.isEmpty()) {
Set<Vertex> vertexSet = new LinkedHashSet<>();
QueryParser uriQuery;
@@ -195,19 +213,16 @@ public class QueryConsumer extends RESTAPI {
vertexSet.addAll(vertices);
}
- processor = new GenericQueryProcessor.Builder(dbEngine)
+
+ processor = new GenericQueryProcessor.Builder(dbEngine, gremlinServerSingleton)
.startFrom(vertexSet).queryFrom(queryURIObj)
.processWith(processorType).create();
} else if (!queryURI.equals("")){
- processor = new GenericQueryProcessor.Builder(dbEngine)
+ processor = new GenericQueryProcessor.Builder(dbEngine, gremlinServerSingleton)
.queryFrom(queryURIObj)
.processWith(processorType).create();
- } else if(!dsl.equals("")){
- processor = new GenericQueryProcessor.Builder(dbEngine)
- .queryFrom(dsl, "dsl")
- .processWith(processorType).create();
- }else {
- processor = new GenericQueryProcessor.Builder(dbEngine)
+ } else {
+ processor = new GenericQueryProcessor.Builder(dbEngine, gremlinServerSingleton)
.queryFrom(gremlin, "gremlin")
.processWith(processorType).create();
}
@@ -215,7 +230,7 @@ public class QueryConsumer extends RESTAPI {
List<Object> vertices = processor.execute(subGraphStyle);
DBSerializer serializer = new DBSerializer(version, dbEngine, introspectorFactoryType, sourceOfTruth);
- FormatFactory ff = new FormatFactory(httpEntry.getLoader(), serializer);
+ FormatFactory ff = new FormatFactory(traversalUriHttpEntry.getLoader(), serializer, schemaVersions, this.basePath);
Formatter formater = ff.get(format, info.getQueryParameters());
@@ -250,8 +265,8 @@ public class QueryConsumer extends RESTAPI {
public void checkQueryParams(MultivaluedMap<String, String> params) throws AAIException {
- if (params.containsKey(DEPTH) && params.getFirst(DEPTH).matches("\\d+")) {
- String depth = params.getFirst(DEPTH);
+ if (params.containsKey("depth") && params.getFirst("depth").matches("\\d+")) {
+ String depth = params.getFirst("depth");
Integer i = Integer.parseInt(depth);
if (i > 1) {
throw new AAIException("AAI_3303");
@@ -274,7 +289,6 @@ public class QueryConsumer extends RESTAPI {
private CustomQueryConfig getCustomQueryConfig(URI uriObj ) {
- GremlinServerSingleton gremlinServerSingleton;
CustomQueryConfig customQueryConfig;
String path = uriObj.getPath();
@@ -282,7 +296,6 @@ public class QueryConsumer extends RESTAPI {
boolean hasQuery = false;
for ( String part:parts ) {
if ( hasQuery) {
- gremlinServerSingleton = GremlinServerSingleton.getInstance();
return gremlinServerSingleton.getCustomQueryConfig(part);
}
if ( "query".equals(part)) {
@@ -303,11 +316,13 @@ public class QueryConsumer extends RESTAPI {
templateVars.add(missingRequiredQueryParams.toString());
}
- return Response
+ Response response = Response
.status(e.getErrorObject().getHTTPResponseCode())
.entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e,
templateVars)).build();
- }
+
+ return response;
+ }
private Response createMessageInvalidQuerySection(String invalidQuery, HttpHeaders headers, UriInfo info, HttpServletRequest req) {
AAIException e = new AAIException("AAI_3014");
@@ -318,11 +333,13 @@ public class QueryConsumer extends RESTAPI {
templateVars.add(invalidQuery);
}
- return Response
+ Response response = Response
.status(e.getErrorObject().getHTTPResponseCode())
.entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e,
templateVars)).build();
- }
+
+ return response;
+ }
}
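
The QueryConsumer changes above replace the fixed version list in the @Path template with an open-ended pattern and move the timeout settings to TraversalConstants. A minimal, self-contained sketch of what the new path regex accepts compared to the old one; the two patterns are copied verbatim from the diff, the candidate strings are illustrative:

    import java.util.regex.Pattern;

    public class VersionPathSketch {
        public static void main(String[] args) {
            // Old template accepted a fixed list (v9 through v14); the new one accepts any vN plus "latest".
            Pattern oldTemplate = Pattern.compile("v9|v1[01234]");
            Pattern newTemplate = Pattern.compile("v[1-9][0-9]*|latest");

            for (String candidate : new String[] {"v9", "v14", "v15", "latest"}) {
                System.out.printf("%-7s old=%-5s new=%s%n",
                        candidate,
                        oldTemplate.matcher(candidate).matches(),
                        newTemplate.matcher(candidate).matches());
            }
        }
    }

With the new template, future schema versions and the "latest" alias are routed without touching the annotation again; whether the version is actually supported is checked later against SchemaVersions.
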
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/RecentAPIConsumer.java b/aai-traversal/src/main/java/org/onap/aai/rest/RecentAPIConsumer.java
new file mode 100644
index 0000000..46bccdf
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/RecentAPIConsumer.java
@@ -0,0 +1,253 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest;
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+import javax.ws.rs.core.UriInfo;
+
+import org.onap.aai.concurrent.AaiCallable;
+import org.onap.aai.dbmap.DBConnectionType;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.rest.db.HttpEntry;
+import org.onap.aai.rest.dsl.DslQueryProcessor;
+import org.onap.aai.rest.search.GenericQueryProcessor;
+import org.onap.aai.rest.search.GremlinServerSingleton;
+import org.onap.aai.rest.search.QueryProcessorType;
+import org.onap.aai.restcore.HttpMethod;
+import org.onap.aai.restcore.RESTAPI;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.queryformats.Format;
+import org.onap.aai.serialization.queryformats.FormatFactory;
+import org.onap.aai.serialization.queryformats.Formatter;
+import org.onap.aai.serialization.queryformats.SubGraphStyle;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.StopWatch;
+
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.TraversalConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+
+@Path("/recents/{version: v[1-9][0-9]*|latest}")
+public class RecentAPIConsumer extends RESTAPI {
+
+ /** The introspector factory type. */
+ private ModelType introspectorFactoryType = ModelType.MOXY;
+
+ private QueryProcessorType processorType = QueryProcessorType.LOCAL_GROOVY;
+ /** The query style. */
+
+ private static final String TARGET_ENTITY = "DB";
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(RecentAPIConsumer.class);
+
+ private HttpEntry traversalUriHttpEntry;
+
+ private DslQueryProcessor dslQueryProcessor;
+
+ private SchemaVersions schemaVersions;
+
+ private String basePath;
+
+ private GremlinServerSingleton gremlinServerSingleton;
+
+
+ @Autowired
+ public RecentAPIConsumer(
+ HttpEntry traversalUriHttpEntry,
+ DslQueryProcessor dslQueryProcessor,
+ SchemaVersions schemaVersions,
+ GremlinServerSingleton gremlinServerSingleton,
+ @Value("${schema.uri.base.path}") String basePath
+ ){
+ this.traversalUriHttpEntry = traversalUriHttpEntry;
+ this.dslQueryProcessor = dslQueryProcessor;
+ this.schemaVersions = schemaVersions;
+ this.gremlinServerSingleton = gremlinServerSingleton;
+ this.basePath = basePath;
+ }
+
+ @GET
+ @Path("/{nodeType: .+}")
+ @Consumes({ MediaType.APPLICATION_JSON })
+ @Produces({ MediaType.APPLICATION_JSON })
+ public Response getRecentData(String content, @PathParam("version") String versionParam,
+ @PathParam("nodeType") String nodeType, @Context HttpHeaders headers, @Context UriInfo info) {
+
+ return runner(TraversalConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED, TraversalConstants.AAI_TRAVERSAL_TIMEOUT_APP,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT, headers, info, HttpMethod.GET, new AaiCallable<Response>() {
+ @Override
+ public Response process() {
+ return processRecentData(content, versionParam, nodeType, info, headers);
+ }
+ });
+
+ }
+
+ public Response processRecentData(String content, @PathParam("version") String versionParam,
+ @PathParam("nodeType") String nodeType, @Context UriInfo info, @Context HttpHeaders headers) {
+
+ String methodName = "processRecentData";
+ String sourceOfTruth = headers.getRequestHeaders().getFirst("X-FromAppId");
+ String realTime = headers.getRequestHeaders().getFirst("Real-Time");
+ String queryProcessor = headers.getRequestHeaders().getFirst("QueryProcessor");
+ QueryProcessorType processorType = this.processorType;
+ Response response = null;
+ TransactionalGraphEngine dbEngine = null;
+ try {
+ LoggingContext.save();
+
+ if (queryProcessor != null) {
+ processorType = QueryProcessorType.valueOf(queryProcessor);
+ }
+
+ SchemaVersion version = new SchemaVersion(versionParam);
+ this.checkVersion(version);
+
+ DBConnectionType type = this.determineConnectionType(sourceOfTruth, realTime);
+ traversalUriHttpEntry.setHttpEntryProperties(version, type);
+ dbEngine = traversalUriHttpEntry.getDbEngine();
+
+ /*
+ * Check for mandatory parameters here
+ */
+
+ this.checkNodeType(nodeType);
+ this.checkQueryParams(info.getQueryParameters());
+
+ GenericQueryProcessor processor = null;
+
+ LoggingContext.targetEntity(TARGET_ENTITY);
+ LoggingContext.targetServiceName(methodName);
+ LoggingContext.startTime();
+ StopWatch.conditionalStart();
+
+ processor = new GenericQueryProcessor.Builder(dbEngine, gremlinServerSingleton).queryFrom(nodeType, "nodeQuery")
+ .uriParams(info.getQueryParameters())
+ .processWith(processorType).create();
+
+
+
+ String result = "";
+ SubGraphStyle subGraphStyle = null;
+ List<Object> vertices = processor.execute(subGraphStyle);
+
+ DBSerializer serializer = new DBSerializer(version, dbEngine, introspectorFactoryType, sourceOfTruth);
+ FormatFactory ff = new FormatFactory(traversalUriHttpEntry.getLoader(), serializer, schemaVersions, this.basePath);
+ Format format = Format.pathed_resourceversion;
+
+ Formatter formater = ff.get(format, info.getQueryParameters());
+
+ result = formater.output(vertices).toString();
+
+ double msecs = StopWatch.stopIfStarted();
+ LoggingContext.elapsedTime((long) msecs, TimeUnit.MILLISECONDS);
+ LoggingContext.successStatusFields();
+ LOGGER.info("Completed");
+
+ response = Response.status(Status.OK).type(MediaType.APPLICATION_JSON).entity(result).build();
+
+ } catch (AAIException e) {
+ response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, e);
+ } catch (Exception e) {
+ AAIException ex = new AAIException("AAI_4000", e);
+ response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, ex);
+ } finally {
+
+ LoggingContext.restoreIfPossible();
+ LoggingContext.successStatusFields();
+ if (dbEngine != null) {
+ dbEngine.rollback();
+ }
+
+ }
+
+ return response;
+ }
+
+ private void checkVersion(SchemaVersion version) throws AAIException {
+ if(!schemaVersions.getVersions().contains(version)){
+ throw new AAIException("AAI_3021", "Schema Version is not valid");
+ }
+ }
+
+ public void checkNodeType(String nodeType) throws AAIException {
+ try {
+ Introspector target = traversalUriHttpEntry.getLoader().introspectorFromName(nodeType);
+ } catch (AAIUnknownObjectException e) {
+ throw new AAIException("AAI_6115", "Unrecognized nodeType [" + nodeType + "] passed to recents query.");
+ }
+ }
+ public void checkQueryParams(MultivaluedMap<String, String> params) throws AAIException {
+
+ boolean isHoursParameter = false;
+ boolean isDateTimeParameter = false;
+
+ if (params != null && params.containsKey("hours") && params.getFirst("hours").matches("-?\\d+")) {
+ isHoursParameter = true;
+
+ Long hours = Long.parseLong(params.getFirst("hours"));
+ if (hours < 1 || hours > AAIConstants.HISTORY_MAX_HOURS) {
+ throw new AAIException("AAI_3021", " Valid values for hours are 1 to " + AAIConstants.HISTORY_MAX_HOURS);
+ }
+ }
+ if (params != null && params.containsKey("date-time") && params.getFirst("date-time").matches("-?\\d+")) {
+ isDateTimeParameter = true;
+ Long minStartTime = System.currentTimeMillis() - TimeUnit.HOURS.toMillis(AAIConstants.HISTORY_MAX_HOURS);
+ Long startTime = Long.parseLong(params.getFirst("date-time"));
+ if (startTime < minStartTime) {
+ throw new AAIException("AAI_3021", " Valid values for date-time are "+minStartTime+" to " + System.currentTimeMillis() );
+ }
+ }
+
+ if(!isHoursParameter && !isDateTimeParameter){
+			throw new AAIException("AAI_3021", "Send valid hours or date-time to specify the time bounds");
+ }
+
+ if(isHoursParameter && isDateTimeParameter){
+			throw new AAIException("AAI_3021", "Send either hours or date-time and not both to specify the time bounds");
+ }
+
+
+ }
+
+}
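
RecentAPIConsumer above requires exactly one of the hours or date-time query parameters and bounds both by AAIConstants.HISTORY_MAX_HOURS. The following simplified, self-contained sketch restates that validation outside the AAI classes; MAX_HOURS is a placeholder value and IllegalArgumentException stands in for the AAIException("AAI_3021") the real code throws:

    import java.util.concurrent.TimeUnit;

    public class TimeBoundsSketch {

        // Placeholder; the real limit comes from AAIConstants.HISTORY_MAX_HOURS.
        private static final long MAX_HOURS = 192;

        public static void validate(Long hours, Long dateTimeMillis) {
            if (hours == null && dateTimeMillis == null) {
                throw new IllegalArgumentException("Send valid hours or date-time to specify the time bounds");
            }
            if (hours != null && dateTimeMillis != null) {
                throw new IllegalArgumentException("Send either hours or date-time, not both");
            }
            if (hours != null && (hours < 1 || hours > MAX_HOURS)) {
                throw new IllegalArgumentException("Valid values for hours are 1 to " + MAX_HOURS);
            }
            if (dateTimeMillis != null) {
                long minStart = System.currentTimeMillis() - TimeUnit.HOURS.toMillis(MAX_HOURS);
                if (dateTimeMillis < minStart) {
                    throw new IllegalArgumentException("date-time must be within the last " + MAX_HOURS + " hours");
                }
            }
        }

        public static void main(String[] args) {
            validate(24L, null);                        // ok: hours only
            validate(null, System.currentTimeMillis()); // ok: date-time only
        }
    }
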
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslContext.java b/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslContext.java
new file mode 100644
index 0000000..3a3cc96
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslContext.java
@@ -0,0 +1,130 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.dsl;
+
+import java.util.Deque;
+import java.util.LinkedList;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class DslContext {
+
+ private ParserRuleContext ctx;
+
+ private String currentNode;
+
+ private String previousNode;
+
+ private boolean isTraversal = false;
+ private boolean isWhereQuery = false;
+ private boolean isUnionQuery = false;
+ private boolean isUnionStart = false;
+
+ private String whereStartNode = "";
+
+ private Deque<String> unionStartNodes = new LinkedList<String>();
+
+	/*
+	 * Limit queries have to be applied at the end, so the limit clause is
+	 * buffered in this context until the query is finalized
+	 */
+ StringBuilder limitQuery = new StringBuilder();
+
+ public ParserRuleContext getCtx() {
+ return ctx;
+ }
+
+ public void setCtx(ParserRuleContext ctx) {
+ this.ctx = ctx;
+ }
+
+ public String getCurrentNode() {
+ return currentNode;
+ }
+
+ public void setCurrentNode(String currentNode) {
+ this.currentNode = currentNode;
+ }
+
+ public String getPreviousNode() {
+ return previousNode;
+ }
+
+ public void setPreviousNode(String previousNode) {
+ this.previousNode = previousNode;
+ }
+
+ public boolean isTraversal() {
+ return isTraversal;
+ }
+
+ public void setTraversal(boolean isTraversal) {
+ this.isTraversal = isTraversal;
+ }
+
+ public boolean isWhereQuery() {
+ return isWhereQuery;
+ }
+
+ public void setWhereQuery(boolean isWhereQuery) {
+ this.isWhereQuery = isWhereQuery;
+ }
+
+ public boolean isUnionQuery() {
+ return isUnionQuery;
+ }
+
+ public void setUnionQuery(boolean isUnionQuery) {
+ this.isUnionQuery = isUnionQuery;
+ }
+
+ public String getWhereStartNode() {
+ return whereStartNode;
+ }
+
+ public void setWhereStartNode(String whereStartNode) {
+ this.whereStartNode = whereStartNode;
+ }
+
+ public Deque<String> getUnionStartNodes() {
+ return unionStartNodes;
+ }
+
+ public void setUnionStartNodes(Deque<String> unionStartNodes) {
+ this.unionStartNodes = unionStartNodes;
+ }
+
+ public boolean isUnionStart() {
+ return isUnionStart;
+ }
+
+ public void setUnionStart(boolean isUnionStart) {
+ this.isUnionStart = isUnionStart;
+ }
+
+ public StringBuilder getLimitQuery() {
+ return limitQuery;
+ }
+
+ public void setLimitQuery(StringBuilder limitQuery) {
+ this.limitQuery = limitQuery;
+ }
+
+}
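
One non-obvious detail of DslContext is the limitQuery buffer: a .limit(n) clause is not appended where it appears in the DSL, it is stored here and only attached when DslQueryBuilder.end(...) (introduced in the next file) finalizes the query. A small sketch of that behavior, assuming both classes from this commit are on the classpath; null is passed for the EdgeIngestor since these calls never touch the edge rules:

    import org.onap.aai.rest.dsl.DslContext;
    import org.onap.aai.rest.dsl.DslQueryBuilder;

    public class LimitBufferSketch {
        public static void main(String[] args) {
            DslContext context = new DslContext();
            DslQueryBuilder builder = new DslQueryBuilder(null); // EdgeIngestor unused by these calls

            builder.start();                                        // appends "builder"
            context.setLimitQuery(new StringBuilder(".limit(10)")); // normally done by DslQueryBuilder.limit()
            builder.end(context);                                   // appends ".cap('x').unfold().dedup()" plus the buffered limit

            // Prints: builder.cap('x').unfold().dedup().limit(10)
            System.out.println(builder.getQuery());
        }
    }
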
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslListener.java b/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslListener.java
index 1b8341d..ecd04ac 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslListener.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslListener.java
@@ -20,16 +20,21 @@
package org.onap.aai.rest.dsl;
import java.util.ArrayList;
+import java.util.Deque;
import java.util.HashMap;
+import java.util.LinkedList;
import java.util.Map;
import java.util.List;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.onap.aai.AAIDslParser;
-import org.onap.aai.serialization.db.EdgeRules;
+import org.onap.aai.edges.EdgeRuleQuery;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.exceptions.AAIException;
+import org.springframework.beans.factory.annotation.Autowired;
import org.onap.aai.AAIDslBaseListener;
-
+import org.onap.aai.edges.EdgeIngestor;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
@@ -39,246 +44,150 @@ import com.att.eelf.configuration.EELFManager;
public class DslListener extends AAIDslBaseListener {
private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DslQueryProcessor.class);
- private final EdgeRules edgeRules = EdgeRules.getInstance();
-
- protected List<String> list = null;
- //TODO Use StringBuilder to build the query than concat
- String query = "";
-
- Map<Integer, String> unionMap = new HashMap<>();
- Map<String, String> flags = new HashMap<>();
-
- String currentNode = "";
- String prevsNode = "";
- int commas = 0;
- int unionKey = 0;
- int unionMembers = 0;
- boolean isUnionBeg = false;
- boolean isUnionTraversal = false;
+ private final EdgeIngestor edgeRules;
- boolean isTraversal = false;
- boolean isWhereTraversal = false;
- String whereTraversalNode = "";
-
- String limitQuery = "";
- boolean isNot = false;
+ DslContext context = null;
+ DslQueryBuilder dslBuilder = null;
/**
* Instantiates a new DslListener.
*/
+ @Autowired
+ public DslListener(EdgeIngestor edgeIngestor) {
+ this.edgeRules = edgeIngestor;
+ context = new DslContext();
+ dslBuilder = new DslQueryBuilder(edgeIngestor);
+ }
- public DslListener() {
- list = new ArrayList<>();
+ public String getQuery() {
+ return dslBuilder.getQuery().toString();
}
@Override
public void enterAaiquery(AAIDslParser.AaiqueryContext ctx) {
- query += "builder";
+ dslBuilder.start();
}
@Override
- public void enterDslStatement(AAIDslParser.DslStatementContext ctx) {
- // LOGGER.info("Statement Enter"+ctx.getText());
- /*
- * This block of code is entered for every query statement
- */
- if (isUnionBeg) {
- isUnionBeg = false;
- isUnionTraversal = true;
-
- } else if (unionMembers > 0) {
- unionMembers--;
- query += ",builder.newInstance()";
- isUnionTraversal = true;
- }
-
+ public void exitAaiquery(AAIDslParser.AaiqueryContext ctx) {
+ dslBuilder.end(context);
}
@Override
- public void exitDslStatement(AAIDslParser.DslStatementContext ctx) {
- /*
- * Nothing to be done here for now
- * LOGGER.info("Statement Exit"+ctx.getText());
- */
+ public void enterDslStatement(AAIDslParser.DslStatementContext ctx) {
+ if (context.isUnionStart()) {
+ dslBuilder.startUnion();
+ }
}
@Override
- public void exitAaiquery(AAIDslParser.AaiqueryContext ctx) {
- /*
- * dedup is by default for all queries If the query has limit in it
- * include this as well LOGGER.info("Statement Exit"+ctx.getText());
- */
-
- query += ".cap('x').unfold().dedup()" + limitQuery;
+ public void exitDslStatement(AAIDslParser.DslStatementContext ctx) {
+ if (context.isUnionQuery()) {
+ dslBuilder.comma(context);
+ context.setUnionStart(true);
+ }
}
- /*
- * TODO: The contexts are not inherited from a single parent in AAIDslParser
- * Need to find a way to do that
- */
@Override
public void enterSingleNodeStep(AAIDslParser.SingleNodeStepContext ctx) {
-
- prevsNode = currentNode;
- currentNode = ctx.NODE().getText();
+ try {
+			/*
+			 * Shift the current node into previousNode, then read the new
+			 * current node from the parse context
+			 */
+ context.setPreviousNode(context.getCurrentNode());
+ context.setCurrentNode(ctx.NODE().getText());
- this.generateQuery();
- if (ctx.STORE() != null && ctx.STORE().getText().equals("*")) {
- flags.put(currentNode, "store");
- }
+ if (context.isUnionQuery() || context.isTraversal() || context.isWhereQuery()) {
+ String oldPreviousNode = context.getPreviousNode();
- }
+ if (context.isUnionStart()) {
+ String previousNode = context.getUnionStartNodes().peek();
+ context.setPreviousNode(previousNode);
- @Override
- public void enterSingleQueryStep(AAIDslParser.SingleQueryStepContext ctx) {
-
- prevsNode = currentNode;
- currentNode = ctx.NODE().getText();
- this.generateQuery();
-
- if (ctx.STORE() != null && ctx.STORE().getText().equals("*")) {
- flags.put(currentNode, "store");
- }
- }
+ context.setUnionStart(false);
+ }
- @Override
- public void enterMultiQueryStep(AAIDslParser.MultiQueryStepContext ctx) {
-
- prevsNode = currentNode;
- currentNode = ctx.NODE().getText();
- this.generateQuery();
-
- if (ctx.STORE() != null && ctx.STORE().getText().equals("*")) {
- flags.put(currentNode, "store");
- }
+ dslBuilder.edgeQuery(context);
- }
+				/*
+				 * Reset is required because union queries temporarily change
+				 * the previous node in the context
+				 */
+ context.setPreviousNode(oldPreviousNode);
- /*
- * Generates the QueryBuilder syntax for the dsl query
- */
- private void generateQuery() {
- String edgeType = "";
-
- if (isUnionTraversal || isTraversal || isWhereTraversal) {
- String previousNode = prevsNode;
- if (isUnionTraversal) {
- previousNode = unionMap.get(unionKey);
- isUnionTraversal = false;
}
- if (edgeRules.hasTreeEdgeRule(previousNode, currentNode)) {
- edgeType = "EdgeType.TREE";
- }else if (edgeRules.hasCousinEdgeRule(previousNode, currentNode, "")) {
- edgeType = "EdgeType.COUSIN";
- } else
- edgeType = "EdgeType.COUSIN";
-
- query += ".createEdgeTraversal(" + edgeType + ", '" + previousNode + "','" + currentNode + "')";
+ else {
+ dslBuilder.nodeQuery(context);
+ }
+ } catch (AAIException e) {
+			LOGGER.info("AAIException in DslListener: " + e.getMessage());
}
- else
- query += ".getVerticesByProperty('aai-node-type', '" + currentNode + "')";
}
@Override
public void exitSingleNodeStep(AAIDslParser.SingleNodeStepContext ctx) {
-
- generateExitStep();
- }
-
- @Override
- public void exitSingleQueryStep(AAIDslParser.SingleQueryStepContext ctx) {
- generateExitStep();
- }
-
- @Override
- public void exitMultiQueryStep(AAIDslParser.MultiQueryStepContext ctx) {
- generateExitStep();
-
+ context.setCtx(ctx);
+ dslBuilder.store(context);
}
private void generateExitStep() {
- if (flags.containsKey(currentNode)) {
- String storeFlag = flags.get(currentNode);
- if (storeFlag != null && storeFlag.equals("store"))
- query += ".store('x')";
- flags.remove(currentNode);
- }
+
}
@Override
public void enterUnionQueryStep(AAIDslParser.UnionQueryStepContext ctx) {
- isUnionBeg = true;
- unionKey++;
- unionMap.put(unionKey, currentNode);
- query += ".union(builder.newInstance()";
+ Deque<String> unionStartNodes = context.getUnionStartNodes();
+ unionStartNodes.add(context.getCurrentNode());
- List<TerminalNode> commaNodes = ctx.COMMA();
+ context.setUnionStart(true);
+		/*
+		 * TODO: verify whether setting the union-query flag here is still needed
+		 */
+ context.setUnionQuery(true);
+ dslBuilder.union(context);
- for (TerminalNode node : commaNodes) {
- unionMembers++;
- }
}
@Override
public void exitUnionQueryStep(AAIDslParser.UnionQueryStepContext ctx) {
- isUnionBeg = false;
- unionMap.remove(unionKey);
+ context.setUnionStart(false);
+ context.setUnionQuery(false);
+ Deque<String> unionStartNodes = context.getUnionStartNodes();
+ if (unionStartNodes.peek() != null) {
+ unionStartNodes.pop();
+ }
- query += ")";
- unionKey--;
+ dslBuilder.endUnion(context);
}
@Override
public void enterFilterTraverseStep(AAIDslParser.FilterTraverseStepContext ctx) {
- isWhereTraversal = true;
- whereTraversalNode = currentNode;
- query += ".where(builder.newInstance()";
+ context.setWhereQuery(true);
+ context.setWhereStartNode(context.getCurrentNode());
+ dslBuilder.where(context);
+
}
@Override
public void exitFilterTraverseStep(AAIDslParser.FilterTraverseStepContext ctx) {
- query += ")";
- isWhereTraversal = false;
- currentNode = whereTraversalNode;
+ context.setWhereQuery(false);
+ context.setCurrentNode(context.getWhereStartNode());
+
+ dslBuilder.endWhere(context);
+
}
@Override
public void enterFilterStep(AAIDslParser.FilterStepContext ctx) {
- if (ctx.NOT() != null && ctx.NOT().getText().equals("!"))
- isNot = true;
-
- List<TerminalNode> nodes = ctx.KEY();
- String key = ctx.KEY(0).getText();
-
- if (isNot) {
- query += ".getVerticesExcludeByProperty(";
- isNot = false;
- } else
- query += ".getVerticesByProperty(";
-
- if (nodes.size() == 2) {
- query += key + "," + ctx.KEY(1).getText();
- query += ")";
- }
-
- if (nodes.size() > 2) {
-
- for (TerminalNode node : nodes) {
- if (node.getText().equals(key))
- continue;
-
- query += key + "," + node.getText();
- query += ")";
- }
-
- }
-
+ context.setCtx(ctx);
+ dslBuilder.filter(context);
}
@Override
@@ -288,17 +197,17 @@ public class DslListener extends AAIDslBaseListener {
@Override
public void enterTraverseStep(AAIDslParser.TraverseStepContext ctx) {
- isTraversal = true;
+ context.setTraversal(true);
}
@Override
public void exitTraverseStep(AAIDslParser.TraverseStepContext ctx) {
- isTraversal = false;
+ context.setTraversal(false);
}
@Override
public void enterLimitStep(AAIDslParser.LimitStepContext ctx) {
- String value = ctx.NODE().getText();
- limitQuery += ".limit(" + value + ")";
+ context.setCtx(ctx);
+ dslBuilder.limit(context);
}
}
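
The rewritten listener keeps its state in DslContext rather than in ad-hoc fields. In particular, the node that opens a union is remembered on a deque so that each branch of the union can anchor its first edge traversal on that node, and it is popped when the union ends. A standalone sketch of just that bookkeeping, with an illustrative node type and without the ANTLR callbacks:

    import java.util.Deque;
    import java.util.LinkedList;

    public class UnionAnchorSketch {
        public static void main(String[] args) {
            Deque<String> unionStartNodes = new LinkedList<>();

            String currentNode = "cloud-region";       // node in scope when the union starts
            unionStartNodes.add(currentNode);          // enterUnionQueryStep

            // Each branch peeks the anchor node and uses it as the previous node
            // for its first createEdgeTraversal(...) call.
            String branchPrevious = unionStartNodes.peek();
            System.out.println("anchor for this branch: " + branchPrevious);

            if (unionStartNodes.peek() != null) {      // exitUnionQueryStep
                unionStartNodes.pop();
            }
        }
    }
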
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslQueryBuilder.java b/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslQueryBuilder.java
new file mode 100644
index 0000000..59f4443
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslQueryBuilder.java
@@ -0,0 +1,190 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.dsl;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.antlr.v4.runtime.tree.TerminalNode;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.onap.aai.AAIDslBaseListener;
+import org.onap.aai.AAIDslParser;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.EdgeRuleQuery;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.exceptions.AAIException;
+
+public class DslQueryBuilder {
+
+ private StringBuilder query;
+ private final EdgeIngestor edgeRules;
+
+ public DslQueryBuilder(EdgeIngestor edgeIngestor) {
+ this.edgeRules = edgeIngestor;
+ query = new StringBuilder();
+ }
+
+ public StringBuilder getQuery() {
+ return query;
+ }
+
+ public void setQuery(StringBuilder query) {
+ this.query = query;
+ }
+
+ public DslQueryBuilder start() {
+ query.append("builder");
+ return this;
+ }
+
+ public DslQueryBuilder startUnion() {
+ query.append("builder.newInstance()");
+ return this;
+ }
+
+ public DslQueryBuilder end(DslContext context) {
+ query.append(".cap('x').unfold().dedup()").append(context.getLimitQuery());
+ return this;
+ }
+
+ public DslQueryBuilder nodeQuery(DslContext context) {
+ query.append(".getVerticesByProperty('aai-node-type', '").append(context.getCurrentNode()).append("')");
+ return this;
+ }
+
+ public DslQueryBuilder edgeQuery(DslContext context) throws AAIException {
+ EdgeRuleQuery.Builder baseQ = new EdgeRuleQuery.Builder(context.getPreviousNode(), context.getCurrentNode());
+ String edgeType = "";
+ if (!edgeRules.hasRule(baseQ.build())) {
+ throw new AAIException("AAI_6120", "No EdgeRule found for passed nodeTypes: " + context.getPreviousNode()
+ + ", " + context.getCurrentNode());
+ } else if (edgeRules.hasRule(baseQ.edgeType(EdgeType.TREE).build())) {
+ edgeType = "EdgeType.TREE";
+ } else if (edgeRules.hasRule(baseQ.edgeType(EdgeType.COUSIN).build())) {
+ edgeType = "EdgeType.COUSIN";
+ } else
+ edgeType = "EdgeType.COUSIN";
+
+ query.append(".createEdgeTraversal(").append(edgeType).append(", '").append(context.getPreviousNode())
+ .append("','").append(context.getCurrentNode()).append("')");
+
+ return this;
+ }
+
+ public DslQueryBuilder where(DslContext context) {
+ query.append(".where(builder.newInstance()");
+ return this;
+ }
+
+ public DslQueryBuilder endWhere(DslContext context) {
+ query.append(")");
+ return this;
+ }
+
+ public DslQueryBuilder endUnion(DslContext context) {
+ /*
+ * Need to delete the last comma
+ */
+ if (query.toString().endsWith(",")) {
+ query.deleteCharAt(query.length() - 1);
+ }
+ query.append(")");
+ return this;
+ }
+
+ public DslQueryBuilder limit(DslContext context) {
+		/*
+		 * Limit clauses cannot be appended in place; they are buffered in the
+		 * context and added when the query is finalized in end()
+		 */
+ AAIDslParser.LimitStepContext ctx = (AAIDslParser.LimitStepContext) context.getCtx();
+ context.setLimitQuery(new StringBuilder(".limit(").append(ctx.NODE().getText()).append(")"));
+ return this;
+ }
+
+ public DslQueryBuilder filter(DslContext context) {
+ return this.filterPropertyStart(context).filterPropertyKeys(context).filterPropertyEnd();
+
+ }
+
+ public DslQueryBuilder filterPropertyStart(DslContext context) {
+ AAIDslParser.FilterStepContext ctx = (AAIDslParser.FilterStepContext) context.getCtx();
+ if (ctx.NOT() != null && ctx.NOT().getText().equals("!"))
+ query.append(".getVerticesExcludeByProperty(");
+ else
+ query.append(".getVerticesByProperty(");
+
+ return this;
+
+ }
+
+ public DslQueryBuilder filterPropertyEnd() {
+ query.append(")");
+ return this;
+
+ }
+
+ public DslQueryBuilder filterPropertyKeys(DslContext context) {
+ AAIDslParser.FilterStepContext ctx = (AAIDslParser.FilterStepContext) context.getCtx();
+ final String key = ctx.KEY(0).getText();
+
+ query.append(key);
+
+ List<TerminalNode> nodes = ctx.KEY();
+ List<String> valuesArray = nodes.stream().filter((node) -> !key.equals(node.getText()))
+ .map((node) -> "'" + node.getText().replace("'", "").trim() + "'")
+ .collect(Collectors.toList());
+
+		/*
+		 * The point of this check is to separate P.within from a key-value search:
+		 * for a list of values the QueryBuilder uses P.within,
+		 * for a single value it uses a key,value check
+		 */
+ if (nodes.size() > 2) {
+ String values = String.join(",", valuesArray);
+			query.append(", new ArrayList<>(Arrays.asList(").append(values).append("))");
+ } else {
+ if (!valuesArray.isEmpty())
+ query.append(",").append(valuesArray.get(0).toString());
+ }
+ return this;
+ }
+
+ public DslQueryBuilder union(DslContext context) {
+ query.append(".union(");
+ return this;
+ }
+
+ public DslQueryBuilder store(DslContext context) {
+ AAIDslParser.SingleNodeStepContext ctx = (AAIDslParser.SingleNodeStepContext) context.getCtx();
+ if (ctx.STORE() != null && ctx.STORE().getText().equals("*")) {
+ query.append(".store('x')");
+ }
+ return this;
+
+ }
+
+ public DslQueryBuilder comma(DslContext context) {
+ query.append(",");
+ return this;
+
+ }
+}
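
DslQueryBuilder assembles the gremlin-groovy QueryBuilder expression as plain text. A minimal usage sketch of the fluent API, exercising only the methods that need neither an ANTLR parse context nor edge rules; the node type is illustrative and null stands in for the EdgeIngestor, which these calls do not use:

    import org.onap.aai.rest.dsl.DslContext;
    import org.onap.aai.rest.dsl.DslQueryBuilder;

    public class DslBuilderSketch {
        public static void main(String[] args) {
            DslContext context = new DslContext();
            DslQueryBuilder builder = new DslQueryBuilder(null);

            context.setCurrentNode("pserver");
            builder.start()                 // builder
                   .nodeQuery(context)      // .getVerticesByProperty('aai-node-type', 'pserver')
                   .where(context)          // .where(builder.newInstance()
                   .endWhere(context)       // )
                   .end(context);           // .cap('x').unfold().dedup()

            System.out.println(builder.getQuery());
            // builder.getVerticesByProperty('aai-node-type', 'pserver').where(builder.newInstance()).cap('x').unfold().dedup()
        }
    }
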
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslQueryProcessor.java b/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslQueryProcessor.java
index 61f16d4..b2be402 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslQueryProcessor.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/dsl/DslQueryProcessor.java
@@ -36,6 +36,8 @@ import org.antlr.v4.runtime.Token;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Autowired;
+
/**
* The Class DslQueryProcessor.
*/
@@ -43,6 +45,13 @@ public class DslQueryProcessor {
private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DslQueryProcessor.class);
+ private DslListener dslListener;
+
+ @Autowired
+ public DslQueryProcessor(DslListener dslListener){
+ this.dslListener = dslListener;
+ }
+
public String parseAaiQuery(String aaiQuery) {
try {
// Create a input stream that reads our string
@@ -64,34 +73,18 @@ public class DslQueryProcessor {
// Walk it and attach our listener
ParseTreeWalker walker = new ParseTreeWalker();
- DslListener listener = new DslListener();
- walker.walk(listener, ptree);
- LOGGER.info("Final QUERY" + listener.query);
+ walker.walk(dslListener, ptree);
+		LOGGER.info("Final QUERY: " + dslListener.getQuery());
/*
* TODO - Visitor patternQueryDslVisitor visitor = new
* QueryDslVisitor(); String query = visitor.visit(ptree);
*
*/
- return listener.query;
+ return dslListener.getQuery();
} catch (Exception e) {
LOGGER.error("Error while processing the query"+e.getMessage());
}
return "";
}
-
- public static class Builder {
-
- /*
- * Builder constructor doesnt do anything
- */
- public Builder() {
- // Do nothing
- }
-
- public String build(String aaiQuery) {
-
- return new DslQueryProcessor().parseAaiQuery(aaiQuery);
- }
- }
}
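
With the Builder inner class gone, DslQueryProcessor is now constructed with a DslListener, which in turn needs an EdgeIngestor; inside the application Spring wires all three. A sketch of the equivalent manual wiring, where edgeIngestor is assumed to be an already-initialized org.onap.aai.edges.EdgeIngestor and dslQuery is the DSL string taken from the request body:

    // Manual wiring sketch (normally done by Spring through the @Autowired constructors).
    DslListener listener = new DslListener(edgeIngestor);
    DslQueryProcessor processor = new DslQueryProcessor(listener);

    // Returns the gremlin-groovy QueryBuilder expression, or "" if parsing fails.
    String builderExpression = processor.parseAaiQuery(dslQuery);
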
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java b/aai-traversal/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java
deleted file mode 100644
index aea9083..0000000
--- a/aai-traversal/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.aai.rest.retired;
-
-import java.util.ArrayList;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
-
-import io.swagger.jaxrs.PATCH;
-
-import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.logging.ErrorLogHelper;
-import org.onap.aai.restcore.RESTAPI;
-import org.onap.aai.util.AAIConfig;
-
-/**
- * The Class RetiredConsumer.
- */
-public abstract class RetiredConsumer extends RESTAPI {
-
- /**
- * Creates the message get.
- *
- * @param versionParam the version param
- * @param headers the headers
- * @param info the info
- * @param req the req
- * @return the response
- */
- @GET
- @Path("/{uri:.*}")
- public Response createMessageGet(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
- return createMessage(versionParam, headers, info, req);
- }
-
- /**
- * Creates the message delete.
- *
- * @param versionParam the version param
- * @param headers the headers
- * @param info the info
- * @param req the req
- * @return the response
- */
- @DELETE
- @Path("/{uri:.*}")
- public Response createMessageDelete(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
- return createMessage(versionParam, headers, info, req);
- }
-
- /**
- * Creates the message post.
- *
- * @param versionParam the version param
- * @param headers the headers
- * @param info the info
- * @param req the req
- * @return the response
- */
- @POST
- @Path("/{uri:.*}")
- public Response createMessagePost(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
- return createMessage(versionParam, headers, info, req);
- }
-
- @PATCH
- @Path("/{uri:.*}")
- public Response createMessagePatch(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
- return createMessage(versionParam, headers, info, req);
- }
- /**
- * Creates the message put.
- *
- * @param versionParam the version param
- * @param headers the headers
- * @param info the info
- * @param req the req
- * @return the response
- */
- @PUT
- @Path("/{uri:.*}")
- public Response createMessagePut(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
- return createMessage(versionParam, headers, info, req);
- }
-
-
- /**
- * Creates the message.
- *
- * @param versionParam the version param
- * @param headers the headers
- * @param info the info
- * @param req the req
- * @return the response
- */
- private Response createMessage(String versionParam, HttpHeaders headers, UriInfo info, HttpServletRequest req) {
- AAIException e = new AAIException("AAI_3007");
-
- ArrayList<String> templateVars = new ArrayList<String>();
-
- if (templateVars.size() == 0) {
- templateVars.add("PUT");
- templateVars.add(info.getPath().toString());
- templateVars.add(versionParam);
- templateVars.add(AAIConfig.get("aai.default.api.version", ""));
- }
-
- return Response
- .status(e.getErrorObject().getHTTPResponseCode())
- .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e,
- templateVars)).build();
- }
-}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/CustomQueryConfig.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/CustomQueryConfig.java
index 44420d5..2fa7ec1 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/search/CustomQueryConfig.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/CustomQueryConfig.java
@@ -8,7 +8,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -22,36 +22,32 @@ package org.onap.aai.rest.search;
import java.util.List;
public class CustomQueryConfig {
-
- private String query;
- private List<String> queryOptionalProperties;
- private List<String> queryRequiredProperties;
-
- public CustomQueryConfig() {
- // used by GetCustomQueryConfig
- }
-
- public void setQuery(String query) {
- this.query = query;
- }
-
- public String getQuery() {
- return this.query;
- }
-
- public void setQueryOptionalProperties(List<String> queryOptionalProperties) {
- this.queryOptionalProperties = queryOptionalProperties;
- }
-
- public List<String> getQueryOptionalProperties() {
- return queryOptionalProperties;
- }
-
- public void setQueryRequiredProperties(List<String> queryRequiredProperties) {
- this.queryRequiredProperties = queryRequiredProperties;
- }
-
- public List<String> getQueryRequiredProperties() {
- return queryRequiredProperties;
- }
+ public CustomQueryConfig() {
+ // used by GetCustomQueryConfig
+ }
+
+
+ private String query;
+ private List<String> queryOptionalProperties;
+ private List<String> queryRequiredProperties;
+
+ public void setQuery(String query) {
+ this.query = query;
+ }
+ public String getQuery() {
+ return this.query;
+ }
+
+ public void setQueryOptionalProperties( List<String> queryOptionalProperties) {
+ this.queryOptionalProperties = queryOptionalProperties;
+ }
+ public List<String> getQueryOptionalProperties( ) {
+ return queryOptionalProperties;
+ }
+ public void setQueryRequiredProperties( List<String> queryRequiredProperties) {
+ this.queryRequiredProperties = queryRequiredProperties;
+ }
+ public List<String> getQueryRequiredProperties( ) {
+ return queryRequiredProperties;
+ }
}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java
index 6831fb8..fd9d53b 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java
@@ -19,46 +19,51 @@
*/
package org.onap.aai.rest.search;
-import java.io.FileNotFoundException;
-import java.net.URI;
-import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.ws.rs.core.MultivaluedHashMap;
-import javax.ws.rs.core.MultivaluedMap;
-
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
import org.apache.tinkerpop.gremlin.process.traversal.P;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.javatuples.Pair;
+import org.onap.aai.config.SpringContextAware;
+import org.onap.aai.exceptions.AAIException;
import org.onap.aai.query.builder.MissingOptionalParameter;
import org.onap.aai.rest.dsl.DslQueryProcessor;
+import org.onap.aai.restcore.search.GroovyQueryBuilderSingleton;
import org.onap.aai.restcore.util.URITools;
import org.onap.aai.serialization.engines.TransactionalGraphEngine;
import org.onap.aai.serialization.queryformats.SubGraphStyle;
-import jersey.repackaged.com.google.common.base.Joiner;
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;
+import java.io.FileNotFoundException;
+import java.net.URI;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
public abstract class GenericQueryProcessor {
+ private static EELFLogger LOGGER = EELFManager.getInstance().getLogger(GenericQueryProcessor.class);
+
protected final Optional<URI> uri;
protected final MultivaluedMap<String, String> queryParams;
protected final Optional<Collection<Vertex>> vertices;
protected static Pattern p = Pattern.compile("query/(.*+)");
protected Optional<String> gremlin;
protected final TransactionalGraphEngine dbEngine;
- protected static GremlinServerSingleton gremlinServerSingleton = GremlinServerSingleton.getInstance();
+ protected GremlinServerSingleton gremlinServerSingleton;
protected static GroovyQueryBuilderSingleton queryBuilderSingleton = GroovyQueryBuilderSingleton.getInstance();
protected final boolean isGremlin;
- /* dsl parameters to store dsl query and to check
+ protected Optional<DslQueryProcessor> dslQueryProcessorOptional;
+ /* dsl parameters to store dsl query and to check
* if this is a DSL request
*/
protected Optional<String> dsl;
protected final boolean isDsl ;
-
+
protected GenericQueryProcessor(Builder builder) {
this.uri = builder.getUri();
this.dbEngine = builder.getDbEngine();
@@ -67,6 +72,8 @@ public abstract class GenericQueryProcessor {
this.isGremlin = builder.isGremlin();
this.dsl = builder.getDsl();
this.isDsl = builder.isDsl();
+ this.gremlinServerSingleton = builder.getGremlinServerSingleton();
+ this.dslQueryProcessorOptional = builder.getDslQueryProcessor();
if (uri.isPresent()) {
queryParams = URITools.getQueryMap(uri.get());
@@ -79,12 +86,12 @@ public abstract class GenericQueryProcessor {
protected List<Object> processSubGraph(SubGraphStyle style, GraphTraversal<?,?> g) {
final List<Object> resultVertices = new Vector<>();
- g.store("x");
+ g.store("y");
if (SubGraphStyle.prune.equals(style) || SubGraphStyle.star.equals(style)) {
g.barrier().bothE();
if (SubGraphStyle.prune.equals(style)) {
- g.where(__.otherV().where(P.within("x")));
+ g.where(__.otherV().where(P.within("y")));
}
g.dedup().subgraph("subGraph").cap("subGraph").map(x -> (Graph)x.get()).next().traversal().V().forEachRemaining(x -> {
resultVertices.add(x);
@@ -95,7 +102,7 @@ public abstract class GenericQueryProcessor {
return resultVertices;
}
- public List<Object> execute(SubGraphStyle style) {
+ public List<Object> execute(SubGraphStyle style) throws FileNotFoundException, AAIException {
final List<Object> resultVertices;
Pair<String, Map<String, Object>> tuple = this.createQuery();
@@ -113,7 +120,7 @@ public abstract class GenericQueryProcessor {
return resultVertices;
}
- protected Pair<String, Map<String, Object>> createQuery() {
+ protected Pair<String, Map<String, Object>> createQuery() throws AAIException {
Map<String, Object> params = new HashMap<>();
String query = "";
if (this.isGremlin) {
@@ -121,12 +128,13 @@ public abstract class GenericQueryProcessor {
}else if (this.isDsl) {
String dslUserQuery = dsl.get();
- String dslQuery = new DslQueryProcessor.Builder().build(dslUserQuery);
-
- query = queryBuilderSingleton.executeTraversal(dbEngine, dslQuery, params);
- String startPrefix = "g.V()";
- query = startPrefix + query;
-
+ if(dslQueryProcessorOptional.isPresent()){
+ String dslQuery = dslQueryProcessorOptional.get().parseAaiQuery(dslUserQuery);
+ query = queryBuilderSingleton.executeTraversal(dbEngine, dslQuery, params);
+ String startPrefix = "g.V()";
+ query = startPrefix + query;
+ }
+ LOGGER.debug("Converted to gremlin query\n {}", query);
}else {
Matcher m = p.matcher(uri.get().getPath());
String queryName = "";
@@ -163,7 +171,10 @@ public abstract class GenericQueryProcessor {
// We are binding the array dynamically to the groovy processor correctly
// This will fix the memory issue of the method size too big
// as statically creating a list string and passing is not appropriate
- params.put("startVertexes", vertices.get().toArray());
+
+ Object [] startVertices = vertices.get().toArray();
+
+ params.put("startVertexes", startVertices);
if (query == null) {
query = "";
@@ -178,6 +189,23 @@ public abstract class GenericQueryProcessor {
} else {
query = startPrefix;
}
+
+			// Logging every start vertex could have a performance impact, so check the log level first:
+			// at TRACE level, print the converted gremlin query with the start vertices substituted in,
+			// so the output can be checked against the desired behavior;
+			// at DEBUG level, print only the converted gremlin query without the start vertices.
+			// Deeper logging can then be enabled just by changing the logback configuration.
+ if(LOGGER.isTraceEnabled()){
+ String readQuery = query.replaceAll("startVertexes",
+ Arrays.toString(startVertices).replaceAll("[^0-9,]", ""));
+ LOGGER.trace("Converted to gremlin query including the start vertices \n {}", readQuery);
+ }
+ else if(LOGGER.isDebugEnabled()){
+ LOGGER.debug("Converted to gremlin query without the start vertices \n {}", query);
+ }
+ } else {
+ throw new AAIException("AAI_6148");
}
}
@@ -196,9 +224,15 @@ public abstract class GenericQueryProcessor {
private Optional<String> dsl = Optional.empty();
private boolean isDsl = false;
+ private DslQueryProcessor dslQueryProcessor;
+ private GremlinServerSingleton gremlinServerSingleton;
+ private Optional<String> nodeType = Optional.empty();
+ private boolean isNodeTypeQuery = false;
+ protected MultivaluedMap<String, String> uriParams;
- public Builder(TransactionalGraphEngine dbEngine) {
+ public Builder(TransactionalGraphEngine dbEngine, GremlinServerSingleton gremlinServerSingleton) {
this.dbEngine = dbEngine;
+ this.gremlinServerSingleton = gremlinServerSingleton;
}
public Builder queryFrom(URI uri) {
@@ -222,6 +256,15 @@ public abstract class GenericQueryProcessor {
this.dsl = Optional.of(query);
this.isDsl = true;
}
+ if(queryType.equals("nodeQuery")){
+ this.nodeType = Optional.of(query);
+ this.isNodeTypeQuery = true;
+ }
+ return this;
+ }
+
+ public Builder uriParams(MultivaluedMap<String, String> uriParams) {
+ this.uriParams = uriParams;
return this;
}
@@ -229,6 +272,15 @@ public abstract class GenericQueryProcessor {
this.processorType = type;
return this;
}
+
+ public Builder queryProcessor(DslQueryProcessor dslQueryProcessor){
+ this.dslQueryProcessor = dslQueryProcessor;
+ return this;
+ }
+
+ public Optional<DslQueryProcessor> getDslQueryProcessor(){
+ return Optional.ofNullable(this.dslQueryProcessor);
+ }
public TransactionalGraphEngine getDbEngine() {
return dbEngine;
}
@@ -260,16 +312,24 @@ public abstract class GenericQueryProcessor {
public QueryProcessorType getProcessorType() {
return processorType;
}
+
+ public GremlinServerSingleton getGremlinServerSingleton(){
+ return gremlinServerSingleton;
+ }
+
+ public Optional<String> getNodeType() {
+ return nodeType;
+ }
+
+ public boolean isNodeTypeQuery() {
+ return isNodeTypeQuery;
+ }
public GenericQueryProcessor create() {
-
- if (this.getProcessorType().equals(QueryProcessorType.GREMLIN_SERVER)) {
- return new GremlinServerImpl(this);
- } else if (this.getProcessorType().equals(QueryProcessorType.LOCAL_GROOVY)) {
- return new GroovyShellImpl(this);
- } else {
- return new GremlinServerImpl(this);
+ if (isNodeTypeQuery()) {
+ return new NodeQueryProcessor(this);
}
+ return new GroovyShellImpl(this);
}
}
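
The Builder now takes the GremlinServerSingleton as a constructor argument, accepts an optional DslQueryProcessor for DSL requests, and create() dispatches to NodeQueryProcessor for nodeQuery requests and GroovyShellImpl otherwise. A fragment showing the updated contract for a DSL query; dbEngine, gremlinServerSingleton, dslQueryProcessor and dslQuery are assumed to be supplied by the caller, no_op is the default subgraph style the consumers pass, and note that execute(...) now declares FileNotFoundException and AAIException:

    // Sketch only; imports from org.onap.aai.* and java.io/java.util are assumed.
    public List<Object> runDslQuery(TransactionalGraphEngine dbEngine,
                                    GremlinServerSingleton gremlinServerSingleton,
                                    DslQueryProcessor dslQueryProcessor,
                                    String dslQuery) throws FileNotFoundException, AAIException {
        GenericQueryProcessor processor =
                new GenericQueryProcessor.Builder(dbEngine, gremlinServerSingleton)
                        .queryFrom(dslQuery, "dsl")
                        .queryProcessor(dslQueryProcessor)
                        .processWith(QueryProcessorType.LOCAL_GROOVY)
                        .create();
        return processor.execute(SubGraphStyle.no_op);
    }
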
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GetCustomQueryConfig.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GetCustomQueryConfig.java
index 2017f10..7227815 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/search/GetCustomQueryConfig.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/GetCustomQueryConfig.java
@@ -8,7 +8,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -57,122 +57,122 @@ import com.google.gson.reflect.TypeToken;
public class GetCustomQueryConfig {
- private JsonArray storedQueries = null;
- private CustomQueryConfig customQueryConfig;
+ private JsonArray storedQueries = null;
+ private CustomQueryConfig customQueryConfig;
+
+
+ private final static String QUERY_CONFIG = "query";
+ private final static String REQUIRED_CONFIG = "required-properties";
+ private final static String OPTIONAL_CONFIG = "optional-properties";
+ private final static String STORED_QUERIES_CONFIG = "stored-queries";
+ private final static String STORED_QUERY_CONFIG = "stored-query";
+
+// public static final String AAI_HOME_ETC_QUERY_JSON = AAIConstants.AAI_HOME_ETC + "query" + AAIConstants.AAI_FILESEP + "stored-queries.json";
+
+ public GetCustomQueryConfig(String customQueryJson ) {
+ init(customQueryJson);
+ }
+
+ private void init( String customQueryJson) {
+ JsonParser parser = new JsonParser();
+ JsonObject queriesObject = parser.parse(customQueryJson).getAsJsonObject();
+ if (queriesObject.has(STORED_QUERIES_CONFIG)) {
+
+ storedQueries = queriesObject.getAsJsonArray(STORED_QUERIES_CONFIG);
+ }
+ }
+
+ private List<String> toStringList(JsonArray array) {
+ Gson converter = new Gson();
+ Type listType = new TypeToken<List<String>>() {}.getType();
+ return converter.fromJson(array, listType);
+ }
+
+ private List<String> getPropertyList(JsonObject configObject, String config ) {
+ JsonElement subqueryConfig;
+ JsonArray props;
+
+ if ( configObject.has(config)) {
+ subqueryConfig = configObject.get(config);
+ if ( subqueryConfig != null && !subqueryConfig.isJsonNull() ) {
+ props = subqueryConfig.getAsJsonArray();
+ if ( props != null ) {
+ return toStringList(props);
+ }
+ }
+ }
+ return toStringList(null);
+ }
+
+ private String getPropertyString(JsonObject configObject, String config) {
+ JsonElement subqueryConfig;
+
+ if ( configObject.has(config)) {
+ subqueryConfig = configObject.get(config);
+ if ( subqueryConfig != null && !subqueryConfig.isJsonNull() ) {
+ return subqueryConfig.getAsString();
+ }
+ }
+ return null;
+ }
+
+ private void getStoredQueryBlock( JsonObject configObject, String config ) {
+ if ( !configObject.has(config)) {
+ customQueryConfig.setQueryRequiredProperties( new ArrayList<String>() );
+ customQueryConfig.setQueryOptionalProperties( new ArrayList<String>() );
+ return;
+ }
+
+ JsonElement queryConfig;
+ JsonObject subObject;
+ String multipleStartNodes;
+ List<String> propertyList;
+
+ queryConfig = configObject.get(config);
+ subObject = queryConfig.getAsJsonObject();
+ propertyList = getPropertyList(subObject, REQUIRED_CONFIG);
+ if ( propertyList == null ) {
+ propertyList = new ArrayList<String>();
+ }
+ customQueryConfig.setQueryRequiredProperties( propertyList );
+ propertyList = getPropertyList(subObject, OPTIONAL_CONFIG);
+ if ( propertyList == null ) {
+ propertyList = new ArrayList<String>();
+ }
+ customQueryConfig.setQueryOptionalProperties( propertyList );
+
+ }
+
+
+ public CustomQueryConfig getStoredQuery(String queryName ) {
+
+ customQueryConfig = null;
+ JsonObject configObject;
+ JsonElement query;
+ JsonElement queryConfig;
+ String queryString;
+
+ for (JsonElement storedQuery : storedQueries) {
+ if (storedQuery.isJsonObject()) {
+ JsonObject queryObject = storedQuery.getAsJsonObject();
+ query = queryObject.get(queryName);
+ if ( query != null ) {
+ customQueryConfig = new CustomQueryConfig();
+ configObject = query.getAsJsonObject();
+ getStoredQueryBlock(configObject, QUERY_CONFIG);
+ if ( configObject.has(STORED_QUERY_CONFIG)) {
+ queryConfig = configObject.get(STORED_QUERY_CONFIG);
+ customQueryConfig.setQuery(queryConfig.getAsString());
+ }
+ break;
+ }
+ }
+ }
+
+ return customQueryConfig;
+
+ }
- private static final String QUERY_CONFIG = "query";
- private static final String REQUIRED_CONFIG = "required-properties";
- private static final String OPTIONAL_CONFIG = "optional-properties";
- private static final String STORED_QUERIES_CONFIG = "stored-queries";
- private static final String STORED_QUERY_CONFIG = "stored-query";
-
- public static final String AAI_HOME_ETC_QUERY_JSON =
- AAIConstants.AAI_HOME_ETC + "query" + AAIConstants.AAI_FILESEP + "stored-queries.json";
-
- public GetCustomQueryConfig(String customQueryJson) {
- init(customQueryJson);
- }
-
- private void init(String customQueryJson) {
- JsonParser parser = new JsonParser();
- JsonObject queriesObject = parser.parse(customQueryJson).getAsJsonObject();
- if (queriesObject.has(STORED_QUERIES_CONFIG)) {
-
- storedQueries = queriesObject.getAsJsonArray(STORED_QUERIES_CONFIG);
- }
- }
-
- private List<String> toStringList(JsonArray array) {
- Gson converter = new Gson();
- Type listType = new TypeToken<List<String>>() {
- }.getType();
- return converter.fromJson(array, listType);
- }
-
- private List<String> getPropertyList(JsonObject configObject, String config) {
- JsonElement subqueryConfig;
- JsonArray props;
-
- if (configObject.has(config)) {
- subqueryConfig = configObject.get(config);
- if (subqueryConfig != null && !subqueryConfig.isJsonNull()) {
- props = subqueryConfig.getAsJsonArray();
- if (props != null) {
- return toStringList(props);
- }
- }
- }
- return toStringList(null);
- }
-
- private String getPropertyString(JsonObject configObject, String config) {
- JsonElement subqueryConfig;
-
- if (configObject.has(config)) {
- subqueryConfig = configObject.get(config);
- if (subqueryConfig != null && !subqueryConfig.isJsonNull()) {
- return subqueryConfig.getAsString();
- }
- }
- return null;
- }
-
- private void getStoredQueryBlock(JsonObject configObject, String config) {
- if (!configObject.has(config)) {
- customQueryConfig.setQueryRequiredProperties(new ArrayList<String>());
- customQueryConfig.setQueryOptionalProperties(new ArrayList<String>());
- return;
- }
-
- JsonElement queryConfig;
- JsonObject subObject;
- String multipleStartNodes;
- List<String> propertyList;
-
- queryConfig = configObject.get(config);
- subObject = queryConfig.getAsJsonObject();
- propertyList = getPropertyList(subObject, REQUIRED_CONFIG);
- if (propertyList == null) {
- propertyList = new ArrayList<String>();
- }
- customQueryConfig.setQueryRequiredProperties(propertyList);
- propertyList = getPropertyList(subObject, OPTIONAL_CONFIG);
- if (propertyList == null) {
- propertyList = new ArrayList<String>();
- }
- customQueryConfig.setQueryOptionalProperties(propertyList);
-
- }
-
-
- public CustomQueryConfig getStoredQuery(String queryName) {
-
- customQueryConfig = null;
- JsonObject configObject;
- JsonElement query;
- JsonElement queryConfig;
- String queryString;
-
- for (JsonElement storedQuery : storedQueries) {
- if (storedQuery.isJsonObject()) {
- JsonObject queryObject = storedQuery.getAsJsonObject();
- query = queryObject.get(queryName);
- if (query != null) {
- customQueryConfig = new CustomQueryConfig();
- configObject = query.getAsJsonObject();
- getStoredQueryBlock(configObject, QUERY_CONFIG);
- if (configObject.has(STORED_QUERY_CONFIG)) {
- queryConfig = configObject.get(STORED_QUERY_CONFIG);
- customQueryConfig.setQuery(queryConfig.getAsString());
- }
- break;
- }
- }
- }
-
- return customQueryConfig;
-
- }
}
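For reference, a minimal usage sketch of the rewritten parser above. The JSON shape matches what init() and getStoredQuery() expect (a "stored-queries" array of single-key objects, each with an optional "query" block and a "stored-query" string); the query name and Groovy snippet are invented for illustration, and the CustomQueryConfig getters are assumed to mirror the setters called in the diff.

    // Illustrative only; declared in the same package so no extra imports are needed.
    package org.onap.aai.rest.search;

    public class GetCustomQueryConfigDemo {
        public static void main(String[] args) {
            String json =
                  "{\n"
                + "  \"stored-queries\":[{\n"
                + "    \"pserver-fromHostname\":{\n"
                + "      \"query\":{\n"
                + "        \"required-properties\":[\"hostname\"]\n"
                + "      },\n"
                + "      \"stored-query\":\"builder.getVerticesByProperty('hostname', hostname)\"\n"
                + "    }\n"
                + "  }]\n"
                + "}";

            GetCustomQueryConfig config = new GetCustomQueryConfig(json);
            CustomQueryConfig stored = config.getStoredQuery("pserver-fromHostname");

            System.out.println(stored.getQuery());                    // the Groovy snippet above
            System.out.println(stored.getQueryRequiredProperties());  // [hostname]
        }
    }
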
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinGroovyShellSingleton.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinGroovyShellSingleton.java
deleted file mode 100644
index 30e876a..0000000
--- a/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinGroovyShellSingleton.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.aai.rest.search;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
-import org.codehaus.groovy.ast.ClassHelper;
-import org.codehaus.groovy.ast.expr.ClassExpression;
-import org.codehaus.groovy.ast.expr.PropertyExpression;
-import org.codehaus.groovy.control.CompilerConfiguration;
-import org.codehaus.groovy.control.customizers.ASTTransformationCustomizer;
-import org.codehaus.groovy.control.customizers.ImportCustomizer;
-
-import groovy.lang.Binding;
-import groovy.lang.GroovyShell;
-import groovy.lang.Script;
-import groovy.transform.TimedInterrupt;
-
-/**
- * Creates and returns a groovy shell with the
- * configuration to statically import graph classes
- *
- */
-public class GremlinGroovyShellSingleton {
-
- private final GroovyShell shell;
- private GremlinGroovyShellSingleton() {
- Map<String, Object> parameters = new HashMap<>();
- parameters.put("value", 30000);
- parameters.put("unit", new PropertyExpression(new ClassExpression(ClassHelper.make(TimeUnit.class)),"MILLISECONDS"));
-
- ASTTransformationCustomizer custom = new ASTTransformationCustomizer(parameters, TimedInterrupt.class);
- ImportCustomizer imports = new ImportCustomizer();
- imports.addStaticStars(
- "org.apache.tinkerpop.gremlin.process.traversal.P"
- );
- imports.addImports(
- "org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__",
- "org.apache.tinkerpop.gremlin.structure.T",
- "org.apache.tinkerpop.gremlin.process.traversal.P");
- CompilerConfiguration config = new CompilerConfiguration();
- config.addCompilationCustomizers(custom, imports);
-
- this.shell = new GroovyShell(config);
- }
-
- private static class Helper {
- private static final GremlinGroovyShellSingleton INSTANCE = new GremlinGroovyShellSingleton();
- }
-
- public static GremlinGroovyShellSingleton getInstance() {
-
- return Helper.INSTANCE;
- }
-
- /**
- * @param traversal
- * @param params
- * @return result of graph traversal
- */
- public GraphTraversal<?, ?> executeTraversal (String traversal, Map<String, Object> params) {
- Binding binding = new Binding(params);
- Script script = shell.parse(traversal);
- script.setBinding(binding);
- return (GraphTraversal<?, ?>) script.run();
- }
-}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerImpl.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerImpl.java
deleted file mode 100644
index a059b04..0000000
--- a/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerImpl.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.aai.rest.search;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Vector;
-
-import org.apache.tinkerpop.gremlin.driver.Client;
-import org.apache.tinkerpop.gremlin.driver.Cluster;
-import org.apache.tinkerpop.gremlin.driver.ResultSet;
-import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
-import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
-
-import org.onap.aai.util.AAIConfig;
-
-public class GremlinServerImpl extends GenericQueryProcessor {
-
-
- protected GremlinServerImpl(Builder builder) {
- super(builder);
- }
-
-
- @Override
- protected GraphTraversal<?,?> runQuery(String query, Map<String, Object> params) {
-
- //must force them into ids because of serialization issue with
- //tinkerpop-3.0.1-incubating
- query += ".id()";
- String rebindGraph = AAIConfig.get("aai.server.rebind", "g");
-
- if(!"g".equals(rebindGraph)){
- query = query.replaceFirst("g\\.V\\(", rebindGraph + ".V(");
- }
-
- Cluster cluster = gremlinServerSingleton.getCluster();
- Client client = cluster.connect();
-
- ResultSet results = client.submit(query, params);
-
-
- List<Object> vIds = new Vector<>();
- results.stream().forEach(x -> {
- Object obj = x.getObject();
- vIds.add(obj);
- });
-
- client.close();
-
- if (vIds.isEmpty()) {
- return __.start();
- } else {
- return this.dbEngine.asAdmin().getTraversalSource().V(vIds.toArray());
- }
- }
-
-}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerSingleton.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerSingleton.java
index a39bc03..20a18d9 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerSingleton.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerSingleton.java
@@ -8,7 +8,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -25,7 +25,9 @@ import org.onap.aai.util.FileWatcher;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import org.apache.tinkerpop.gremlin.driver.Cluster;
+import org.springframework.beans.factory.annotation.Value;
+import javax.annotation.PostConstruct;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
@@ -42,23 +44,13 @@ public class GremlinServerSingleton {
private static EELFLogger logger = EELFManager.getInstance().getLogger(GremlinServerSingleton.class);
- private Cluster cluster;
private boolean timerSet;
private Timer timer;
- private GetCustomQueryConfig queryConfig;
-
- private static class Helper {
-
- private static final GremlinServerSingleton INSTANCE = new GremlinServerSingleton();
- }
- private GremlinServerSingleton() {
- init();
- }
+ private GetCustomQueryConfig queryConfig;
- public static GremlinServerSingleton getInstance() {
- return Helper.INSTANCE;
- }
+ @Value("${schema.queries.location}")
+ private String storedQueriesLocation;
/**
* Initializes the gremlin server singleton
@@ -67,67 +59,60 @@ public class GremlinServerSingleton {
* Then creates a file watcher to watch the file every ten seconds
* and if there is a change in the file, then reloads the file into
* the properties object
+ *
*/
- private void init() {
-
- try {
- cluster = Cluster.build(new File(AAIConstants.AAI_HOME_ETC_APP_PROPERTIES + "gremlin-server-config.yaml"))
- .maxContentLength(6537920)
- .create();
- } catch (FileNotFoundException e) {
- logger.error("Unable to find the file: " + LogFormatTools.getStackTop(e));
- }
+ @PostConstruct
+ public void init() {
- try {
- String filepath = GetCustomQueryConfig.AAI_HOME_ETC_QUERY_JSON;
- Path path = Paths.get(filepath);
- String customQueryConfigJson = new String(Files.readAllBytes(path));
+ try {
+ String filepath = storedQueriesLocation + AAIConstants.AAI_FILESEP + "stored-queries.json";
+ Path path = Paths.get(filepath);
+ String customQueryConfigJson = new String(Files.readAllBytes(path));
+
- queryConfig = new GetCustomQueryConfig(customQueryConfigJson);
- } catch (IOException e) {
- logger.error("Error occurred during the processing of query json file: " + LogFormatTools.getStackTop(e));
- }
+ queryConfig = new GetCustomQueryConfig(customQueryConfigJson);
+ } catch (IOException e) {
+ logger.error("Error occurred during the processing of query json file: " + LogFormatTools.getStackTop(e));
+ }
- TimerTask task = new FileWatcher(new File(GetCustomQueryConfig.AAI_HOME_ETC_QUERY_JSON)) {
+
+ TimerTask task = new FileWatcher(new File(storedQueriesLocation)) {
@Override
protected void onChange(File file) {
- try {
- String filepath = GetCustomQueryConfig.AAI_HOME_ETC_QUERY_JSON;
- Path path = Paths.get(filepath);
- String customQueryConfigJson = new String(Files.readAllBytes(path));
- queryConfig = new GetCustomQueryConfig(customQueryConfigJson);
- } catch (IOException e) {
- logger.error(
- "Error occurred during the processing of query json file: " + LogFormatTools.getStackTop(e));
- }
+ try {
+ String filepath = storedQueriesLocation;
+ Path path = Paths.get(filepath);
+ String customQueryConfigJson = new String(Files.readAllBytes(path));
+ queryConfig = new GetCustomQueryConfig(customQueryConfigJson);
+ } catch (IOException e) {
+ logger.error("Error occurred during the processing of query json file: " + LogFormatTools.getStackTop(e));
+ }
}
};
if (!timerSet) {
timerSet = true;
timer = new Timer();
- timer.schedule(task, new Date(), 10000);
+ timer.schedule( task , new Date(), 10000 );
}
}
- public Cluster getCluster() {
- return cluster;
- }
-
/**
* Gets the query using CustomQueryConfig
+ * @param key the name of the stored query
+ * @return the stored query string, or null if no query is configured under that name
*/
- public String getStoredQueryFromConfig(String key) {
- CustomQueryConfig customQueryConfig = queryConfig.getStoredQuery(key);
- if (customQueryConfig == null) {
- return null;
- }
- return customQueryConfig.getQuery();
+ public String getStoredQueryFromConfig(String key){
+ CustomQueryConfig customQueryConfig = queryConfig.getStoredQuery(key);
+ if ( customQueryConfig == null ) {
+ return null;
+ }
+ return customQueryConfig.getQuery();
}
-
+
public CustomQueryConfig getCustomQueryConfig(String key) {
- return queryConfig.getStoredQuery(key);
+ return queryConfig.getStoredQuery(key);
}
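The class above now resolves stored-queries.json from the schema.queries.location property and re-reads it when the file changes. Below is a generic sketch of that watch-and-reload pattern in plain Java; it stands in for org.onap.aai.util.FileWatcher (assumed to poll File.lastModified()) and is not the ONAP implementation. The file path in main is hypothetical.

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.util.Date;
    import java.util.Timer;
    import java.util.TimerTask;

    public class ReloadingConfigSketch {

        private volatile String cachedJson;
        private long lastSeenModified;

        public void start(final File storedQueriesFile) {
            TimerTask task = new TimerTask() {
                @Override
                public void run() {
                    long modified = storedQueriesFile.lastModified();
                    if (modified != lastSeenModified) {
                        lastSeenModified = modified;
                        try {
                            cachedJson = new String(Files.readAllBytes(storedQueriesFile.toPath()));
                        } catch (IOException e) {
                            // the real class logs this through EELF instead
                            e.printStackTrace();
                        }
                    }
                }
            };
            // same cadence as the diff: poll every ten seconds, starting now
            new Timer().schedule(task, new Date(), 10000);
        }

        public static void main(String[] args) {
            // hypothetical location; in the commit it comes from ${schema.queries.location}
            new ReloadingConfigSketch().start(
                new File("/opt/app/aai-traversal/resources/etc/query/stored-queries.json"));
        }
    }
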
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyQueryBuilderSingleton.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyQueryBuilderSingleton.java
deleted file mode 100644
index b1a2b5f..0000000
--- a/aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyQueryBuilderSingleton.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.aai.rest.search;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.tinkerpop.gremlin.structure.Vertex;
-import org.codehaus.groovy.ast.ClassHelper;
-import org.codehaus.groovy.ast.expr.ClassExpression;
-import org.codehaus.groovy.ast.expr.PropertyExpression;
-import org.codehaus.groovy.control.CompilerConfiguration;
-import org.codehaus.groovy.control.customizers.ASTTransformationCustomizer;
-import org.codehaus.groovy.control.customizers.ImportCustomizer;
-import org.onap.aai.query.builder.QueryBuilder;
-import org.onap.aai.serialization.engines.QueryStyle;
-import org.onap.aai.serialization.engines.TransactionalGraphEngine;
-
-import groovy.lang.Binding;
-import groovy.lang.GroovyShell;
-import groovy.lang.Script;
-import groovy.transform.TimedInterrupt;
-
-/**
- * Creates and returns a groovy shell with the
- * configuration to statically import graph classes
- *
- */
-public class GroovyQueryBuilderSingleton {
-
- private final GroovyShell shell;
- private GroovyQueryBuilderSingleton() {
- Map<String, Object> parameters = new HashMap<>();
- parameters.put("value", 30000);
- parameters.put("unit", new PropertyExpression(new ClassExpression(ClassHelper.make(TimeUnit.class)),"MILLISECONDS"));
-
- ASTTransformationCustomizer custom = new ASTTransformationCustomizer(parameters, TimedInterrupt.class);
- ImportCustomizer imports = new ImportCustomizer();
- imports.addStaticStars(
- "org.apache.tinkerpop.gremlin.process.traversal.P"
- );
- imports.addImports(
- "org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__",
- "org.apache.tinkerpop.gremlin.structure.T",
- "org.apache.tinkerpop.gremlin.process.traversal.P",
- "org.onap.aai.serialization.db.EdgeType");
- CompilerConfiguration config = new CompilerConfiguration();
- config.addCompilationCustomizers(custom, imports);
-
- this.shell = new GroovyShell(config);
- }
-
- private static class Helper {
- private static final GroovyQueryBuilderSingleton INSTANCE = new GroovyQueryBuilderSingleton();
- }
-
- public static GroovyQueryBuilderSingleton getInstance() {
-
- return Helper.INSTANCE;
- }
-
- /**
- * @param traversal
- * @param params
- * @return result of graph traversal
- */
- public String executeTraversal (TransactionalGraphEngine engine, String traversal, Map<String, Object> params) {
- QueryBuilder<Vertex> builder = engine.getQueryBuilder(QueryStyle.GREMLIN_TRAVERSAL);
- Binding binding = new Binding(params);
- binding.setVariable("builder", builder);
- Script script = shell.parse(traversal);
- script.setBinding(binding);
- script.run();
-
- return builder.getQuery();
- }
-}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/ModelAndNamedQueryRestProvider.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/ModelAndNamedQueryRestProvider.java
index 0a6c1a4..4102c52 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/search/ModelAndNamedQueryRestProvider.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/ModelAndNamedQueryRestProvider.java
@@ -30,8 +30,11 @@ import org.onap.aai.logging.LoggingContext;
import org.onap.aai.logging.StopWatch;
import org.onap.aai.restcore.HttpMethod;
import org.onap.aai.restcore.RESTAPI;
-import org.onap.aai.util.AAIApiVersion;
+import org.onap.aai.setup.SchemaVersions;
import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.TraversalConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.onap.aai.concurrent.AaiCallable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.POST;
@@ -41,29 +44,35 @@ import javax.ws.rs.QueryParam;
import javax.ws.rs.core.*;
import javax.ws.rs.core.Response.Status;
import java.util.ArrayList;
-import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
/**
* Implements the search subdomain in the REST API. All API calls must include
* X-FromAppId and X-TransactionId in the header.
- *
-
*
*/
-
@Path("/search")
public class ModelAndNamedQueryRestProvider extends RESTAPI {
-
- protected static String authPolicyFunctionName = "search";
-
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(ModelAndNamedQueryRestProvider.class);
+
public static final String NAMED_QUERY = "/named-query";
public static final String MODEL_QUERY = "/model";
public static final String TARGET_ENTITY = "DB";
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(ModelAndNamedQueryRestProvider.class);
+
+ private SearchGraph searchGraph;
+
+ private SchemaVersions schemaVersions;
+
+ @Autowired
+ public ModelAndNamedQueryRestProvider(SearchGraph searchGraph, SchemaVersions schemaVersions){
+ this.searchGraph = searchGraph;
+ this.schemaVersions = schemaVersions;
+ }
+
/**
* Gets the named query response.
*
@@ -80,15 +89,15 @@ public class ModelAndNamedQueryRestProvider extends RESTAPI {
@Context HttpServletRequest req,
String queryParameters,
@Context UriInfo info) {
- return runner(AAIConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_APP,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
+ return runner(TraversalConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_APP,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
headers,
info,
HttpMethod.GET,
- new Callable<Response>() {
+ new AaiCallable<Response>() {
@Override
- public Response call() {
+ public Response process() {
return processNamedQueryResponse(headers, req, queryParameters);
}
}
@@ -117,12 +126,11 @@ public class ModelAndNamedQueryRestProvider extends RESTAPI {
AAIExtensionMap aaiExtMap = new AAIExtensionMap();
aaiExtMap.setHttpHeaders(headers);
aaiExtMap.setServletRequest(req);
- aaiExtMap.setApiVersion(AAIApiVersion.get());
+ aaiExtMap.setApiVersion(schemaVersions.getDefaultVersion().toString());
String realTime = headers.getRequestHeaders().getFirst("Real-Time");
//only consider header value for search
DBConnectionType type = this.determineConnectionType("force-cache", realTime);
- SearchGraph searchGraph = new SearchGraph();
LoggingContext.startTime();
StopWatch.conditionalStart();
@@ -187,15 +195,15 @@ public class ModelAndNamedQueryRestProvider extends RESTAPI {
String inboundPayload,
@QueryParam("action") String action,
@Context UriInfo info) {
- return runner(AAIConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_APP,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
+ return runner(TraversalConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_APP,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
headers,
info,
HttpMethod.GET,
- new Callable<Response>() {
+ new AaiCallable<Response>() {
@Override
- public Response call() {
+ public Response process() {
return processModelQueryResponse(headers, req, inboundPayload, action);
}
}
@@ -226,7 +234,7 @@ public class ModelAndNamedQueryRestProvider extends RESTAPI {
AAIExtensionMap aaiExtMap = new AAIExtensionMap();
aaiExtMap.setHttpHeaders(headers);
aaiExtMap.setServletRequest(req);
- aaiExtMap.setApiVersion(AAIApiVersion.get());
+ aaiExtMap.setApiVersion(schemaVersions.getDefaultVersion().toString());
aaiExtMap.setFromAppId(fromAppId);
aaiExtMap.setTransId(transId);
@@ -234,7 +242,6 @@ public class ModelAndNamedQueryRestProvider extends RESTAPI {
//only consider header value for search
DBConnectionType type = this.determineConnectionType("force-cache", realTime);
- SearchGraph searchGraph = new SearchGraph();
LoggingContext.startTime();
StopWatch.conditionalStart();
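The resource above now hands its work to an AaiCallable whose process() method runs inside runner(...)'s timeout handling. The stand-in below sketches only that shape for illustration; it is an assumption about what org.onap.aai.concurrent.AaiCallable wraps around process(), not its actual source.

    import java.util.concurrent.Callable;

    public abstract class MiniCallable<T> implements Callable<T> {

        // subclasses implement the actual request handling here
        public abstract T process() throws Exception;

        @Override
        public T call() throws Exception {
            // the real class is understood to save/restore per-request logging context here
            return process();
        }

        public static void main(String[] args) throws Exception {
            MiniCallable<String> task = new MiniCallable<String>() {
                @Override
                public String process() {
                    return "response body";
                }
            };
            System.out.println(task.call());
        }
    }
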
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/NodeQueryProcessor.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/NodeQueryProcessor.java
new file mode 100644
index 0000000..0126162
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/NodeQueryProcessor.java
@@ -0,0 +1,116 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.search;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import edu.emory.mathcs.backport.java.util.concurrent.TimeUnit;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.restcore.search.GroovyQueryBuilderSingleton;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.queryformats.SubGraphStyle;
+
+import java.io.FileNotFoundException;
+import java.util.*;
+import java.util.regex.Pattern;
+
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;
+
+public class NodeQueryProcessor extends GroovyShellImpl {
+
+ private static EELFLogger LOGGER = EELFManager.getInstance().getLogger(NodeQueryProcessor.class);
+
+ protected String nodeType;
+ private MultivaluedMap<String, String> nodeQueryParams = new MultivaluedHashMap<String, String>();
+ protected final Optional<Collection<Vertex>> vertices;
+ protected static Pattern p = Pattern.compile("query/(.*+)");
+ protected Optional<String> gremlin;
+ protected final TransactionalGraphEngine dbEngine;
+ protected static GroovyQueryBuilderSingleton queryBuilderSingleton = GroovyQueryBuilderSingleton.getInstance();
+
+ protected NodeQueryProcessor(Builder builder) {
+ super(builder);
+ this.nodeQueryParams = builder.uriParams;
+ if(builder.getNodeType().isPresent())
+ this.nodeType = builder.getNodeType().get();
+ this.dbEngine = builder.getDbEngine();
+ this.vertices = builder.getVertices();
+
+ }
+
+ public Pair<String, Map<String, Object>> createQuery() throws AAIException {
+ Map<String, Object> params = new HashMap<>();
+
+ Long timeNowInMilliSecs = System.currentTimeMillis();
+ Long startTime = 0L;
+ if(nodeQueryParams.containsKey("hours")){
+ Long hoursInMilliSec = TimeUnit.HOURS.toMillis(Long.parseLong(nodeQueryParams.getFirst("hours")));
+ startTime = timeNowInMilliSecs - hoursInMilliSec;
+ }
+ else if(nodeQueryParams.containsKey("date-time")){
+ Long dateTime = Long.parseLong(nodeQueryParams.getFirst("date-time"));
+ startTime = dateTime;
+ }
+
+ String query = "builder.getVerticesByProperty('aai-node-type', nodeType)"
+ + ".or(builder.newInstance().getVerticesGreaterThanProperty('aai-created-ts',startTime),"
+ + " builder.newInstance().getVerticesGreaterThanProperty('aai-last-mod-ts',startTime)" + ")";
+
+ params.put("startTime", startTime);
+ params.put("nodeType", nodeType);
+
+ query = queryBuilderSingleton.executeTraversal(dbEngine, query, params);
+
+ String startPrefix = "g.V()";
+
+ query = startPrefix + query;
+
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Converted to gremlin query without the start vertices \n {}", query);
+ }
+
+ return new Pair<>(query, params);
+ }
+
+ public List<Object> execute(SubGraphStyle style) throws FileNotFoundException, AAIException {
+ final List<Object> resultVertices = new Vector<>();
+
+ Pair<String, Map<String, Object>> tuple = this.createQuery();
+ String query = tuple.getValue0();
+ Map<String, Object> params = tuple.getValue1();
+
+ if (query.equals("")) {
+ // nothing to do, just exit
+ return new ArrayList<>();
+ }
+ GraphTraversal<?, ?> g = this.runQuery(query, params);
+
+ resultVertices.addAll(g.toList());
+
+ return resultVertices;
+ }
+
+}
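A small sketch of the time-window arithmetic in createQuery() above: an "hours" parameter becomes a lower bound relative to now, while "date-time" is taken as an absolute epoch-millisecond lower bound. The helper below is illustrative only and omits the surrounding builder/Groovy plumbing.

    import java.util.concurrent.TimeUnit;

    public class NodeQueryWindowSketch {

        static long startTime(Long hours, Long dateTimeMillis) {
            long now = System.currentTimeMillis();
            if (hours != null) {
                return now - TimeUnit.HOURS.toMillis(hours);   // e.g. hours=24 -> the last day
            }
            if (dateTimeMillis != null) {
                return dateTimeMillis;                          // absolute cut-off
            }
            return 0L;                                          // default in the diff: epoch
        }

        public static void main(String[] args) {
            System.out.println(startTime(24L, null));
            System.out.println(startTime(null, 1533081600000L));
        }
    }
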
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/SearchProvider.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/SearchProvider.java
index 4973060..f61e342 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/search/SearchProvider.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/SearchProvider.java
@@ -22,7 +22,6 @@ package org.onap.aai.rest.search;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.Callable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
@@ -37,18 +36,15 @@ import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
-import org.onap.aai.db.props.AAIProperties;
import org.onap.aai.dbgraphmap.SearchGraph;
import org.onap.aai.dbmap.DBConnectionType;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.introspection.Loader;
import org.onap.aai.introspection.LoaderFactory;
import org.onap.aai.introspection.ModelType;
-import org.onap.aai.introspection.Version;
import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.logging.LoggingContext;
import org.onap.aai.logging.StopWatch;
-import org.onap.aai.logging.LoggingContext.StatusCode;
import org.onap.aai.restcore.HttpMethod;
import org.onap.aai.restcore.RESTAPI;
import org.onap.aai.serialization.db.DBSerializer;
@@ -56,29 +52,53 @@ import org.onap.aai.serialization.engines.QueryStyle;
import org.onap.aai.serialization.engines.JanusGraphDBEngine;
import org.onap.aai.serialization.engines.TransactionalGraphEngine;
import org.onap.aai.serialization.queryformats.utils.UrlBuilder;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.TraversalConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.onap.aai.concurrent.AaiCallable;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
+import org.springframework.beans.factory.annotation.Value;
+
/**
* Implements the search subdomain in the REST API. All API calls must include
* X-FromAppId and X-TransactionId in the header.
- *
-
- *
*/
-
-@Path("/{version: v[789]|v1[01234]|latest}/search")
+@Path("/{version: v[1-9][0-9]*|latest}/search")
public class SearchProvider extends RESTAPI {
-
- protected static String authPolicyFunctionName = "search";
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(SearchProvider.class);
public static final String GENERIC_QUERY = "/generic-query";
public static final String NODES_QUERY = "/nodes-query";
public static final String TARGET_ENTITY = "DB";
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(SearchProvider.class);
+
+ private SearchGraph searchGraph;
+
+ private LoaderFactory loaderFactory;
+
+ private SchemaVersions schemaVersions;
+
+ private String basePath;
+
+ @Autowired
+ public SearchProvider(
+ LoaderFactory loaderFactory,
+ SearchGraph searchGraph,
+ SchemaVersions schemaVersions,
+ @Value("${schema.uri.base.path}") String basePath
+ ){
+ this.loaderFactory = loaderFactory;
+ this.searchGraph = searchGraph;
+ this.schemaVersions = schemaVersions;
+ this.basePath = basePath;
+ }
+
/**
* Gets the generic query response.
*
@@ -103,15 +123,15 @@ public class SearchProvider extends RESTAPI {
@PathParam("version")String versionParam,
@Context UriInfo info
) {
- return runner(AAIConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_APP,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
+ return runner(TraversalConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_APP,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
headers,
info,
HttpMethod.GET,
- new Callable<Response>() {
+ new AaiCallable<Response>() {
@Override
- public Response call() {
+ public Response process() {
return processGenericQueryResponse(headers, req, startNodeType, startNodeKeyParams, includeNodeTypes, depth, versionParam);
}
}
@@ -145,22 +165,17 @@ public class SearchProvider extends RESTAPI {
String realTime = headers.getRequestHeaders().getFirst("Real-Time");
//only consider header value for search
DBConnectionType type = this.determineConnectionType("force-cache", realTime);
- final Version version;
- if ("latest".equals(versionParam)) {
- version = AAIProperties.LATEST;
- } else {
- version = Version.valueOf(versionParam);
- }
+
+ final SchemaVersion version = new SchemaVersion(versionParam);
+
final ModelType factoryType = ModelType.MOXY;
- Loader loader = LoaderFactory.createLoaderForVersion(factoryType, version);
+ Loader loader = loaderFactory.createLoaderForVersion(factoryType, version);
TransactionalGraphEngine dbEngine = new JanusGraphDBEngine(
QueryStyle.TRAVERSAL,
type,
loader);
DBSerializer dbSerializer = new DBSerializer(version, dbEngine, factoryType, fromAppId);
- UrlBuilder urlBuilder = new UrlBuilder(version, dbSerializer);
- SearchGraph searchGraph = new SearchGraph();
-
+ UrlBuilder urlBuilder = new UrlBuilder(version, dbSerializer, schemaVersions, this.basePath);
LoggingContext.startTime();
StopWatch.conditionalStart();
searchResult = searchGraph.runGenericQuery(
@@ -240,15 +255,15 @@ public class SearchProvider extends RESTAPI {
@Context UriInfo info)
{
- return runner(AAIConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_APP,
- AAIConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
+ return runner(TraversalConstants.AAI_TRAVERSAL_TIMEOUT_ENABLED,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_APP,
+ TraversalConstants.AAI_TRAVERSAL_TIMEOUT_LIMIT,
headers,
info,
HttpMethod.GET,
- new Callable<Response>() {
+ new AaiCallable<Response>() {
@Override
- public Response call() {
+ public Response process() {
return processNodesQueryResponse(headers, req, searchNodeType, edgeFilterList, filterList, versionParam);
}
}
@@ -279,25 +294,19 @@ public class SearchProvider extends RESTAPI {
//only consider header value for search
DBConnectionType type = this.determineConnectionType("force-cache", realTime);
- final Version version;
- if ("latest".equals(versionParam)) {
- version = AAIProperties.LATEST;
- } else {
- version = Version.valueOf(versionParam);
- }
+ final SchemaVersion version = new SchemaVersion(versionParam);
+
final ModelType factoryType = ModelType.MOXY;
- Loader loader = LoaderFactory.createLoaderForVersion(factoryType, version);
+ Loader loader = loaderFactory.createLoaderForVersion(factoryType, version);
TransactionalGraphEngine dbEngine = new JanusGraphDBEngine(
QueryStyle.TRAVERSAL,
type,
loader);
DBSerializer dbSerializer = new DBSerializer(version, dbEngine, factoryType, fromAppId);
- UrlBuilder urlBuilder = new UrlBuilder(version, dbSerializer);
- SearchGraph searchGraph = new SearchGraph();
+ UrlBuilder urlBuilder = new UrlBuilder(version, dbSerializer, schemaVersions, this.basePath);
LoggingContext.startTime();
StopWatch.conditionalStart();
-
searchResult = searchGraph.runNodesQuery(headers,
searchNodeType,
edgeFilterList,
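The @Path template above widens the accepted versions from an enumerated v7-v14 list to any v<N> (N >= 1) or "latest". A quick way to exercise the new expression outside of JAX-RS:

    import java.util.regex.Pattern;

    public class VersionPathSketch {
        public static void main(String[] args) {
            Pattern version = Pattern.compile("v[1-9][0-9]*|latest");
            System.out.println(version.matcher("v11").matches());     // true
            System.out.println(version.matcher("v16").matches());     // true (rejected by the old template)
            System.out.println(version.matcher("latest").matches());  // true
            System.out.println(version.matcher("v0").matches());      // false
        }
    }
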
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/util/ConvertQueryPropertiesToJson.java b/aai-traversal/src/main/java/org/onap/aai/rest/util/ConvertQueryPropertiesToJson.java
index e8d1547..3bf9087 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/util/ConvertQueryPropertiesToJson.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/util/ConvertQueryPropertiesToJson.java
@@ -8,7 +8,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -32,102 +32,86 @@ import java.util.Properties;
import org.onap.aai.util.AAIConstants;
public class ConvertQueryPropertiesToJson {
-
- private static final int maxfilesize = 256000;
-
- private void addStart(StringBuilder sb) {
- sb.append("{\n \"stored-queries\":[{\n");
- }
-
- private void addRequiredQueryProperties(StringBuilder sb, List<String> rqd) {
- Iterator it = rqd.iterator();
- sb.append(" \"query\":{\n \"required-properties\":[");
- while (it.hasNext()) {
- sb.append("\"" + it.next() + "\"");
- if (it.hasNext()) {
- sb.append(",");
- }
- }
- sb.append("]\n },\n");
- }
-
- private void addAnotherQuery(StringBuilder sb, String queryName, String query, List<String> rqd) {
- sb.append(" \"" + queryName + "\":{\n");
- if (!rqd.isEmpty()) {
- addRequiredQueryProperties(sb, rqd);
- }
- sb.append(" \"stored-query\":\"" + query + "\"\n }\n },{\n");
- }
-
- private void addLastQuery(StringBuilder sb, String queryName, String query, List<String> rqd) {
- sb.append(" \"" + queryName + "\":{\n");
- if (!rqd.isEmpty()) {
- addRequiredQueryProperties(sb, rqd);
- }
- sb.append(" \"stored-query\":\"" + query + "\"\n }\n }]\n}\n");
- }
-
- private String get2ndParameter(String paramString) {
- String endParams = paramString.substring(0, paramString.indexOf(')'));
- String result = endParams.substring(endParams.indexOf(',') + 1);
- String lastParam = result.trim();
- if (lastParam.startsWith("\\") || lastParam.startsWith("'") || lastParam.startsWith("new ")) {
- return null;
- }
-
- return lastParam;
- }
-
- private List<String> findRqdProperties(String query) {
- String[] parts = query.split("getVerticesByProperty");
- List<String> result = new ArrayList<String>();
- if (parts.length == 1) {
- return result;
- }
- int count = 0;
- String foundRqdProperty;
- while (count++ < parts.length - 1) {
- foundRqdProperty = get2ndParameter(parts[count]);
- if (foundRqdProperty != null && !result.contains(foundRqdProperty)) {
- result.add(foundRqdProperty);
- }
- }
- return result;
- }
-
- public String convertProperties(Properties props) {
- Enumeration<?> e = props.propertyNames();
- StringBuilder sb = new StringBuilder(maxfilesize);
- String queryName;
- String query;
- addStart(sb);
- List<String> rqd;
- while (e.hasMoreElements()) {
- queryName = (String) e.nextElement();
- query = props.getProperty(queryName).trim().replace("\"", "\\\"");
- rqd = findRqdProperties(query);
- if (e.hasMoreElements()) {
- addAnotherQuery(sb, queryName, query, rqd);
- } else {
- addLastQuery(sb, queryName, query, rqd);
- }
- }
-
+
+ private final static int maxfilesize = 256000;
+
+ private void addStart( StringBuilder sb ) {
+ sb.append("{\n \"stored-queries\":[{\n");
+ }
+
+ private void addRequiredQueryProperties( StringBuilder sb, List<String> rqd ) {
+ Iterator it = rqd.iterator();
+ sb.append(" \"query\":{\n \"required-properties\":[");
+ while( it.hasNext()) {
+ sb.append("\"" + it.next() + "\"");
+ if ( it.hasNext()) {
+ sb.append(",");
+ }
+ }
+ sb.append("]\n },\n");
+ }
+
+ private void addAnotherQuery( StringBuilder sb, String queryName, String query, List<String> rqd ) {
+ sb.append(" \"" + queryName + "\":{\n");
+ if ( !rqd.isEmpty()) {
+ addRequiredQueryProperties( sb, rqd);
+ }
+ sb.append(" \"stored-query\":\"" + query + "\"\n }\n },{\n");
+ }
+
+ private void addLastQuery( StringBuilder sb, String queryName, String query, List<String> rqd ) {
+ sb.append(" \"" + queryName + "\":{\n");
+ if ( !rqd.isEmpty() ) {
+ addRequiredQueryProperties( sb, rqd);
+ }
+ sb.append(" \"stored-query\":\"" + query + "\"\n }\n }]\n}\n");
+ }
+
+ private String get2ndParameter( String paramString) {
+ String endParams = paramString.substring(0, paramString.indexOf(')'));
+ String result = endParams.substring(endParams.indexOf(',') + 1 );
+ String lastParam = result.trim();
+ if ( lastParam.startsWith("\\") || lastParam.startsWith("'") || lastParam.startsWith("new ") ){
+ return null;
+ }
+
+ return lastParam;
+ }
+
+ private List<String> findRqdProperties( String query) {
+ String[] parts = query.split("getVerticesByProperty");
+ List<String> result = new ArrayList<String>();
+ if ( parts.length == 1 )
+ return result;
+ int count = 0;
+ String foundRqdProperty;
+ while ( count++ < parts.length - 1 ) {
+ foundRqdProperty = get2ndParameter(parts[count]);
+ if ( foundRqdProperty != null && !result.contains(foundRqdProperty)) {
+ result.add(foundRqdProperty);
+ }
+ }
+ return result;
+ }
+
+ public String convertProperties( Properties props ) {
+ Enumeration<?> e = props.propertyNames();
+ StringBuilder sb = new StringBuilder(maxfilesize);
+ String queryName;
+ String query;
+ addStart( sb );
+ List<String> rqd;
+ while ( e.hasMoreElements()) {
+ queryName = (String)e.nextElement();
+ query = props.getProperty(queryName).trim().replace("\"", "\\\"");
+ rqd = findRqdProperties( query);
+ if ( e.hasMoreElements()) {
+ addAnotherQuery( sb, queryName, query, rqd);
+ } else {
+ addLastQuery( sb, queryName, query, rqd);
+ }
+ }
+
return sb.toString();
- }
-
- public static void main(String[] args) {
- File queryFile = new File(AAIConstants.AAI_HOME_ETC_QUERY);
- Properties properties = new Properties();
- try (FileInputStream fis = new FileInputStream(queryFile)) {
- properties.load(fis);
- } catch (IOException e) {
- e.printStackTrace();
- System.out.println("Error occurred during the processing of query file: " + e);
- }
- ConvertQueryPropertiesToJson c = new ConvertQueryPropertiesToJson();
- String json = c.convertProperties(properties);
- System.out.println("returned json:\n" + json);
- }
-
+ }
}
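With the main(...) harness removed above, a short usage sketch of convertProperties(...) may help (illustrative; the query below is made up). For each property it emits a stored-queries entry, deriving required-properties from the second argument of each getVerticesByProperty(...) call whose value is not a quoted literal.

    import java.util.Properties;
    import org.onap.aai.rest.util.ConvertQueryPropertiesToJson;

    public class ConvertQueryDemo {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.setProperty("pserver-fromHostname",
                    "builder.getVerticesByProperty('aai-node-type', 'pserver')"
                  + ".getVerticesByProperty('hostname', hostname)");

            String json = new ConvertQueryPropertiesToJson().convertProperties(props);
            // prints the stored-queries JSON document for the single query above,
            // with required-properties derived as ["hostname"]
            System.out.println(json);
        }
    }
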
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/util/ValidateEncoding.java b/aai-traversal/src/main/java/org/onap/aai/rest/util/ValidateEncoding.java
index 3b4ea73..7d04bf7 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/util/ValidateEncoding.java
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/util/ValidateEncoding.java
@@ -133,6 +133,9 @@ public class ValidateEncoding {
valid = false;
}
for (String item : params.get(key)) {
+ if(item.contains("+")){
+ item = item.replaceAll("\\+", "%20");
+ }
if (!this.checkEncoding(item)) {
valid = false;
}
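A tiny sketch of the normalization added above: query parameter values where "+" stands for a space are rewritten to "%20" before the encoding check, so they are not rejected as unencoded input. The value below is hypothetical.

    public class PlusNormalizationSketch {
        public static void main(String[] args) {
            String item = "a+b";                       // hypothetical query parameter value
            if (item.contains("+")) {
                item = item.replaceAll("\\+", "%20");
            }
            System.out.println(item);                  // a%20b
        }
    }
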
diff --git a/aai-traversal/src/main/java/org/onap/aai/service/AuthorizationService.java b/aai-traversal/src/main/java/org/onap/aai/service/AuthorizationService.java
index 616bb9c..2bb2794 100644
--- a/aai-traversal/src/main/java/org/onap/aai/service/AuthorizationService.java
+++ b/aai-traversal/src/main/java/org/onap/aai/service/AuthorizationService.java
@@ -22,7 +22,9 @@ package org.onap.aai.service;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import org.eclipse.jetty.util.security.Password;
+import org.onap.aai.Profiles;
import org.onap.aai.util.AAIConstants;
+import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
@@ -36,6 +38,7 @@ import java.util.Map;
import java.util.stream.Stream;
@Service
+@Profile(Profiles.ONE_WAY_SSL)
public class AuthorizationService {
private static final EELFLogger logger = EELFManager.getInstance().getLogger(AuthorizationService.class);
diff --git a/aai-traversal/src/main/java/org/onap/aai/service/RetiredService.java b/aai-traversal/src/main/java/org/onap/aai/service/RetiredService.java
new file mode 100644
index 0000000..5989e31
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/service/RetiredService.java
@@ -0,0 +1,67 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.service;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import java.util.Arrays;
+import java.util.List;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+@Service
+@PropertySource("classpath:retired.properties")
+@PropertySource(value = "file:${server.local.startpath}/retired.properties")
+public class RetiredService {
+
+ private String retiredPatterns;
+
+ private String retiredAllVersions;
+
+ private List<Pattern> retiredPatternsList;
+ private List<Pattern> retiredAllVersionList;
+
+ @PostConstruct
+ public void initialize(){
+ this.retiredPatternsList = Arrays.stream(retiredPatterns.split(",")).map(Pattern::compile).collect(Collectors.toList());
+ this.retiredAllVersionList = Arrays.stream(retiredAllVersions.split(",")).map(Pattern::compile).collect(Collectors.toList());
+ }
+
+ @Value("${retired.api.pattern.list}")
+ public void setRetiredPatterns(String retiredPatterns){
+ this.retiredPatterns = retiredPatterns;
+ }
+
+ public List<Pattern> getRetiredPatterns(){
+ return retiredPatternsList;
+ }
+
+ @Value("${retired.api.all.versions}")
+ public void setRetiredAllVersions(String retiredPatterns){
+ this.retiredAllVersions = retiredPatterns;
+ }
+
+ public List<Pattern> getRetiredAllVersionList(){
+ return retiredAllVersionList;
+ }
+}
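A minimal sketch of how RetiredService's comma-separated properties become Pattern lists, and how a caller (for example a retired-URI interceptor) might test an incoming URI. The property value and URI below are invented; the real values live in retired.properties.

    import java.util.Arrays;
    import java.util.List;
    import java.util.regex.Pattern;
    import java.util.stream.Collectors;

    public class RetiredPatternsSketch {
        public static void main(String[] args) {
            String retiredPatterns = "^/aai/v[2-6]/.*$,^/aai/v[2-6]$";   // hypothetical values
            List<Pattern> compiled = Arrays.stream(retiredPatterns.split(","))
                                           .map(Pattern::compile)
                                           .collect(Collectors.toList());

            String requestUri = "/aai/v3/network/generic-vnfs";
            boolean retired = compiled.stream().anyMatch(p -> p.matcher(requestUri).matches());
            System.out.println(retired);   // true
        }
    }
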
diff --git a/aai-traversal/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java b/aai-traversal/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java
deleted file mode 100644
index d743adb..0000000
--- a/aai-traversal/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.aai.util;
-
-import java.io.IOException;
-import java.util.UUID;
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-
-import org.onap.aai.dbmap.AAIGraph;
-import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.introspection.ModelInjestor;
-import org.onap.aai.logging.ErrorLogHelper;
-import org.onap.aai.logging.LogFormatTools;
-import org.onap.aai.logging.LoggingContext;
-import org.onap.aai.logging.LoggingContext.StatusCode;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-public class AAIAppServletContextListener implements ServletContextListener {
-
- private static final String MICRO_SVC="aai-traversal";
- private static final String ACTIVEMQ_TCP_URL = "tcp://localhost:61446";
- private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(AAIAppServletContextListener.class.getName());
-
- /**
- * Destroys Context
- *
- * @param arg0 the ServletContextEvent
- */
- public void contextDestroyed(ServletContextEvent arg0) {
- }
-
- /**
- * Initializes Context
- *
- * @param arg0 the ServletContextEvent
- */
- public void contextInitialized(ServletContextEvent arg0) {
- System.setProperty("org.onap.aai.serverStarted", "false");
- System.setProperty("aai.service.name", "traversal");
-
- LoggingContext.save();
- LoggingContext.component("init");
- LoggingContext.partnerName("NA");
- LoggingContext.targetEntity(MICRO_SVC);
- LoggingContext.requestId(UUID.randomUUID().toString());
- LoggingContext.serviceName(MICRO_SVC);
- LoggingContext.targetServiceName("contextInitialized");
- LoggingContext.statusCode(StatusCode.COMPLETE);
- LOGGER.info("AAI Server initialization started...");
- try {
- LOGGER.info("Loading aaiconfig.properties");
- AAIConfig.init();
-
- LOGGER.info("Loading error.properties");
- ErrorLogHelper.loadProperties();
-
- LOGGER.info("Loading graph database");
-
- AAIGraph.getInstance();
- ModelInjestor.getInstance();
-
- LOGGER.info("A&AI Server initialization succcessful.");
- System.setProperty("activemq.tcp.url", ACTIVEMQ_TCP_URL);
- System.setProperty("org.onap.aai.serverStarted", "true");
-
- Runtime.getRuntime().addShutdownHook(new Thread() {
- public void run() {
- LOGGER.info("AAIGraph shutting down");
- AAIGraph.getInstance().graphShutdown();
- LOGGER.info("AAIGraph shutdown");
- System.out.println("Shutdown hook triggered.");
- }
- });
-
- } catch (AAIException e) {
- ErrorLogHelper.logException(e);
- throw new RuntimeException("AAIException caught while initializing A&AI server", e);
- } catch (IOException e) {
- ErrorLogHelper.logError("AAI_4000", e.getMessage());
- throw new RuntimeException("IOException caught while initializing A&AI server", e);
- } catch (Exception e) {
- LOGGER.error("Unknown failure while initializing A&AI Server" + LogFormatTools.getStackTop(e));
- throw new RuntimeException("Unknown failure while initializing A&AI server", e);
- }
-
- LOGGER.info("Graph-Query MicroService Started");
- LOGGER.debug("Graph-Query MicroService Started");
- LoggingContext.restore();
-
- }
-}
diff --git a/aai-traversal/src/main/java/org/onap/aai/util/MakeNamedQuery.java b/aai-traversal/src/main/java/org/onap/aai/util/MakeNamedQuery.java
index 68d204f..a805e19 100644
--- a/aai-traversal/src/main/java/org/onap/aai/util/MakeNamedQuery.java
+++ b/aai-traversal/src/main/java/org/onap/aai/util/MakeNamedQuery.java
@@ -24,17 +24,18 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;
-import java.util.Optional;
import java.util.UUID;
import org.apache.commons.io.FileUtils;
-
+import org.onap.aai.config.SpringContextAware;
import org.onap.aai.introspection.Introspector;
import org.onap.aai.introspection.Loader;
import org.onap.aai.introspection.LoaderFactory;
import org.onap.aai.introspection.ModelType;
-import org.onap.aai.introspection.Version;
import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
public class MakeNamedQuery {
@@ -67,104 +68,98 @@ public class MakeNamedQuery {
System.exit(0);
}
+ AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
+ "org.onap.aai.config",
+ "org.onap.aai.setup"
+ );
- Loader loader = LoaderFactory.createLoaderForVersion(ModelType.MOXY, Version.valueOf(_apiVersion));
+ LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
+ SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
- // iterate the collection of resources
+ if(schemaVersions.getVersions().contains(_apiVersion)){
- ArrayList<String> processedWidgets = new ArrayList<>();
+ Loader loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, new SchemaVersion(_apiVersion));
+ // iterate the collection of resources
- HashMap<String, List<Introspector>> widgetToRelationship = new HashMap<String, List<Introspector>>();
- for (Entry<String, Introspector> aaiResEnt : loader.getAllObjects().entrySet()) {
- Introspector meObject = loader.introspectorFromName("model");
- // no need for a ModelVers DynamicEntity
+ ArrayList<String> processedWidgets = new ArrayList<>();
- Introspector aaiRes = aaiResEnt.getValue();
- if (!(aaiRes.isContainer() || aaiRes.getName().equals("aai-internal"))) {
- String resource = aaiRes.getName();
+ HashMap<String, List<Introspector>> widgetToRelationship = new HashMap<>();
+ for (Entry<String, Introspector> aaiResEnt : loader.getAllObjects().entrySet()) {
+ Introspector meObject = loader.introspectorFromName("model");
+ // no need for a ModelVers DynamicEntity
- if (processedWidgets.contains(resource)) {
- continue;
- }
- processedWidgets.add(resource);
-
- String widgetName = resource;
- String filePathString = widgetJsonDir + "/" + widgetName + "-" + modelVersion + ".json";
- File f = new File(filePathString);
- if (f.exists()) {
- System.out.println(f.toString());
- String json = FileUtils.readFileToString(f);
-
- meObject = loader.unmarshal("Model", json);
- String modelInvariantId = meObject.getValue("model-invariant-id");
- if (meObject.hasProperty("model-vers")) {
- Introspector modelVers = meObject.getWrappedValue("model-vers");
- List<Introspector> modelVerList = modelVers.getWrappedListValue("model-ver");
- for (Introspector modelVer : modelVerList) {
-
- List<Introspector> relList = new ArrayList<Introspector>();
- Introspector widgetRelationship = makeWidgetRelationship(loader, modelInvariantId,
- modelVer.getValue("model-version-id").toString());
- relList.add(widgetRelationship);
-
- widgetToRelationship.put(widgetName, relList);
+ Introspector aaiRes = aaiResEnt.getValue();
+
+ if (!(aaiRes.isContainer() || aaiRes.getName().equals("aai-internal"))) {
+ String resource = aaiRes.getName();
+
+ if (processedWidgets.contains(resource)) {
+ continue;
+ }
+ processedWidgets.add(resource);
+
+ String widgetName = resource;
+ String filePathString = widgetJsonDir + "/" + widgetName + "-" + modelVersion + ".json";
+ File f = new File(filePathString);
+ if (f.exists()) {
+ System.out.println(f.toString());
+ String json = FileUtils.readFileToString(f);
+
+ meObject = loader.unmarshal("Model", json);
+ String modelInvariantId = meObject.getValue("model-invariant-id");
+ if (meObject.hasProperty("model-vers")) {
+ Introspector modelVers = meObject.getWrappedValue("model-vers");
+ List<Introspector> modelVerList = (List<Introspector>) modelVers.getWrappedListValue("model-ver");
+ for (Introspector modelVer : modelVerList) {
+
+ List<Introspector> relList = new ArrayList<Introspector>();
+ Introspector widgetRelationship = makeWidgetRelationship(loader, modelInvariantId,
+ modelVer.getValue("model-version-id").toString());
+ relList.add(widgetRelationship);
+
+ widgetToRelationship.put(widgetName, relList);
+ }
}
}
}
}
- }
-
-// esr-system-info-from-vnf=builder.store('x').union(\
-// builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').union(\
-// builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x')\
-// .createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x')\
-// .createEdgeTraversal(EdgeType.TREE, 'cloud-region', 'esr-system-info').store('x')\
-// )).cap('x').unfold.dedup()
-
- //source vnf-id, related service-instance-id, all related vnfs in this service-instance-id
-
- //this should be abstracted and moved to a file
-
- HashMap<String, List<Introspector>> relationshipMap = new HashMap<String, List<Introspector>>();
-
- List<Introspector> genericVnfRelationship = widgetToRelationship.get("generic-vnf");
- List<Introspector> vserverRelationship = widgetToRelationship.get("vserver");
- List<Introspector> tenantRelationship = widgetToRelationship.get("tenant");
- List<Introspector> cloudRegionRelationship = widgetToRelationship.get("cloud-region");
- List<Introspector> esrSystemInfoRelationship = widgetToRelationship.get("esr-system-info");
-
- Introspector namedQueryObj = loader.introspectorFromName("named-query");
- namedQueryObj.setValue("named-query-uuid", namedQueryUuid);
- namedQueryObj.setValue("named-query-name", "vnf-to-esr-system-info");
- namedQueryObj.setValue("named-query-version", "1.0");
- namedQueryObj.setValue("description", "Named Query - VNF to ESR System Info");
-
- Optional<Introspector> genericVnfNQE = tryToSetUpNQElements(Optional.of(namedQueryObj), genericVnfRelationship);
-
- Optional<Introspector> vserverNQE = tryToSetUpNQElements(genericVnfNQE, vserverRelationship);
-
- Optional<Introspector> tenantNQE = tryToSetUpNQElements(vserverNQE, tenantRelationship);
-
- Optional<Introspector> cloudRegionNQE = tryToSetUpNQElements(tenantNQE, cloudRegionRelationship);
- Optional<Introspector> esrSystemInfoNQE = tryToSetUpNQElements(cloudRegionNQE, esrSystemInfoRelationship);
-
- System.out.println(namedQueryObj.marshal(true));
-
- System.exit(0);
+ //source vnf-id, related service-instance-id, all related vnfs in this service-instance-id
+ //this should be abstracted and moved to a file
- }
+ HashMap<String, List<Introspector>> relationshipMap = new HashMap<String, List<Introspector>>();
+
+ List<Introspector> genericVnfRelationship = widgetToRelationship.get("generic-vnf");
+ List<Introspector> vserverRelationship = widgetToRelationship.get("vserver");
+ List<Introspector> tenantRelationship = widgetToRelationship.get("tenant");
+ List<Introspector> cloudRegionRelationship = widgetToRelationship.get("cloud-region");
+ List<Introspector> esrSystemInfoRelationship = widgetToRelationship.get("esr-system-info");
+
+ Introspector namedQueryObj = loader.introspectorFromName("named-query");
+ namedQueryObj.setValue("named-query-uuid", namedQueryUuid);
+ namedQueryObj.setValue("named-query-name", "vnf-to-esr-system-info");
+ namedQueryObj.setValue("named-query-version", "1.0");
+ namedQueryObj.setValue("description", "Named Query - VNF to ESR System Info");
+
+ Introspector genericVnfNQE = setupNQElements(namedQueryObj, genericVnfRelationship);
+
+ Introspector vserverNQE = setupNQElements(genericVnfNQE, vserverRelationship);
+
+ Introspector tenantNQE = setupNQElements(vserverNQE, tenantRelationship);
+
+ Introspector cloudRegionNQE = setupNQElements(tenantNQE, cloudRegionRelationship);
+
+ Introspector esrSystemInfoNQE = setupNQElements(cloudRegionNQE, esrSystemInfoRelationship);
+
+ System.out.println(namedQueryObj.marshal(true));
- private static Optional<Introspector> tryToSetUpNQElements(Optional<Introspector> genericVnfNQE, List<Introspector> vserverRelationship) {
- if(genericVnfNQE.isPresent()) {
- return Optional.ofNullable(setupNQElements(genericVnfNQE.get(), vserverRelationship));
- } else {
- return Optional.empty();
}
- }
+ System.exit(0);
+
+ }
private static List<Introspector> getRels(String widgetName, HashMap<String, Introspector> widgetToRelationship) {
List<Introspector> relList = new ArrayList<Introspector>();
Introspector genericVnfRelationship = widgetToRelationship.get(widgetName);
@@ -180,16 +175,14 @@ public class MakeNamedQuery {
if (nqeObj.getWrappedValue("named-query-elements") != null) {
newNQElements = nqeObj.getWrappedValue("named-query-elements");
nqElementList = newNQElements.getValue("named-query-element");
- } else {
+ } else {
newNQElements = nqeObj.newIntrospectorInstanceOfProperty("named-query-elements");
nqeObj.setValue("named-query-elements", newNQElements.getUnderlyingObject());
- nqElementList = newNQElements.getValue("named-query-element");
+ nqElementList = (List<Object>)newNQElements.getValue("named-query-element");
}
newNQElement = loadNQElement(newNQElements, listOfRelationships);
- if (newNQElement != null) {
- nqElementList.add(newNQElement.getUnderlyingObject());
- }
-
+ nqElementList.add(newNQElement.getUnderlyingObject());
+
} catch (AAIUnknownObjectException e) {
// TODO Auto-generated catch block
e.printStackTrace();
@@ -210,7 +203,7 @@ public class MakeNamedQuery {
Introspector newRelationshipList = newNqElement.getLoader().introspectorFromName("relationship-list");
newNqElement.setValue("relationship-list", newRelationshipList.getUnderlyingObject());
- List<Object> newRelationshipListList = newRelationshipList.getValue("relationship");
+ List<Object> newRelationshipListList = (List<Object>)newRelationshipList.getValue("relationship");
for (Introspector rel : listOfRelationships) {
newRelationshipListList.add(rel.getUnderlyingObject());
@@ -232,7 +225,7 @@ public class MakeNamedQuery {
try {
newRelationship = loader.introspectorFromName("relationship");
- List<Object> newRelationshipData = newRelationship.getValue("relationship-data");
+ List<Object> newRelationshipData = (List<Object>)newRelationship.getValue("relationship-data");
newRelationship.setValue("related-to", "model");
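For orientation when reading the chain of setupNQElements(...) calls above: each call hangs one new named-query-element (carrying that widget's model relationships) under the element returned by the previous call, which is how the generic-vnf > vserver > tenant > cloud-region > esr-system-info nesting of the vnf-to-esr-system-info named query is produced. A minimal sketch of that nesting, using a plain map tree instead of the real Introspector/Loader API (class and method names below are illustrative only, not part of the codebase):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical, map-based stand-in for the Introspector-backed named-query tree;
// this is NOT the org.onap.aai API, only an illustration of what the chained
// setupNQElements(...) calls above build.
public class NamedQueryChainSketch {

    // Create an element holding its widget relationships plus an (initially empty)
    // container for nested named-query-elements.
    static Map<String, Object> newElement(List<String> relationships) {
        Map<String, Object> element = new LinkedHashMap<>();
        element.put("relationship-list", relationships);
        element.put("named-query-elements", new ArrayList<Map<String, Object>>());
        return element;
    }

    // Mirrors the shape of setupNQElements(parent, rels): hang a new element under
    // the parent's named-query-elements and return it so the next call nests deeper.
    @SuppressWarnings("unchecked")
    static Map<String, Object> setupNqElement(Map<String, Object> parent, List<String> rels) {
        List<Map<String, Object>> children =
                (List<Map<String, Object>>) parent.get("named-query-elements");
        Map<String, Object> child = newElement(rels);
        children.add(child);
        return child;
    }

    public static void main(String[] args) {
        Map<String, Object> namedQuery = newElement(Arrays.asList());  // stands in for namedQueryObj
        Map<String, Object> vnf     = setupNqElement(namedQuery, Arrays.asList("generic-vnf model rel"));
        Map<String, Object> vserver = setupNqElement(vnf, Arrays.asList("vserver model rel"));
        Map<String, Object> tenant  = setupNqElement(vserver, Arrays.asList("tenant model rel"));
        Map<String, Object> region  = setupNqElement(tenant, Arrays.asList("cloud-region model rel"));
        setupNqElement(region, Arrays.asList("esr-system-info model rel"));
        // Prints the nested vnf -> vserver -> tenant -> cloud-region -> esr chain.
        System.out.println(namedQuery);
    }
}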
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/retired/V3ThroughV7Consumer.java b/aai-traversal/src/main/java/org/onap/aai/util/TraversalConstants.java
index 2339764..69e1e9f 100644
--- a/aai-traversal/src/main/java/org/onap/aai/rest/retired/V3ThroughV7Consumer.java
+++ b/aai-traversal/src/main/java/org/onap/aai/util/TraversalConstants.java
@@ -17,11 +17,23 @@
* limitations under the License.
* ============LICENSE_END=========================================================
*/
-package org.onap.aai.rest.retired;
+package org.onap.aai.util;
-import javax.ws.rs.Path;
-
-@Path("{version: v[3-7]}")
-public class V3ThroughV7Consumer extends RetiredConsumer {
+public final class TraversalConstants {
+ public static final int AAI_QUERY_PORT = 8446;
+
+ public static final String AAI_TRAVERSAL_TIMEOUT_LIMIT = "aai.traversal.timeoutlimit";
+ public static final String AAI_TRAVERSAL_TIMEOUT_ENABLED = "aai.traversal.timeoutenabled";
+ public static final String AAI_TRAVERSAL_TIMEOUT_APP = "aai.traversal.timeout.appspecific";
+
+ public static final String AAI_TRAVERSAL_DSL_TIMEOUT_LIMIT = "aai.traversal.dsl.timeoutlimit";
+ public static final String AAI_TRAVERSAL_DSL_TIMEOUT_ENABLED = "aai.traversal.dsl.timeoutenabled";
+ public static final String AAI_TRAVERSAL_DSL_TIMEOUT_APP = "aai.traversal.dsl.timeout.appspecific";
+
+ public static final long HISTORY_MAX_HOURS = 192;
+
+ private TraversalConstants() {
+ // prevent instantiation
+ }
}
diff --git a/aai-traversal/src/main/java/org/onap/aai/web/JerseyConfiguration.java b/aai-traversal/src/main/java/org/onap/aai/web/JerseyConfiguration.java
index b979fb8..231c82b 100644
--- a/aai-traversal/src/main/java/org/onap/aai/web/JerseyConfiguration.java
+++ b/aai-traversal/src/main/java/org/onap/aai/web/JerseyConfiguration.java
@@ -8,7 +8,7 @@
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -22,8 +22,9 @@ package org.onap.aai.web;
import org.glassfish.jersey.filter.LoggingFilter;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletProperties;
+import org.onap.aai.rest.DslConsumer;
import org.onap.aai.rest.QueryConsumer;
-import org.onap.aai.rest.retired.V3ThroughV7Consumer;
+import org.onap.aai.rest.RecentAPIConsumer;
import org.onap.aai.rest.search.ModelAndNamedQueryRestProvider;
import org.onap.aai.rest.search.SearchProvider;
import org.onap.aai.rest.util.EchoResponse;
@@ -34,7 +35,6 @@ import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;
import javax.annotation.Priority;
-import javax.ws.rs.ApplicationPath;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.ContainerResponseFilter;
import java.util.List;
@@ -43,7 +43,6 @@ import java.util.logging.Logger;
import java.util.stream.Collectors;
@Component
-@ApplicationPath("/aai")
public class JerseyConfiguration extends ResourceConfig {
private static final Logger log = Logger.getLogger(JerseyConfiguration.class.getName());
@@ -58,8 +57,8 @@ public class JerseyConfiguration extends ResourceConfig {
register(SearchProvider.class);
register(ModelAndNamedQueryRestProvider.class);
register(QueryConsumer.class);
-
- register(V3ThroughV7Consumer.class);
+ register(RecentAPIConsumer.class);
+ register(DslConsumer.class);
register(EchoResponse.class);
//Request Filters
@@ -83,25 +82,28 @@ public class JerseyConfiguration extends ResourceConfig {
// Filter them based on the clazz that was passed in
Set<Class<? extends ContainerRequestFilter>> filters = reflections.getSubTypesOf(ContainerRequestFilter.class);
+
// Check to ensure that each of the filter has the @Priority annotation and if not throw exception
for (Class filterClass : filters) {
if (filterClass.getAnnotation(Priority.class) == null) {
- throw new RuntimeException(
- "Container filter " + filterClass.getName() + " does not have @Priority annotation");
+ throw new RuntimeException("Container filter " + filterClass.getName() + " does not have @Priority annotation");
}
}
// Turn the set back into a list
List<Class<? extends ContainerRequestFilter>> filtersList = filters
- .stream()
- .filter(f -> !(f.isAnnotationPresent(Profile.class)
- && !env.acceptsProfiles(f.getAnnotation(Profile.class).value()))
- )
- .collect(Collectors.toList());
+ .stream()
+ .filter(f -> {
+ if (f.isAnnotationPresent(Profile.class)
+ && !env.acceptsProfiles(f.getAnnotation(Profile.class).value())) {
+ return false;
+ }
+ return true;
+ })
+ .collect(Collectors.toList());
// Sort them by their priority levels value
- filtersList.sort((c1, c2) -> Integer.valueOf(c1.getAnnotation(Priority.class).value())
- .compareTo(c2.getAnnotation(Priority.class).value()));
+ filtersList.sort((c1, c2) -> Integer.valueOf(c1.getAnnotation(Priority.class).value()).compareTo(c2.getAnnotation(Priority.class).value()));
// Then register this to the jersey application
filtersList.forEach(this::register);
@@ -112,29 +114,31 @@ public class JerseyConfiguration extends ResourceConfig {
// Find all the classes within the interceptors package
Reflections reflections = new Reflections("org.onap.aai.interceptors");
// Filter them based on the clazz that was passed in
- Set<Class<? extends ContainerResponseFilter>> filters = reflections
- .getSubTypesOf(ContainerResponseFilter.class);
+ Set<Class<? extends ContainerResponseFilter>> filters = reflections.getSubTypesOf(ContainerResponseFilter.class);
+
// Check to ensure that each of the filter has the @Priority annotation and if not throw exception
for (Class filterClass : filters) {
if (filterClass.getAnnotation(Priority.class) == null) {
- throw new RuntimeException(
- "Container filter " + filterClass.getName() + " does not have @Priority annotation");
+ throw new RuntimeException("Container filter " + filterClass.getName() + " does not have @Priority annotation");
}
}
// Turn the set back into a list
List<Class<? extends ContainerResponseFilter>> filtersList = filters.stream()
- .filter(f -> !(f.isAnnotationPresent(Profile.class)
- && !env.acceptsProfiles(f.getAnnotation(Profile.class).value())))
- .collect(Collectors.toList());
+ .filter(f -> {
+ if (f.isAnnotationPresent(Profile.class)
+ && !env.acceptsProfiles(f.getAnnotation(Profile.class).value())) {
+ return false;
+ }
+ return true;
+ })
+ .collect(Collectors.toList());
// Sort them by their priority levels value
- filtersList.sort((c1, c2) -> Integer.valueOf(c1.getAnnotation(Priority.class).value())
- .compareTo(c2.getAnnotation(Priority.class).value()));
+ filtersList.sort((c1, c2) -> Integer.valueOf(c1.getAnnotation(Priority.class).value()).compareTo(c2.getAnnotation(Priority.class).value()));
// Then register this to the jersey application
filtersList.forEach(this::register);
}
-
}
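The interceptor registration above expects every discovered ContainerRequestFilter/ContainerResponseFilter to carry a javax.annotation.Priority annotation, drops classes whose @Profile is not active, and registers the rest in ascending priority order. A self-contained sketch of just the priority check and sort, with two made-up filter classes standing in for the real interceptors (the class names below are illustrative, not part of the codebase):

import java.util.ArrayList;
import java.util.List;

import javax.annotation.Priority;

// Illustrative stand-ins for the real request/response filters.
@Priority(10)
class LateDummyFilter {}

@Priority(5)
class EarlyDummyFilter {}

public class PrioritySortSketch {
    public static void main(String[] args) {
        List<Class<?>> filters = new ArrayList<>();
        filters.add(LateDummyFilter.class);
        filters.add(EarlyDummyFilter.class);

        // Same rule as JerseyConfiguration: fail fast if @Priority is missing ...
        for (Class<?> clazz : filters) {
            if (clazz.getAnnotation(Priority.class) == null) {
                throw new RuntimeException("Container filter " + clazz.getName()
                        + " does not have @Priority annotation");
            }
        }

        // ... then order by ascending priority value before registration.
        filters.sort((c1, c2) -> Integer.compare(
                c1.getAnnotation(Priority.class).value(),
                c2.getAnnotation(Priority.class).value()));

        // EarlyDummyFilter (5) is "registered" before LateDummyFilter (10).
        filters.forEach(c -> System.out.println("register " + c.getSimpleName()));
    }
}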
diff --git a/aai-traversal/src/main/java/org/onap/aai/web/LocalHostAccessLog.java b/aai-traversal/src/main/java/org/onap/aai/web/LocalHostAccessLog.java
index 9d0aa9a..4bc3300 100644
--- a/aai-traversal/src/main/java/org/onap/aai/web/LocalHostAccessLog.java
+++ b/aai-traversal/src/main/java/org/onap/aai/web/LocalHostAccessLog.java
@@ -22,6 +22,8 @@ package org.onap.aai.web;
import ch.qos.logback.access.jetty.RequestLogImpl;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.RequestLogHandler;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory;
import org.springframework.boot.context.embedded.jetty.JettyServerCustomizer;
@@ -34,7 +36,11 @@ import java.util.Arrays;
public class LocalHostAccessLog {
@Bean
- public EmbeddedServletContainerFactory jettyConfigBean(){
+ public EmbeddedServletContainerFactory jettyConfigBean(
+ @Value("${jetty.threadPool.maxThreads:200}") final String maxThreads,
+ @Value("${jetty.threadPool.minThreads:8}") final String minThreads
+ ){
+
JettyEmbeddedServletContainerFactory jef = new JettyEmbeddedServletContainerFactory();
jef.addServerCustomizers((JettyServerCustomizer) server -> {
@@ -52,6 +58,10 @@ public class LocalHostAccessLog {
requestLogHandler.setRequestLog(requestLogImpl);
handlers.addHandler(requestLogHandler);
server.setHandler(handlers);
+
+ final QueuedThreadPool threadPool = server.getBean(QueuedThreadPool.class);
+ threadPool.setMaxThreads(Integer.valueOf(maxThreads));
+ threadPool.setMinThreads(Integer.valueOf(minThreads));
});
return jef;
}
diff --git a/aai-traversal/src/main/resources/antlr4/org/onap/aai/AAIDsl.g4 b/aai-traversal/src/main/resources/antlr4/org/onap/aai/AAIDsl.g4
index 2713677..f0c866a 100644
--- a/aai-traversal/src/main/resources/antlr4/org/onap/aai/AAIDsl.g4
+++ b/aai-traversal/src/main/resources/antlr4/org/onap/aai/AAIDsl.g4
@@ -6,21 +6,15 @@ grammar AAIDsl;
aaiquery: dslStatement;
-dslStatement: (queryStep) (traverseStep | unionTraverseStep)* limitStep*;
-
-queryStep : (singleNodeStep |singleQueryStep | multiQueryStep);
+dslStatement: (singleNodeStep ) (traverseStep )* limitStep*;
unionQueryStep: LBRACKET dslStatement ( COMMA (dslStatement))* RBRACKET;
-traverseStep: (TRAVERSE ( queryStep | unionQueryStep));
-
-unionTraverseStep: TRAVERSE unionQueryStep;
+traverseStep: (TRAVERSE ( singleNodeStep | unionQueryStep));
-singleNodeStep: NODE STORE? ;
-singleQueryStep: NODE STORE? (filterStep | filterTraverseStep);
-multiQueryStep: NODE STORE? (filterStep | filterTraverseStep) (filterStep)+;
+singleNodeStep: NODE STORE? (filterStep | filterTraverseStep)*;
-filterStep: NOT? (LPAREN KEY COMMA KEY (COMMA KEY)*RPAREN);
+filterStep: NOT? (LPAREN KEY (COMMA KEY)* RPAREN);
filterTraverseStep: (LPAREN traverseStep* RPAREN);
limitStep: LIMIT NODE;
@@ -28,7 +22,8 @@ limitStep: LIMIT NODE;
LIMIT: 'LIMIT';
NODE: ID;
-KEY: ['] ID ['] ;
+KEY: ['] (ID | ' ')* ['] ;
+
AND: [&];
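To make the grammar change concrete: filterStep now accepts a single quoted key, and KEY now permits spaces inside the quotes. Below are two hypothetical query strings the revised rules would accept, written as plain Java constants and assuming the TRAVERSE and STORE tokens resolve to '>' and '*' as in published AAI DSL examples (those token rules sit outside the hunk shown here, so the exact symbols are an assumption):

public class DslQuerySketch {
    // Single-key filter, allowed by the relaxed filterStep rule (assumed syntax).
    static final String SINGLE_KEY_FILTER = "generic-vnf*('vnf-name')";

    // Quoted value containing a space, allowed by the widened KEY rule,
    // followed by a traversal into vserver (assumed '>' and '*' tokens).
    static final String SPACE_IN_KEY = "generic-vnf*('vnf-name','my vnf') > vserver*";

    public static void main(String[] args) {
        System.out.println(SINGLE_KEY_FILTER);
        System.out.println(SPACE_IN_KEY);
    }
}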
diff --git a/aai-traversal/src/main/resources/application.properties b/aai-traversal/src/main/resources/application.properties
index bdb7535..a1a14af 100644
--- a/aai-traversal/src/main/resources/application.properties
+++ b/aai-traversal/src/main/resources/application.properties
@@ -10,23 +10,15 @@ spring.jersey.type=filter
server.contextPath=/
spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration,org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration
-spring.profiles.active=production
+spring.jersey.application-path=${schema.uri.base.path}
+
#The max number of active threads in this pool
-server.tomcat.max-threads=200
+jetty.threadPool.maxThreads=200
#The minimum number of threads always kept alive
-server.tomcat.min-Spare-Threads=25
+jetty.threadPool.minThreads=8
#The number of milliseconds before an idle thread shuts down, unless the number of active threads is less than or equal to minSpareThreads
server.tomcat.max-idle-time=60000
-
-#Add this properties only if you want to change the URL, AJSC Framework interceptors will intercept
-#com.att.ajsc.common.interceptors.PreInterceptor.url=/**
-#com.att.ajsc.common.interceptors.PostInterceptor.url=/**
-
-#Servlet context parameters
-server.context_parameters.p-name=value #context parameter with p-name as key and value as value.
-kubernetes.namespace=org-onap-aai
-
# If you get an application startup failure because the port is already taken
# If that's not it, please check whether the key-store file path makes sense
server.local.startpath=aai-traversal/src/main/resources/
@@ -34,39 +26,52 @@ server.basic.auth.location=${server.local.startpath}etc/auth/realm.properties
server.port=8446
server.ssl.enabled-protocols=TLSv1.1,TLSv1.2
+server.ssl.client-auth=want
+server.ssl.key-store-type=JKS
+
+# Start of Internal Specific Properties
+spring.profiles.active=production,two-way-ssl
server.ssl.key-store=${server.local.startpath}etc/auth/aai_keystore
server.ssl.key-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
server.ssl.trust-store=${server.local.startpath}etc/auth/aai_keystore
server.ssl.trust-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
-server.ssl.client-auth=want
-server.ssl.key-store-type=JKS
+
+schema.version.list=v8,v9,v10,v11,v12,v13,v14
+# Specifies which component the oxm should be looking at
+schema.source.name=onap
+# End of Internal Specific Properties
# JMS bind address host port
-jms.bind.address=tcp://localhost:61647
-dmaap.ribbon.eureka.enabled=false
+jms.bind.address=tcp://localhost:61646
+
dmaap.ribbon.listOfServers=localhost:3904
-# Number of milliseconds to wait before making ping requests again
-dmaap.ribbon.ServerListRefreshInterval=75000
-dmaap.ribbon.NFLoadBalancerPingInterval=75000
-dmaap.ribbon.NFLoadBalancerRuleClassName=com.netflix.loadbalancer.AvailabilityFilteringRule
-dmaap.ribbon.NFLoadBalancerPingClassName=org.onap.aai.config.HttpPingImpl
-dmaap.ribbon.EnableMarkingServerDownOnReachingFailureLimit=true
-dmaap.ribbon.ServerDownFailureLimit=1
-# This needs to be verified but it seems that adding this property should automatically
-# Make the dmaap client change the url from http to https depending on the server
-dmaap.ribbon.securePorts=3905
-# Custom Dmaap Specific Configuration
-dmaap.ribbon.username=
-dmaap.ribbon.password=
-dmaap.ribbon.health.endpoint=/topics/AAI-EVENT
-# Number of seconds to wait for the ping to work and might need to increase this if the pings are all failing
-dmaap.ribbon.pingport.timeout=3
+# Lists all of the versions in the schema
+# Schema related attributes for the oxm and edges
+# Any additional schema related attributes should start with prefix schema
+
+schema.configuration.location=N/A
+# Location of where the oxm files are
+schema.nodes.location=${server.local.startpath}/schema/${schema.source.name}/oxm/
+# Location of where the dbedgerules files are
+schema.edges.location=${server.local.startpath}/schema/${schema.source.name}/dbedgerules/
+# Location of where the stored queries are
+schema.queries.location=${server.local.startpath}/schema/${schema.source.name}/query/
+
+schema.ingest.file=${server.local.startpath}/application.properties
-niws.loadbalancer.dmaap.filterCircuitTripped=true
-niws.loadbalancer.dmaap.connectionFailureCountThreshold=3
-niws.loadbalancer.dmaap.circuitTripMaxTimeoutSeconds=180
-#dmaap.ribbon.retryableStatusCodes=404,503
-#dmaap.ribbon.retryableStatusCodes.MaxAutoRetriesNextServer=2
-#dmaap.ribbon.retryableStatusCodes.MaxAutoRetries=2
-#dmaap.ribbon.retryableStatusCodes.OkToRetryOnAllOperations=true
+# Schema Version Related Attributes
+schema.uri.base.path=/aai
+# Specifies from which version the depth parameter should default to zero
+schema.version.depth.start=v9
+# Specifies from which version the related link should be displayed in the response payload
+schema.version.related.link.start=v10
+# Specifies from which version the client should see only the uri, excluding host info
+# Before this version the server base will also be included
+schema.version.app.root.start=v11
+# Specifies from which version the namespace should be changed
+schema.version.namespace.change.start=v12
+# Specifies from which version the client should start seeing the edge label in the payload
+schema.version.edge.label.start=v12
+# Specifies the version that the application should default to
+schema.version.api.default=v14
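These schema.* keys are ordinary Spring properties, so application code can consume them the same way the jetty.threadPool.* values are injected in LocalHostAccessLog above. A minimal, hypothetical sketch of such an injection (the class and field names are made up for illustration; the real schema ingest library wires these values through its own configuration classes):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Illustrative component only, not part of the aai-traversal codebase.
@Component
public class SchemaVersionSettingsSketch {

    @Value("${schema.uri.base.path}")
    private String uriBasePath;            // e.g. /aai

    @Value("${schema.version.api.default}")
    private String defaultApiVersion;      // e.g. v14

    @Value("${schema.version.list}")
    private String[] supportedVersions;    // Spring splits the comma-separated list

    public String defaultVersionedPath() {
        // e.g. /aai/v14 -- the base path clients hit for the default API version
        return uriBasePath + "/" + defaultApiVersion;
    }
}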
diff --git a/aai-traversal/src/main/resources/etc/appprops/Introscope.properties b/aai-traversal/src/main/resources/etc/appprops/Introscope.properties
deleted file mode 100644
index 319381e..0000000
--- a/aai-traversal/src/main/resources/etc/appprops/Introscope.properties
+++ /dev/null
@@ -1,8 +0,0 @@
-#CSI environment uses the Introscope java agent for monitoring services. The AJSC has provided an implementation class that
-#provides basic information to the Introscope Enterprise Manager for each http request/response.
-
-introscopeEventClass=com.att.ajsc.introscope.IntroscopeEventNotifierImpl
-serviceName=N/A
-conversationId=N/A
-uniqueID=N/A
-userID=N/A
diff --git a/aai-traversal/src/main/resources/etc/appprops/PostProcessorInterceptors.properties b/aai-traversal/src/main/resources/etc/appprops/PostProcessorInterceptors.properties
deleted file mode 100644
index ca31a26..0000000
--- a/aai-traversal/src/main/resources/etc/appprops/PostProcessorInterceptors.properties
+++ /dev/null
@@ -1,3 +0,0 @@
-#This properties file is for defining any PostProcessorInterceptors that have been created for your AJSC service.
-
-/**=org.onap.aai.interceptors.PostAaiAjscInterceptor
diff --git a/aai-traversal/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties b/aai-traversal/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties
index 4aa7445..3258623 100644
--- a/aai-traversal/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties
+++ b/aai-traversal/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties
@@ -1,4 +1,32 @@
+# Start of Internal Specific Properties
+TransportType=DME2
+Latitude=39.099727
+Longitude=-94.578567
+Version=1.0
+ServiceName=dmaap-v1.dev.dmaap.dt.saat.acsi.att.com/events
+Environment=TEST
+routeOffer=MR1SBKCD
+Partner=KC_R
+SubContextPath=/
Protocol=http
+MethodType=POST
+username=m08479@aai.ecomp.att.com
+password=OBF:1wfm1z0h18xp1z0f1r411y7z1r3x1z0f18xt1z0d1wgc
contenttype=application/json
-host=localhost:3904
+host=klsd056.ipcoe.att.com:3904
+AFT_DME2_EXCHANGE_REQUEST_HANDLERS=com.att.nsa.test.PreferredRouteRequestHandler
+AFT_DME2_EXCHANGE_REPLY_HANDLERS=com.att.nsa.test.PreferredRouteReplyHandler
+AFT_DME2_REQ_TRACE_ON=true
+AFT_ENVIRONMENT=AFTUAT
+AFT_DME2_EP_CONN_TIMEOUT=10000
+AFT_DME2_ROUNDTRIP_TIMEOUT_MS=180000
+AFT_DME2_EP_READ_TIMEOUT_MS=50000
+DME2_REPLY_HANDLER_TIMEOUT_MS=180000
+DME2_PER_HANDLER_TIMEOUT_MS=180000
+sessionstickinessrequired=NO
+MessageSentThreadOccurance=50
+# End of Internal Specific Properties
topic=AAI-EVENT
+partition=AAI
+maxBatchSize=100
+maxAgeMs=250
diff --git a/aai-traversal/src/main/resources/etc/appprops/aaiconfig.properties b/aai-traversal/src/main/resources/etc/appprops/aaiconfig.properties
index 776ee4b..1a0c337 100644
--- a/aai-traversal/src/main/resources/etc/appprops/aaiconfig.properties
+++ b/aai-traversal/src/main/resources/etc/appprops/aaiconfig.properties
@@ -2,7 +2,7 @@
# ============LICENSE_START=======================================================
# org.onap.aai
# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# Copyright © 2017-18 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,67 +17,23 @@
# limitations under the License.
# ============LICENSE_END=========================================================
#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
####################################################################
# REMEMBER TO THINK ABOUT ENVIRONMENTAL DIFFERENCES AND CHANGE THE
# TEMPLATE AND *ALL* DATAFILES
####################################################################
-aai.config.checktime=1000
-
# this could come from siteconfig.pl?
aai.config.nodename=AutomaticallyOverwritten
-aai.logging.hbase.interceptor=true
-aai.logging.hbase.enabled=true
-aai.logging.hbase.logrequest=true
-aai.logging.hbase.logresponse=true
-
-aai.logging.trace.enabled=true
-aai.logging.trace.logrequest=false
-aai.logging.trace.logresponse=false
-
aai.transaction.logging=true
aai.transaction.logging.get=true
aai.transaction.logging.post=true
-aai.tools.enableBasicAuth=true
-aai.tools.username=AAI
-aai.tools.password=AAI
-
aai.server.url.base=https://localhost:8443/aai/
aai.server.url=https://localhost:8443/aai/v14/
aai.global.callback.url=https://localhost:8443/aai/
-aai.auth.cspcookies_on=false
-aai.dbmodel.filename=ex5.json
-aai.truststore.filename=aai_keystore
-aai.truststore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
-aai.keystore.filename=aai-client-cert.p12
-aai.keystore.passwd.x=
-
-# for transaction log
-hbase.table.name=aailogging-dev1.dev
-hbase.notificationTable.name=aainotification-dev1.dev
-hbase.table.timestamp.format=YYYYMMdd-HH:mm:ss:SSS
-hbase.zookeeper.quorum=ONAPserverTBD
-hbase.zookeeper.property.clientPort=2181
-hbase.zookeeper.znode.parent=/hbase
-
-
-# single primary server
-aai.primary.filetransfer.serverlist=ONAPserverTBD
-aai.primary.filetransfer.primarycheck=echo:8443/aai/util/echo
-aai.primary.filetransfer.pingtimeout=5000
-aai.primary.filetransfer.pingcount=5
-
-#rsync properties
-aai.rsync.command=rsync
-aai.rsync.options.list=-v|-t
-aai.rsync.remote.user=aaiadmin
-aai.rsync.enabled=y
aai.notification.current.version=v14
aai.notificationEvent.default.status=UNPROCESSED
@@ -89,32 +45,37 @@ aai.notificationEvent.default.severity=NORMAL
aai.notificationEvent.default.version=v14
# This one lets us enable/disable resource-version checking on updates/deletes
aai.resourceversion.enableflag=true
+# This will specify how deep the stack trace should be logged
aai.logging.maxStackTraceEntries=10
aai.default.api.version=v14
# Used by Model-processing code
-aai.model.delete.sleep.per.vtx.msec=500
aai.model.query.resultset.maxcount=50
aai.model.query.timeout.sec=90
-aai.model.proc.max.levels=50
-aai.edgeTag.proc.max.levels=50
-
-aai.dmaap.workload.enableEventProcessing=true
-
-aai.realtime.clients=RO,SDNC,MSO,SO
-
-aai.server.rebind=g
-
aai.jms.enable=false
#timeout for traversal enabled flag
aai.traversal.timeoutenabled=true
-
-#timeout app specific
-aai.traversal.timeout.appspecific=JUNITTESTAPP1,1|JUNITTESTAPP2,-1|DCAE-CCS,-1|DCAES,-1|AAI-FILEGEN-GFPIP,-1
+aai.traversal.dsl.timeoutenabled=true
+#timeout app specific: -1 to bypass for that app id, or a whole number (in ms) to override the timeout for that app
+aai.traversal.timeout.appspecific=JUNITTESTAPP1,1|JUNITTESTAPP2,-1|DCAE-CCS,-1|DCAES,-1|AAI-FILEGEN-GFPIP,-1|FitNesse-Test-PS2418,-1|FitNesse-Test-jenkins,-1|FitNesse-Test-ps2418,-1|FitNesse-Relationship-Test-PS2418,-1|FitNesse-Relationship-Test-ps2418,-1|FitNesse-Relationship-Test-jenkins,-1|VPESAT,-1|AAIRctFeed,-1|NewvceCreator,-1|IANewvceCreator,-1|AAI-CSIOVALS,-1
+aai.traversal.dsl.timeout.appspecific=JUNITTESTAPP1,1|JUNITTESTAPP2,-1|DCAE-CCS,-1|DCAES,-1|AAI-FILEGEN-GFPIP,-1|FitNesse-Test-PS2418,-1|FitNesse-Test-jenkins,-1|FitNesse-Test-ps2418,-1|FitNesse-Relationship-Test-PS2418,-1|FitNesse-Relationship-Test-ps2418,-1|FitNesse-Relationship-Test-jenkins,-1|VPESAT,-1|AAIRctFeed,-1|NewvceCreator,-1|IANewvceCreator,-1|AAI-CSIOVALS,-1
#default timeout limit added for traversal if not overridden (in ms)
aai.traversal.timeoutlimit=180000
+aai.traversal.dsl.timeoutlimit=180000
+
+# Start of INTERNAL Specific Properties
+
+aai.truststore.filename=tomcat_keystore
+aai.truststore.passwd.x=OBF:1i9a1u2a1unz1lr61wn51wn11lss1unz1u301i6o
+aai.keystore.filename=aai-client-cert.p12
+aai.keystore.passwd.x=OBF:1i9a1u2a1unz1lr61wn51wn11lss1unz1u301i6o
+
+aai.realtime.clients=RO,SDNC,MSO
+aai.aic25.cloudregion.owner=attaic
+aai.aic25.cloudregion.id=AAIAIC25
+# End of INTERNAL Specific Properties
diff --git a/aai-traversal/src/main/resources/etc/appprops/default-logback.xml b/aai-traversal/src/main/resources/etc/appprops/default-logback.xml
deleted file mode 100644
index 655157b..0000000
--- a/aai-traversal/src/main/resources/etc/appprops/default-logback.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<!--
-
- ============LICENSE_START=======================================================
- org.onap.aai
- ================================================================================
- Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
-
- ECOMP is a trademark and service mark of AT&T Intellectual Property.
-
--->
-<configuration debug="false">
- <property name="defaultPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%logger|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{ServiceName}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ServerFQDN}|%X{RemoteHost}|%X{Timer}|%msg%n" />
-
- <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <pattern>${defaultPattern}</pattern>
- </encoder>
- </appender>
-
- <logger name="org.reflections" level="WARN"/>
- <logger name="org.apache.zookeeper" level="WARN"/>
- <logger name="org.apache.hadoop" level="WARN"/>
- <logger name="com.thinkaurelius" level="WARN"/>
- <logger name="ch.qos.logback.classic" level="WARN" />
- <logger name="ch.qos.logback.core" level="WARN" />
-
- <root level="INFO">
- <appender-ref ref="STDOUT"/>
- </root>
-</configuration>
diff --git a/aai-traversal/src/main/resources/etc/appprops/error.properties b/aai-traversal/src/main/resources/etc/appprops/error.properties
index d25a2bf..d1e39cc 100644
--- a/aai-traversal/src/main/resources/etc/appprops/error.properties
+++ b/aai-traversal/src/main/resources/etc/appprops/error.properties
@@ -32,6 +32,11 @@ AAI_3011=5:6:WARN:3011:400:3000:Unknown XML namespace used in payload
AAI_3012=5:6:WARN:3012:400:3012:Unrecognized AAI function
AAI_3013=5:6:WARN:3013:400:3013:Query payload missing required parameters %1
AAI_3014=5:6:WARN:3014:400:3014:Query payload is invalid %1
+AAI_3018=5:6:WARN:3018:400:3018:Query URI missing required parameters
+AAI_3019=5:6:WARN:3019:400:3019:Query URI sending conflicting parameters
+AAI_3020=5:6:WARN:3020:400:3020:Query URI parameters outside bounds
+AAI_3021=5:6:WARN:3021:400:3021:Invalid parameters to Recents API
+
# pol errors
AAI_3100=5:1:WARN:3100:400:3100:Unsupported operation %1
AAI_3101=5:1:WARN:3101:403:3101:Attempt by client %1 to execute API %2
@@ -112,6 +117,7 @@ AAI_6144=5:4:WARN:6144:400:3000:Cycle found in graph
AAI_6145=5:4:ERROR:6145:400:3000:Cannot create a nested/containment edge via relationship
AAI_6146=5:4:ERROR:6146:400:3000:Ambiguous identity map found, use a URI instead
AAI_6147=5:4:ERROR:6147:400:3000:Payload Limit Reached, reduce payload
+AAI_6148=5:4:INFO:6148:404:3001:Node Not Found. Start URI returned no vertexes, please check the start URI
#--- aaicsvp: 7101-7199
AAI_7101=5:4:ERROR:7101:500:3002:Unexpected error in CSV file processing
@@ -138,7 +144,6 @@ AAI_7119=5:4:ERROR:7119:500:3002:Unknown host
AAI_7202=5:4:ERROR:7202:500:3002:Error getting connection to odl
AAI_7203=5:4:ERROR:7203:500:3002:Unexpected error calling DataChangeNotification API
AAI_7204=5:4:ERROR:7204:500:3002:Error returned by DataChangeNotification API
-AAI_7205=5:4:ERROR:7205:500:3002:Unexpected error running notifySDNCOnUpdate
#AAI_7206=5:4:ERROR:7206:500:3002:Invalid data returned from ODL
#--- NotificationEvent, using UEB space
@@ -170,3 +175,8 @@ AAI_9107=5:0:WARN:9107:403:3300:SSL is not provided in request, please contact a
#--- aaiinstar: 9201-9299
#AAI_9201=5:4:ERROR:9201:500:3002:Unable to send notification
AAI_9202=5:4:ERROR:9202:500:3002:Unable to start a thread
+#--- GRM DME2: 9501-9599
+AAI_9501=5:4:WARN:9501:500:3002:Unable to register with GRM retrying
+AAI_9502=5:4:ERROR:9502:500:3002:Unable to register with GRM after exhausting all retries
+AAI_9503=5:4:WARN:9503:500:3002:Unable to successfully unpublish with GRM, recommend manual cleanup but not necessary
+
diff --git a/aai-traversal/src/main/resources/etc/appprops/gremlin-server-config.yaml b/aai-traversal/src/main/resources/etc/appprops/gremlin-server-config.yaml
deleted file mode 100644
index a9de31f..0000000
--- a/aai-traversal/src/main/resources/etc/appprops/gremlin-server-config.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# ============LICENSE_START=======================================================
-# org.onap.aai
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-hosts: [localhost]
-port: 8182
-serializer: { className: org.apache.tinkerpop.gremlin.driver.ser.GraphSONMessageSerializerV1d0 } \ No newline at end of file
diff --git a/aai-traversal/src/main/resources/etc/appprops/janusgraph-cached.properties b/aai-traversal/src/main/resources/etc/appprops/janusgraph-cached.properties
index aecea7c..9f85c8e 100644
--- a/aai-traversal/src/main/resources/etc/appprops/janusgraph-cached.properties
+++ b/aai-traversal/src/main/resources/etc/appprops/janusgraph-cached.properties
@@ -2,7 +2,7 @@
# ============LICENSE_START=======================================================
# org.onap.aai
# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# Copyright © 2017-18 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,20 +16,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
query.fast-property=true
-query.smart-limit=false
# the following parameters are not reloaded automatically and require a manual bounce
-#storage.backend=inmemory
storage.backend=inmemory
storage.hostname=localhost
-#explicit Titan version in use - added for JanusGraph migration
-#graph.titan-version=1.0.0
-
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev02
diff --git a/aai-traversal/src/main/resources/etc/appprops/janusgraph-realtime.properties b/aai-traversal/src/main/resources/etc/appprops/janusgraph-realtime.properties
index 9e64ac2..f938182 100644
--- a/aai-traversal/src/main/resources/etc/appprops/janusgraph-realtime.properties
+++ b/aai-traversal/src/main/resources/etc/appprops/janusgraph-realtime.properties
@@ -2,7 +2,7 @@
# ============LICENSE_START=======================================================
# org.onap.aai
# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# Copyright © 2017-18 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,20 +16,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
query.fast-property=true
-query.smart-limit=false
# the following parameters are not reloaded automatically and require a manual bounce
-#storage.backend=inmemory
storage.backend=inmemory
storage.hostname=localhost
-#explicit Titan version in use - added for JanusGraph migration
-#graph.titan-version=1.0.0
-
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev02
diff --git a/aai-traversal/src/main/resources/etc/appprops/methodMapper.properties b/aai-traversal/src/main/resources/etc/appprops/methodMapper.properties
deleted file mode 100644
index d6c1158..0000000
--- a/aai-traversal/src/main/resources/etc/appprops/methodMapper.properties
+++ /dev/null
@@ -1,24 +0,0 @@
-{
- "ActiveAndAvailableInventory-Traversal" : [{
- "logicalName" : "getAAIResource",
- "method" : "get",
- "url" : "/aai/*"
- }, {
- "logicalName" : "putAAIResource",
- "method" : "put",
- "url" : "/aai/*"
- }, {
- "logicalName" : "deleteAAIResource",
- "method" : "delete",
- "url" : "/aai/*"
- }, {
- "logicalName" : "postAAIResource",
- "method" : "post",
- "url" : "/aai/*"
- }, {
- "logicalName" : "patchAAIResource",
- "method" : "patch",
- "url" : "/aai/*"
- }
- ]
-} \ No newline at end of file
diff --git a/aai-traversal/src/main/resources/etc/appprops/preferredRoute.txt b/aai-traversal/src/main/resources/etc/appprops/preferredRoute.txt
deleted file mode 100644
index 662b0aa..0000000
--- a/aai-traversal/src/main/resources/etc/appprops/preferredRoute.txt
+++ /dev/null
@@ -1 +0,0 @@
-preferredRouteKey=MR1 \ No newline at end of file
diff --git a/aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getClfiRoadmTailSummary-1.0.json b/aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getClfiRoadmTailSummary-1.0.json
index a30e797..771197f 100644
--- a/aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getClfiRoadmTailSummary-1.0.json
+++ b/aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getClfiRoadmTailSummary-1.0.json
@@ -1,94 +1,94 @@
-{
- "named-query-uuid" : "4f448e43-339f-4c1c-85f6-896c444e25ca",
- "named-query-name" : "GetClfiRoadmTailSummary",
- "named-query-version" : "1.0",
- "description" : "Named query - Get CLFI ROADM Tail Summary",
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "862b25a1-262a-4961-bdaa-cdc55d69785a"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "94043c37-4e73-439c-a790-0fdd697924cd"
- } ]
- } ]
- }
- }, {
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "c1d4305f-cdbd-4bbe-9069-a2f4978fd89e"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "2e1a602a-acd8-4f78-94ff-618b802a303b"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "82194af1-3c2c-485a-8f44-420e22a9eaa4"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "fe012535-2c31-4a39-a739-612374c638a0"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "fe012535-2c31-4a39-a739-612374c638a0"
- } ]
- } ]
- }
- } ]
- }
+{
+ "named-query-uuid" : "4f448e43-339f-4c1c-85f6-896c444e25ca",
+ "named-query-name" : "GetClfiRoadmTailSummary",
+ "named-query-version" : "1.0",
+ "description" : "Named query - Get CLFI ROADM Tail Summary",
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "862b25a1-262a-4961-bdaa-cdc55d69785a"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "94043c37-4e73-439c-a790-0fdd697924cd"
+ } ]
+ } ]
+ }
+ }, {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "c1d4305f-cdbd-4bbe-9069-a2f4978fd89e"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "2e1a602a-acd8-4f78-94ff-618b802a303b"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "82194af1-3c2c-485a-8f44-420e22a9eaa4"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "fe012535-2c31-4a39-a739-612374c638a0"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "fe012535-2c31-4a39-a739-612374c638a0"
+ } ]
+ } ]
+ }
+ } ]
+ }
} \ No newline at end of file
diff --git a/aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getRouterRoadmTailSummary-1.0.json b/aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getRouterRoadmTailSummary-1.0.json
index 6f775e8..27e198d 100644
--- a/aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getRouterRoadmTailSummary-1.0.json
+++ b/aai-traversal/src/main/resources/etc/scriptdata/named-query-json/getRouterRoadmTailSummary-1.0.json
@@ -1,120 +1,120 @@
-{
- "named-query-uuid" : "cbf22b8a-f29a-4b9b-a466-a878095b258a",
- "named-query-name" : "GetRouterRoadmTailSummary",
- "named-query-version" : "1.0",
- "description" : "Named query - Get Router ROADM Tail Summary",
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "862b25a1-262a-4961-bdaa-cdc55d69785a"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "94043c37-4e73-439c-a790-0fdd697924cd"
- } ]
- } ]
- }
- }, {
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "named-query-elements" : {
- "named-query-element" : [ {
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "c1d4305f-cdbd-4bbe-9069-a2f4978fd89e"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "2e1a602a-acd8-4f78-94ff-618b802a303b"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "82194af1-3c2c-485a-8f44-420e22a9eaa4"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "fe012535-2c31-4a39-a739-612374c638a0"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "fe012535-2c31-4a39-a739-612374c638a0"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "94043c37-4e73-439c-a790-0fdd697924cd"
- } ]
- } ]
- }
- } ]
- },
- "relationship-list" : {
- "relationship" : [ {
- "related-to" : "model",
- "relationship-data" : [ {
- "relationship-key" : "model.model-invariant-id",
- "relationship-value" : "862b25a1-262a-4961-bdaa-cdc55d69785a"
- } ]
- } ]
- }
- } ]
- }
+{
+ "named-query-uuid" : "cbf22b8a-f29a-4b9b-a466-a878095b258a",
+ "named-query-name" : "GetRouterRoadmTailSummary",
+ "named-query-version" : "1.0",
+ "description" : "Named query - Get Router ROADM Tail Summary",
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "862b25a1-262a-4961-bdaa-cdc55d69785a"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "94043c37-4e73-439c-a790-0fdd697924cd"
+ } ]
+ } ]
+ }
+ }, {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "named-query-elements" : {
+ "named-query-element" : [ {
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "c1d4305f-cdbd-4bbe-9069-a2f4978fd89e"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "2e1a602a-acd8-4f78-94ff-618b802a303b"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "82194af1-3c2c-485a-8f44-420e22a9eaa4"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "fe012535-2c31-4a39-a739-612374c638a0"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "fe012535-2c31-4a39-a739-612374c638a0"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "94043c37-4e73-439c-a790-0fdd697924cd"
+ } ]
+ } ]
+ }
+ } ]
+ },
+ "relationship-list" : {
+ "relationship" : [ {
+ "related-to" : "model",
+ "relationship-data" : [ {
+ "relationship-key" : "model.model-invariant-id",
+ "relationship-value" : "862b25a1-262a-4961-bdaa-cdc55d69785a"
+ } ]
+ } ]
+ }
+ } ]
+ }
} \ No newline at end of file
diff --git a/aai-traversal/src/main/resources/etc/sysprops/sys-props.properties b/aai-traversal/src/main/resources/etc/sysprops/sys-props.properties
deleted file mode 100644
index 44d58d5..0000000
--- a/aai-traversal/src/main/resources/etc/sysprops/sys-props.properties
+++ /dev/null
@@ -1,140 +0,0 @@
-#This file is used for defining AJSC system properties for different configuration schemes and is necessary for the AJSC to run properly.
-#The sys-props.properties file is used for running locally. The template.sys-props.properties file will be used when deployed
-#to a SOA/CSI Cloud node.
-
-#AJSC System Properties. The following properties are required for ALL AJSC services. If you are adding System Properties for your
-#particular service, please add them AFTER all AJSC related System Properties.
-
-#For Cadi Authorization, use value="authentication-scheme-1
-CadiAuthN=authentication-scheme-1
-
-#For Basic Authorization, use value="authentication-scheme-1
-authN=authentication-scheme-2
-
-#Persistence used for AJSC meta-data storage. For most environments, "file" should be used.
-ajscPersistence=file
-
-# If using hawtio for local development, these properties will allow for faster server startup and usage for local development
-hawtio.authenticationEnabled=false
-hawtio.config.pullOnStartup=false
-
-#Removes the extraneous restlet console output
-org.restlet.engine.loggerFacadeClass=org.restlet.ext.slf4j.Slf4jLoggerFacade
-
-#server.host property to be enabled for local DME2 related testing
-#server.host=<Your network IP address>
-
-#Enable/disable SSL (values=true/false). This property also determines which protocol to use (https if true, http otherwise), to register services into GRM through DME2.
-enableSSL=false
-
-#Enable/disable csi logging (values=true/false). This can be disabled during local development
-csiEnable=false
-
-#Enable/disable EJB Container
-ENABLE_EJB=false
-
-#Enable/disable OSGI
-isOSGIEnable=false
-
-#Configure JMS Queue (WMQ/TIBCO)
-JMS_BROKER=WMQ
-
-#JMS properties needed for CSI Logging
-JMS_TIBCO_PROVIDER_URL=ONAPserverTBD
-JMS_LOGGER_USER_NAME=spm2
-JMS_LOGGER_PASSWORD=its4test
-JMS_LOGGER_AUDIT_QUEUE_BINDING=pub.m2e.audit.logger.queue
-JMS_LOGGER_PERF_QUEUE_BINDING=log.csi.performance02
-
-#WMQ connectivity
-JMS_WMQ_PROVIDER_URL=aftdsc://AFTUAT/34.07/-84.28
-JMS_WMQ_CONNECTION_FACTORY_NAME=aftdsc://AFTUAT/?service=CSILOG,version=1.0,bindingType=fusionBus,envContext=Q,Q25A=YES
-JMS_WMQ_INITIAL_CONNECTION_FACTORY_NAME=com.att.aft.jms.FusionCtxFactory
-JMS_WMQ_AUDIT_DESTINATION_NAME=queue:///CSILOGQL.M2E.DASHBOARD01.NOT.Q25A
-JMS_WMQ_PERF_DESTINATION_NAME=queue:///CSILOGQL.M2E.PERFORMANCE01.NOT.Q25A
-
-
-#CSI related variables for CSM framework
-csm.hostname=d1a-m2e-q112m2e1.edc.cingular.net
-
-#Enable/disable endpoint level logging (values=true/false). This can be disabled during local development
-endpointLogging=false
-
-#SOA_CLOUD_ENV is used to register your service with dme2 and can be turned off for local development (values=true/false).
-SOA_CLOUD_ENV=false
-
-#CONTINUE_ON_LISTENER_EXCEPTION will exit the application if there is a DME2 exception at the time of registration.
-CONTINUE_ON_LISTENER_EXCEPTION=false
-
-#Jetty Container ThreadCount Configuration Variables
-AJSC_JETTY_ThreadCount_MIN=10
-AJSC_JETTY_ThreadCount_MAX=500
-AJSC_JETTY_IDLETIME_MAX=60000
-AJSC_JETTY_BLOCKING_QUEUE_SIZE=100
-
-#Camel Context level default threadPool Profile configuration
-CAMEL_POOL_SIZE=10
-CAMEL_MAX_POOL_SIZE=20
-CAMEL_KEEP_ALIVE_TIME=60
-CAMEL_MAX_QUEUE_SIZE=1000
-
-#File Monitor configurations
-ssf_filemonitor_polling_interval=5
-ssf_filemonitor_threadpool_size=10
-
-#GRM/DME2 System Properties
-AFT_DME2_CONN_IDLE_TIMEOUTMS=5000
-AJSC_ENV=SOACLOUD
-
-SOACLOUD_NAMESPACE=com.att.ajsc
-SOACLOUD_ENV_CONTEXT=DEV
-SOACLOUD_PROTOCOL=http
-SOACLOUD_ROUTE_OFFER=DEFAULT
-
-sslport=8446
-server.port=8083
-
-AFT_LATITUDE=23.4
-AFT_LONGITUDE=33.6
-AFT_ENVIRONMENT=AFTUAT
-
-#Restlet Component Default Properties
-RESTLET_COMPONENT_CONTROLLER_DAEMON=true
-RESTLET_COMPONENT_CONTROLLER_SLEEP_TIME_MS=100
-RESTLET_COMPONENT_INBOUND_BUFFER_SIZE=8192
-RESTLET_COMPONENT_MIN_THREADS=1
-RESTLET_COMPONENT_MAX_THREADS=10
-RESTLET_COMPONENT_LOW_THREADS=8
-RESTLET_COMPONENT_MAX_QUEUED=0
-RESTLET_COMPONENT_MAX_CONNECTIONS_PER_HOST=-1
-RESTLET_COMPONENT_MAX_TOTAL_CONNECTIONS=-1
-RESTLET_COMPONENT_OUTBOUND_BUFFER_SIZE=8192
-RESTLET_COMPONENT_PERSISTING_CONNECTIONS=true
-RESTLET_COMPONENT_PIPELINING_CONNECTIONS=false
-RESTLET_COMPONENT_THREAD_MAX_IDLE_TIME_MS=60000
-RESTLET_COMPONENT_USE_FORWARDED_HEADER=false
-RESTLET_COMPONENT_REUSE_ADDRESS=true
-
-#Externalized jar and properties file location. In CSI environments, there are a few libs that have been externalized to aid
-#in CSTEM maintenance of the versions of these libs. The most important to the AJSC is the DME2 lib. Not only is this lib necessary
-#for proper registration of your AJSC service on a node, but it is also necessary for running locally as well. Another framework
-#used in CSI envs is the CSM framework. These 2 framework libs are shown as "provided" dependencies within the pom.xml. These
-#dependencies will be copied into the target/commonLibs folder with the normal "mvn clean package" goal of the AJSC. They will
-#then be added to the classpath via AJSC_EXTERNAL_LIB_FOLDERS system property. Any files (mainly property files) that need
-#to be on the classpath should be added to the AJSC_EXTERNAL_PROPERTIES_FOLDERS system property. The default scenario when
-#testing your AJSC service locally will utilize the target/commonLibs directory for DME2 and CSM related artifacts and 2
-#default csm properties files will be used for local testing with anything CSM knorelated.
-#NOTE: we are using maven-replacer-plugin to replace "(doubleUnderscore)basedir(doubleUnderscore)" with ${basedir} within the
-#target directory for running locally. Multiple folder locations can be separated by the pipe ("|") character.
-#Please, NOTE: for running locally, we are setting this system property in the antBuild/build.xml "runLocal" target and in the
-#"runAjsc" profile within the pom.xml. This is to most effectively use maven variables (${basedir}, most specifically. Therefore,
-#when running locally, the following 2 properties should be set within the profile(s) themselves.
-#Example: target/commonLibs|target/otherLibs
-#AJSC_EXTERNAL_LIB_FOLDERS=__basedir__/target/commonLibs
-#AJSC_EXTERNAL_PROPERTIES_FOLDERS=__basedir__/ajsc-shared-config/etc
-#End of AJSC System Properties
-
-#Service System Properties. Please, place any Service related System Properties below.
-KEY_STORE_PASSWORD=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
-KEY_MANAGER_PASSWORD=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
-AAI_CORE_VERSION=1.2.0-SNAPSHOT
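The KEY_STORE_PASSWORD and KEY_MANAGER_PASSWORD values above are stored in Jetty's reversible OBF: obfuscation format rather than plain text. If a replacement value were ever needed, Jetty's Password utility can produce one; the jar name below is only an assumption and depends on the Jetty version bundled with the service:

    # Prints the plain, OBF: and MD5: forms of the supplied password (jar path is hypothetical)
    java -cp jetty-util-9.3.8.v20160314.jar org.eclipse.jetty.util.security.Password mysecretpassword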
diff --git a/aai-traversal/src/main/resources/etc/sysprops/template.sys-props.properties b/aai-traversal/src/main/resources/etc/sysprops/template.sys-props.properties
deleted file mode 100644
index c9060f9..0000000
--- a/aai-traversal/src/main/resources/etc/sysprops/template.sys-props.properties
+++ /dev/null
@@ -1,115 +0,0 @@
-#This file is used for defining AJSC system properties for different configuration schemes and is necessary for the AJSC to run properly.
-#The sys-props.properties file is used for running locally.
-
-#For Cadi Authorization, use value="authentication-scheme-1"
-CadiAuthN=__AJSC_CADI_AUTHN__
-
-#For Basic Authorization, use value="authentication-scheme-1"
-authN=__AJSC_AUTH_SCHEME__
-
-#Persistence used for AJSC meta-data storage. For most environments, "file" should be used.
-ajscPersistence=__AJSC_PERSISTENCE__
-
-#Configure externalized logback location
-logback.configurationFile=__LOGBACK_CONFIG_FILE__
-
-# If using hawtio for local development, these properties will allow for faster server startup and usage during local development.
-# You may wish to deploy hawtio.war to dev/test environments to help with debugging as well, but it should not be used in a PROD environment.
-hawtio.authenticationEnabled=__HAWTIO_AUTHENTICATION_ENABLED__
-hawtio.config.pullOnStartup=__HAWTIO_CONFIG_PULLONSTARTUP__
-
-#Removes the extraneous restlet console output
-org.restlet.engine.loggerFacadeClass=org.restlet.ext.slf4j.Slf4jLoggerFacade
-
-#Enable/disable SSL (values=true/false).
-enableSSL=__AJSC_ENABLE_SSL__
-
-#Enable/disable csi logging (values=true/false).
-csiEnable=__CSI_ENABLE__
-
-#Enable/disable EJB Container
-ENABLE_EJB=__ENABLE_EJB__
-
-#Enable/disable OSGI
-isOSGIEnable=__OSGI_ENABLE__
-
-#Configure JMS Queue (WMQ/TIBCO)
-JMS_BROKER=WMQ
-
-#JMS properties needed for CSI Logging
-JMS_TIBCO_PROVIDER_URL=ONAPserverTBD
-JMS_LOGGER_USER_NAME=spm2
-JMS_LOGGER_PASSWORD=
-JMS_LOGGER_AUDIT_QUEUE_BINDING=pub.m2e.audit.logger.queue
-JMS_LOGGER_PERF_QUEUE_BINDING=log.csi.performance02
-
-#WMQ connectivity
-JMS_WMQ_PROVIDER_URL=__JMS_WMQ_PROVIDER_URL__
-JMS_WMQ_CONNECTION_FACTORY_NAME=__JMS_WMQ_CONNECTION_FACTORY_NAME__
-JMS_WMQ_INITIAL_CONNECTION_FACTORY_NAME=__JMS_WMQ_INITIAL_CONNECTION_FACTORY_NAME__
-JMS_WMQ_AUDIT_DESTINATION_NAME=__JMS_WMQ_AUDIT_DESTINATION_NAME__
-JMS_WMQ_PERF_DESTINATION_NAME=__JMS_WMQ_PERF_DESTINATION_NAME__
-
-#CSI related variables for CSM framework
-csm.hostname=d1a-m2e-q112m2e1.edc.cingular.net
-
-#Enable/disable endpoint level logging (values=true/false).
-endpointLogging=__END_POINT_LEVEL_LOGGING__
-
-#SOA_CLOUD_ENV is used to register your service with dme2 (values=true/false).
-SOA_CLOUD_ENV=__SOA_CLOUD_ENV__
-
-#Jetty Container ThreadCount Configuration Variables
-AJSC_JETTY_ThreadCount_MAX=__AJSC_JETTY_ThreadCount_MAX__
-AJSC_JETTY_ThreadCount_MIN=__AJSC_JETTY_ThreadCount_MIN__
-AJSC_JETTY_IDLETIME_MAX=__AJSC_JETTY_IDLETIME_MAX__
-AJSC_SERVICE_NAMESPACE=ActiveAndAvailableInventory-Traversal
-AJSC_SERVICE_VERSION=__AJSC_SERVICE_VERSION__
-AJSC_JETTY_BLOCKING_QUEUE_SIZE=100
-
-#Camel Context level default threadPool Profile configuration
-CAMEL_POOL_SIZE=__CAMEL_POOL_SIZE__
-CAMEL_MAX_POOL_SIZE=__CAMEL_MAX_POOL_SIZE__
-CAMEL_KEEP_ALIVE_TIME=__CAMEL_KEEP_ALIVE_TIME__
-CAMEL_MAX_QUEUE_SIZE=__CAMEL_MAX_QUEUE_SIZE__
-
-#File Monitor configurations
-ssf_filemonitor_polling_interval=__AJSC_SSF_FILE_MONITOR_POLLING_INTERVAL__
-ssf_filemonitor_threadpool_size=__AJSC_SSF_FILE_MONITOR_THREAD_POOL_SIZE__
-
-#GRM/DME2 System Properties below
-AFT_DME2_CONN_IDLE_TIMEOUTMS=__AFT_DME2_CONN_IDLE_TIMEOUTMS__
-AJSC_ENV=__AJSC_ENV__
-SOACLOUD_NAMESPACE=__SOA_CLOUD_NAMESPACE__
-SOACLOUD_ENV_CONTEXT=__SCLD_ENV__
-SOACLOUD_PROTOCOL=__SOACLOUD_PROTOCOL__
-SOACLOUD_ROUTE_OFFER=__AAIENV__
-sslport=8446
-server.port=8083
-AFT_LATITUDE=__LATITUDE__
-AFT_LONGITUDE=__LONGITUDE__
-AFT_ENVIRONMENT=__AFT_ENVIRONMENT__
-
-#Restlet Component Properties
-RESTLET_COMPONENT_CONTROLLER_DAEMON=__RESTLET_COMPONENT_CONTROLLER_DAEMON__
-RESTLET_COMPONENT_CONTROLLER_SLEEP_TIME_MS=__RESTLET_COMPONENT_CONTROLLER_SLEEP_TIME_MS__
-RESTLET_COMPONENT_INBOUND_BUFFER_SIZE=__RESTLET_COMPONENT_INBOUND_BUFFER_SIZE__
-RESTLET_COMPONENT_MIN_THREADS=__RESTLET_COMPONENT_MIN_THREADS__
-RESTLET_COMPONENT_MAX_THREADS=__RESTLET_COMPONENT_MAX_THREADS__
-RESTLET_COMPONENT_LOW_THREADS=__RESTLET_COMPONENT_LOW_THREADS__
-RESTLET_COMPONENT_MAX_QUEUED=__RESTLET_COMPONENT_MAX_QUEUED__
-RESTLET_COMPONENT_MAX_CONNECTIONS_PER_HOST=__RESTLET_COMPONENT_MAX_CONNECTIONS_PER_HOST__
-RESTLET_COMPONENT_MAX_TOTAL_CONNECTIONS=__RESTLET_COMPONENT_MAX_TOTAL_CONNECTIONS__
-RESTLET_COMPONENT_OUTBOUND_BUFFER_SIZE=__RESTLET_COMPONENT_OUTBOUND_BUFFER_SIZE__
-RESTLET_COMPONENT_PERSISTING_CONNECTIONS=__RESTLET_COMPONENT_PERSISTING_CONNECTIONS__
-RESTLET_COMPONENT_PIPELINING_CONNECTIONS=__RESTLET_COMPONENT_PIPELINING_CONNECTIONS__
-RESTLET_COMPONENT_THREAD_MAX_IDLE_TIME_MS=__RESTLET_COMPONENT_THREAD_MAX_IDLE_TIME_MS__
-RESTLET_COMPONENT_USE_FORWARDED_HEADER=__RESTLET_COMPONENT_USE_FORWARDED_HEADER__
-RESTLET_COMPONENT_REUSE_ADDRESS=__RESTLET_COMPONENT_REUSE_ADDRESS__
-
-AJSC_EXTERNAL_LIB_FOLDERS=__CSM_LIB__|__DME2_LIB__|__AJSC_EXTERNAL_LIB_FOLDERS__
-AJSC_EXTERNAL_PROPERTIES_FOLDERS=__AJSC_SHARED_CONFIG__|__AJSC_EXTERNAL_PROPERTIES_FOLDERS__
-
-KEY_STORE_PASSWORD=__KEY_STORE_PASSWORD__
-KEY_MANAGER_PASSWORD=__KEY_MANAGER_PASSWORD__
-AAI_CORE_VERSION=1.2.0-SNAPSHOT
diff --git a/aai-traversal/src/main/resources/retired.properties b/aai-traversal/src/main/resources/retired.properties
new file mode 100644
index 0000000..2261713
--- /dev/null
+++ b/aai-traversal/src/main/resources/retired.properties
@@ -0,0 +1,5 @@
+# Retired properties
+retired.api.pattern.list=\
+ ^/aai/v[2-7]+/.*$\
+
+retired.api.all.versions=
\ No newline at end of file
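The retired.api.pattern.list entry added above retires any traversal URI whose version token is made up solely of the digits 2 through 7, while later versions remain live. As a purely illustrative sanity check of the pattern (not part of the commit), candidate paths can be fed through grep -E:

    # matches: v7 falls inside [2-7], so the retired-API interceptor would reject this path
    echo '/aai/v7/network/pnfs' | grep -E '^/aai/v[2-7]+/.*$'
    # no match: '1' is outside the [2-7] class, so v14-style paths are not retired
    echo '/aai/v14/network/pnfs' | grep -E '^/aai/v[2-7]+/.*$'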
diff --git a/aai-traversal/src/main/resources/schema/UebEventLogEntry.xsd b/aai-traversal/src/main/resources/schema/UebEventLogEntry.xsd
deleted file mode 100644
index edc8924..0000000
--- a/aai-traversal/src/main/resources/schema/UebEventLogEntry.xsd
+++ /dev/null
@@ -1,43 +0,0 @@
-<!--
-
- ============LICENSE_START=======================================================
- org.onap.aai
- ================================================================================
- Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
-
- ECOMP is a trademark and service mark of AT&T Intellectual Property.
-
--->
-<xs:schema attributeFormDefault="unqualified" elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
- <xs:complexType name="UebEventLogEntry">
- <xs:sequence>
- <xs:element type="xs:string" name="transactionLogEntryId"/>
- <xs:element type="xs:string" name="notificationId"/>
- <xs:element type="xs:string" name="topic"/>
- <xs:element type="xs:string" name="action"/>
- <xs:element minOccurs="0" type="xs:string" name="entityLink"/>
- <xs:element type="xs:string" name="payload"/>
- <xs:element type="xs:string" name="status"/>
- <xs:element minOccurs="0" type="xs:string" name="owner"/>
- <xs:element type="xs:long" name="lastUpdateTimestamp"/>
- </xs:sequence>
- </xs:complexType>
- <xs:complexType name="UebEventLogEntries">
- <xs:sequence>
- <xs:element minOccurs="0" maxOccurs="unbounded" name="UebEventLogEntries" type="UebEventLogEntry"/>
- </xs:sequence>
- </xs:complexType>
-</xs:schema>
diff --git a/aai-traversal/src/main/resources/etc/query/stored-queries.json b/aai-traversal/src/main/resources/schema/onap/query/stored-queries.json
index d511e5a..a3f6a62 100644
--- a/aai-traversal/src/main/resources/etc/query/stored-queries.json
+++ b/aai-traversal/src/main/resources/schema/onap/query/stored-queries.json
@@ -1,7 +1,7 @@
{
"stored-queries":[{
"cloud-region-fromVnf":{
- "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vnfc').store('x').createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vserver').store('x').createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x').cap('x').unfold().dedup()"
+ "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vnfc').store('x').createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x')), builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x'))).cap('x').unfold().dedup()"
}
},{
"linked-devices":{
@@ -16,7 +16,7 @@
}
},{
"pserver-fromConfiguration":{
- "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'configuration', 'logical-link').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'l-interface').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'generic-vnf').store('x').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'pnf').store('x')).cap('x').unfold().dedup()"
+ "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'configuration', 'logical-link').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'l-interface').store('x').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vserver').createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'generic-vnf').store('x').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'pnf').store('x')).cap('x').unfold().dedup()"
}
},{
"spaas-topology-fromServiceInstance":{
@@ -53,6 +53,13 @@
"stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'complex', 'l3-network').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-network', 'cloud-region').store('x').cap('x').unfold()"
}
},{
+ "pnf-from-model":{
+ "query":{
+ "required-properties":["equipVendor","equipModel"]
+ },
+ "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'pnf').getVerticesByProperty('equip-vendor', equipVendor).getVerticesByProperty('equip-model', equipModel).store('x').cap('x').unfold().dedup()"
+ }
+ },{
"pnf-fromModel-byRegion":{
"query":{
"required-properties":["equipVendor","equipModel","cloudRegionId"]
@@ -85,7 +92,7 @@
"query":{
"required-properties":["interfaceId"]
},
- "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'configuration', 'logical-link').where(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'l-interface').getVerticesByProperty('interface-id', interfaceId).store('x')).union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'generic-vnf').store('x').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'pnf').store('x')).cap('x').unfold().dedup()"
+ "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'configuration', 'logical-link').where(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'l-interface').getVerticesByProperty('interface-id', interfaceId).store('x')).union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vserver').createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'generic-vnf').store('x').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'pnf').store('x')).cap('x').unfold().dedup()"
}
},{
"cloudRegion-fromCountry":{
@@ -112,9 +119,9 @@
},{
"vserver-fromVnf":{
"query":{
- "required-properties":["nfcFunction"]
+ "required-properties":["nfcNamingCode"]
},
- "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vnfc').getVerticesByProperty('nfc-function', nfcFunction).store('x').createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vserver').store('x').createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x')).cap('x').unfold().dedup()"
+ "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vnfc').getVerticesByProperty('nfc-naming-code', nfcNamingCode).store('x').createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vserver').store('x').createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x')).cap('x').unfold().dedup()"
}
},{
"sites-byCloudRegionId":{
@@ -181,7 +188,7 @@
}
},{
"vnf-topology-fromServiceInstance":{
- "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'service-subscription').createEdgeTraversal(EdgeType.TREE, 'service-subscription', 'customer').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'allotted-resource').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'generic-vnf').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'vf-module').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'volume-group').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'l-interface').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'l3-network').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'l3-network').store('x')))).cap('x').unfold().dedup()"
+ "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'service-subscription').createEdgeTraversal(EdgeType.TREE, 'service-subscription', 'customer').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'configuration').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'allotted-resource').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'generic-vnf').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'l3-network').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'l3-network', 'vlan-tag').store('x'), builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'l3-network', 'l3-network').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-network', 'vlan-tag').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'vf-module').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'volume-group').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'l-interface').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'l3-network').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'l3-network').store('x')))).cap('x').unfold().dedup()"
}
},{
"ucpe-topology":{
@@ -204,7 +211,7 @@
}
},{
"pending-topology-detail":{
- "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'platform').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'line-of-business').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'service-instance').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'owning-entity').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'project').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vnfc').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vnfc', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vnfc', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vip-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'vip-ipv4-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vip-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'vip-ipv6-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x')).dedup()).cap('x').unfold().dedup()"
+ "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'platform').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'line-of-business').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'service-instance').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'owning-entity').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'project').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'vf-module').store('x').createEdgeTraversal(EdgeType.COUSIN, 'vf-module', 'vnfc').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vnfc', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vnfc', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vip-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'vip-ipv4-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vip-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'vip-ipv6-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x')).dedup()).cap('x').unfold().dedup()"
}
},{
"vnfs-fromServiceInstance":{
@@ -228,7 +235,7 @@
}
},{
"topology-detail-fromVserver":{
- "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'subnet').store('x').createEdgeTraversal(EdgeType.TREE, 'subnet', 'l3-network').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').createEdgeTraversal(EdgeType.COUSIN, 'pserver', 'complex').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'generic-vnf').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'platform').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'line-of-business').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'service-instance').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'owning-entity').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'project').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vnfc').store('x'))).cap('x').unfold().dedup()"
+ "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE,'vserver','l-interface').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE,'l-interface','l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN,'l3-interface-ipv4-address-list','subnet').store('x').createEdgeTraversal(EdgeType.TREE,'subnet','l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE,'l-interface','l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN,'l3-interface-ipv6-address-list','subnet').store('x').createEdgeTraversal(EdgeType.TREE,'subnet','l3-network').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.TREE,'vserver','tenant').store('x').createEdgeTraversal(EdgeType.TREE,'tenant','cloud-region').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'vserver','pserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'pserver','complex').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'pserver','availability-zone').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'vserver','generic-vnf').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE,'generic-vnf','vf-module').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'generic-vnf','platform').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'generic-vnf','line-of-business').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'generic-vnf','virtual-data-center').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'generic-vnf','volume-group').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'generic-vnf','service-instance').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'service-instance','owning-entity').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'service-instance','project').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'generic-vnf','vnfc').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'vserver','image').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'vserver','flavor').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'vserver','vf-module').store('x')).cap('x').unfold().dedup();"
}
},{
"vserverlogicallink-frompServer":{
@@ -236,7 +243,7 @@
}
},{
"vnf-topology-fromVnf":{
- "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'service-instance').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'service-subscription').createEdgeTraversal(EdgeType.TREE, 'service-subscription', 'customer').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'allotted-resource').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'vf-module').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'volume-group').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'l-interface').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'l3-network').store('x'),),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'l3-network').store('x'),)).cap('x').unfold().dedup()"
+ "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'service-instance').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'service-subscription').store('x').createEdgeTraversal(EdgeType.TREE, 'service-subscription', 'customer').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'allotted-resource').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'vf-module').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'volume-group').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'l-interface').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'l3-network').store('x'),),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv4-address-list', 'l3-network').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-interface-ipv6-address-list', 'l3-network').store('x'),))).cap('x').unfold().dedup()"
}
},{
"service-topology":{
@@ -247,25 +254,184 @@
"stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'service-subscription').store('x').createEdgeTraversal(EdgeType.TREE, 'service-subscription', 'customer').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'forwarding-path').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'forwarding-path', 'configuration').store('x').createEdgeTraversal(EdgeType.TREE, 'configuration', 'evc').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'forwarding-path', 'forwarder').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'forwarder', 'configuration').store('x').createEdgeTraversal(EdgeType.TREE, 'configuration', 'forwarder-evc').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'forwarder', 'p-interface').store('x').createEdgeTraversal(EdgeType.TREE, 'p-interface', 'pnf').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'forwarder', 'lag-interface').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'lag-interface', 'pnf').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'lag-interface', 'logical-link').getVerticesByProperty('link-type', 'LAG').store('x'))))).cap('x').unfold().dedup()"
}
},{
- "count-vnf-byVnfType":{
+ "count-vnf-byVnfType":{
"stored-query":"builder.getVerticesByProperty('aai-node-type', 'generic-vnf').groupCount().by('vnf-type').store('x').unfold()"
- }
+ }
},{
- "pservers-withNoComplex":{
+ "pservers-withNoComplex":{
"stored-query":"builder.getVerticesByProperty('aai-node-type', 'pserver').where(builder.newInstance().not(builder.newInstance().both().getVerticesByProperty('aai-node-type', 'complex'))).store('x').unfold()"
- }
+ }
},{
"gfp-vserver-data":{
"stored-query":"builder.createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').store('x')).cap('x').unfold().dedup()"
}
},{
- "gfp-vnf-data":{
+ "gfp-vnf-data":{
"stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'l-interface').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv4-address-list').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv6-address-list').store('x'))),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'lag-interface').createEdgeTraversal(EdgeType.TREE, 'lag-interface', 'l-interface').store('x').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv4-address-list').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv6-address-list').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vnf-image').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'network-profile').store('x')).cap('x').unfold().dedup()"
}
},{
+
+ "related-to":{
+ "query":{
+ "required-properties":["startingNodeType","relatedToNodeType"]
+ },
+ "stored-query":"builder.createEdgeTraversal(startingNodeType, relatedToNodeType).store('x').cap('x').unfold().dedup()"
+ }
+ },{
+ "gfp-vserver":{
+ "stored-query":"builder.where(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant'))"
+ }
+ },{
+ "sriov-topology-fromVnf":{
+ "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'sriov-vf').store('x').createEdgeTraversal(EdgeType.COUSIN, 'sriov-vf', 'sriov-pf').createEdgeTraversal(EdgeType.TREE, 'sriov-pf','p-interface').createEdgeTraversal(EdgeType.COUSIN, 'p-interface', 'physical-link').store('x').cap('x').unfold().dedup()"
+ }
+ },{
+ "vserver-l-interfaces-fromVnfc":{
+ "query":{
+ "optional-properties":["networkName"]
+ },
+ "stored-query":"builder.where(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vserver').createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').getVerticesByProperty('network-name',networkName)).createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vserver').store('x').createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').getVerticesByProperty('network-name',networkName).store('x').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l-interface').store('x').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').store('x').cap('x').unfold().dedup()"
+ }
+ },{
+ "vnfs-vlans-fromServiceInstance":{
+ "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'configuration').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'configuration', 'generic-vnf').store('x'), builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'configuration', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').store('x')).cap('x').unfold().dedup()"
+ }
+ },{
+ "getClfiRoadmTailSummary":{
+ "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'logical-link', 'p-interface').store('x').createEdgeTraversal(EdgeType.TREE,'p-interface', 'pnf').store('x'), builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'logical-link', 'logical-link').store('x').createEdgeTraversal(EdgeType.COUSIN,'logical-link', 'service-instance').store('x').createEdgeTraversal(EdgeType.TREE,'service-instance', 'service-subscription').store('x').createEdgeTraversal(EdgeType.TREE,'service-subscription', 'customer').store('x')).cap('x').unfold().dedup()"
+ }
+ },{
+ "getRouterRoadmTailSummary":{
+ "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.TREE,'pnf', 'p-interface').store('x').createEdgeTraversal(EdgeType.COUSIN,'p-interface', 'logical-link').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'logical-link', 'p-interface').store('x').createEdgeTraversal(EdgeType.TREE,'p-interface', 'pnf').store('x'), builder.newInstance().createEdgeTraversal(EdgeType.COUSIN,'logical-link', 'logical-link').store('x').createEdgeTraversal(EdgeType.COUSIN,'logical-link', 'service-instance').store('x').createEdgeTraversal(EdgeType.TREE,'service-instance', 'service-subscription').store('x').createEdgeTraversal(EdgeType.TREE,'service-subscription', 'customer').store('x')).cap('x').unfold().dedup()"
+ }
+ },{
+ "topology-summary-fromCloudRegion":{
+ "stored-query": "builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'cloud-region', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'), builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'vnfc').store('x'), builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'generic-vnf').store('x').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vnfc').store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'cloud-region', 'pserver').store('x')).cap('x').unfold().dedup()"
+ }
+ }, {
+ "vservers-fromPserver-tree": {
+ "stored-query": "builder.createEdgeTraversal(EdgeType.COUSIN, 'pserver', 'vserver').tree()"
+ }
+ },{
+ "cloud-region-and-source-FromConfiguration":{
+ "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN, 'configuration','logical-link').createEdgeTraversalWithLabels(EdgeType.COUSIN, 'logical-link', 'l-interface',new ArrayList<>(Arrays.asList('org.onap.relationships.inventory.Source'))).createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vserver').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'vf-module').createEdgeTraversal(EdgeType.TREE, 'vf-module', 'generic-vnf').store('x')).cap('x').unfold().dedup()"
+ }
+ },{
+ "destination-FromConfiguration":{
+ "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN, 'configuration','logical-link').union(builder.newInstance().createEdgeTraversalWithLabels(EdgeType.COUSIN, 'logical-link', 'l-interface', new ArrayList<>(Arrays.asList('org.onap.relationships.inventory.Destination'))).createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vserver').createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'vf-module').createEdgeTraversal(EdgeType.TREE, 'vf-module', 'generic-vnf').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'generic-vnf').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'pnf').store('x')).cap('x').unfold().dedup()"
+ }
+ },{
+ "topology-summary-fromTenant":{
+ "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'tenant', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'generic-vnf').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x'))).cap('x').unfold().dedup()"
+ }
+ },{
+ "vfModule-fromServiceInstance":{
+ "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN, 'service-instance','generic-vnf').createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'vf-module').store('x').cap('x').unfold().dedup()"
+ }
+ },{
+ "getComplexByPnfName":{
+ "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN,'pnf', 'complex').store('x').cap('x').unfold().dedup()"
+ }
+ },{
+ "getComplexFromHostname":{
+ "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN,'pserver', 'complex').tree()"
+ }
+ },{
+ "instance-groups-byCloudRegion":{
+ "query":{
+ "required-properties":["type","role","function"]
+ },
+ "stored-query":"builder.createEdgeTraversal(EdgeType.COUSIN, 'cloud-region', 'instance-group').getVerticesByProperty('instance-group-type', type).getVerticesByProperty('instance-group-role', role).getVerticesByProperty('instance-group-function', function).store('x').cap('x').unfold().dedup()"
+ }
+ },{
+ "network-collection-ByServiceInstance":{
+ "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'collection').store('x').createEdgeTraversal(EdgeType.COUSIN, 'collection', 'instance-group').store('x').createEdgeTraversal(EdgeType.COUSIN, 'instance-group', 'l3-network').store('x').cap('x').unfold().dedup()"
+ }
+ },{
"containment-path":{
"stored-query":"builder.until(builder.newInstance().not(builder.newInstance().getParentEdge())).repeat(builder.newInstance().getParentVertex()).path()"
}
- }
- ]
+ },{
+ "getSvcSubscriberModelInfo":{
+ "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'service-instance', 'service-subscription').store('x'),builder.newInstance().createPrivateEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'model-ver').store('x')).cap('x').unfold().dedup()"
+ }
+ },
+ {
+ "getLogicalLinkByCloudRegionId": {
+ "stored-query": "builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'cloud-region', 'logical-link').store('x').cap('x').unfold().dedup()"
+ }
+ },{
+ "getPinterfacePhysicalLinkBySvcInstId":{
+ "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN,'service-instance', 'generic-vnf').store('x').createEdgeTraversal(EdgeType.COUSIN,'generic-vnf', 'vserver').store('x').createEdgeTraversal(EdgeType.COUSIN,'vserver', 'pserver').store('x').createEdgeTraversal(EdgeType.TREE,'pserver', 'p-interface').store('x').createEdgeTraversal(EdgeType.COUSIN,'p-interface', 'physical-link').store('x').cap('x').unfold().dedup()"
+ }
+ },{
+ "topology-detail-fromVnf":{
+ "stored-query":"builder.store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'vnfc').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x').createEdgeTraversal(EdgeType.TREE, 'cloud-region', 'availability-zone').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'image').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'flavor').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').store('x').createEdgeTraversal(EdgeType.COUSIN, 'pserver', 'complex').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface','l3-interface-ipv4-address-list').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list').store('x'))),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'service-instance').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'vf-module').createEdgeTraversal(EdgeType.COUSIN, 'vf-module', 'volume-group').store('x')).cap('x').unfold().dedup()"}
+ },{
+ "vnf-to-service-instance":{
+ "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'service-instance').store('x').createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'generic-vnf').store('x').cap('x').unfold().dedup()"}
+ },{
+ "getServiceTopology":{
+ "stored-query":"builder.union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'generic-vnf').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'l-interface').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv4-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv6-address-list'))),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv4-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv6-address-list'))),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'pserver').createEdgeTraversal(EdgeType.COUSIN, 'pserver', 'complex'))),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'allotted-resource').createEdgeTraversal(EdgeType.TREE, 'allotted-resource', 'service-instance').createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'generic-vnf').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'l-interface').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv4-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv6-address-list'))),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv4-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'l3-interface-ipv6-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv4-address-list'),builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vlan', 'l3-interface-ipv6-address-list')))))).tree()"}
+ },
+ {
+ "getL3networkCloudRegionByNetworkRole": {
+ "stored-query": "builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-network', 'generic-vnf').store('x').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x').createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x').cap('x').unfold().dedup()"
+ }
+ },{
+ "getDHVLogicalLink": {
+ "stored-query": "builder.createEdgeTraversal(EdgeType.TREE, 'generic-vnf', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'vlan').createEdgeTraversal(EdgeType.COUSIN, 'vlan', 'logical-link').tree()"
+ }
+ },
+ {
+ "pserver-fromHostnameFirstToken": {
+ "query":{
+ "required-properties":["hostnameFirstToken","sourcesOfTruth"]
+ },
+ "stored-query": "builder.getVerticesStartsWithProperty('hostname', hostnameFirstToken).getVerticesByProperty('source-of-truth', new ArrayList<>(Arrays.asList(sourcesOfTruth)))"
+ }
+ },
+ {
+ "pserver-fromFqdnFirstToken": {
+ "query":{
+ "required-properties":["fqdnFirstToken","sourcesOfTruth"]
+ },
+ "stored-query": "builder.getVerticesStartsWithProperty('fqdn', fqdnFirstToken).getVerticesByProperty('source-of-truth', new ArrayList<>(Arrays.asList(sourcesOfTruth)))"
+ }
+ },
+ {
+ "getLinterface-fromNewvce": {
+ "query":{
+ "required-properties":["interfaceRole1","interfaceRole2"]
+ },
+ "stored-query": "builder.createEdgeTraversal(EdgeType.TREE, 'newvce', 'l-interface').getVerticesByProperty('interface-role', interfaceRole1).createEdgeTraversal(EdgeType.COUSIN, 'l-interface', 'logical-link').createEdgeTraversal(EdgeType.COUSIN, 'logical-link', 'l-interface').getVerticesByProperty('interface-role', interfaceRole2).store('x').cap('x').unfold().dedup()"
+ }
+ },
+ {
+ "l-interface-to-CP": {
+ "query":{
+ "required-properties":["isProviderNetwork"]
+ },
+ "stored-query": "builder.union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vnfc', 'cp').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'cp', 'vlan-tag').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'cp', 'l3-network').getVerticesByBooleanProperty('is-provider-network', isProviderNetwork).store('x')),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vnfc', 'vserver').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').store('x'))).cap('x').unfold().dedup()"
+ }
+ },
+ {
+ "getNetworks": {
+ "query":{
+ "required-properties":["networkRole","cloudRegionId"]
+ },
+ "stored-query": "builder.createEdgeTraversal(EdgeType.COUSIN, 'owning-entity','service-instance').createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'l3-network').getVerticesByProperty('network-role', networkRole).where(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'l3-network', 'cloud-region').getVerticesByProperty('cloud-region-id', cloudRegionId)).store('x').cap('x').unfold().dedup()"
+ }
+ },
+ {
+ "fabric-information-fromVnf":{
+ "stored-query":"builder.store('x').createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'l-interface').createEdgeTraversal(EdgeType.TREE, 'l-interface', 'sriov-vf').createEdgeTraversal(EdgeType.COUSIN, 'sriov-vf', 'sriov-pf').createEdgeTraversal(EdgeType.TREE, 'sriov-pf', 'p-interface').store('x').createEdgeTraversal(EdgeType.TREE, 'p-interface', 'pserver').store('x'), builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'vserver', 'vnfc').createEdgeTraversal(EdgeType.TREE, 'vnfc', 'cp').createEdgeTraversal(EdgeType.COUSIN, 'cp', 'vlan-tag').store('x')).cap('x').unfold().dedup()"
+ }
+ },
+ {
+ "getNetworksByServiceInstance": {
+ "stored-query": "builder.createEdgeTraversal(EdgeType.COUSIN, 'service-instance', 'l3-network').store('x').union(builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'l3-network', 'vlan-tag').store('x'),builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'l3-network', 'l3-network').store('x').createEdgeTraversal(EdgeType.COUSIN, 'l3-network', 'vlan-tag').store('x')).cap('x').unfold().dedup()"
+ }
+ }
+ ]
}
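The stored queries added above (pnf-from-model, getComplexByPnfName, vservers-fromPserver-tree and the rest) are executed through the traversal custom-query endpoint. The sketch below shows one way such a query could be invoked; the host, port, credentials, API version and PNF name are all assumptions for illustration and must be replaced with values from an actual deployment:

    # PUT a starting node plus the stored-query name to /aai/<version>/query (all values hypothetical)
    curl -sk -u AAI:AAI -X PUT 'https://localhost:8446/aai/v14/query?format=simple' \
      -H 'X-FromAppId: example-app' -H 'X-TransactionId: example-txn-1' \
      -H 'Content-Type: application/json' -H 'Accept: application/json' \
      -d '{"start": ["network/pnfs/pnf/example-pnf"], "query": "query/getComplexByPnfName"}'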
diff --git a/aai-traversal/src/main/scripts/common_functions.sh b/aai-traversal/src/main/scripts/common_functions.sh
index 8bd881a..3aeb865 100644
--- a/aai-traversal/src/main/scripts/common_functions.sh
+++ b/aai-traversal/src/main/scripts/common_functions.sh
@@ -29,7 +29,7 @@ execute_spring_jar(){
shift 2;
- EXECUTABLE_JAR=$(ls ${PROJECT_HOME}/lib/aai-traversal-*SNAPSHOT.jar);
+ EXECUTABLE_JAR=$(ls ${PROJECT_HOME}/lib/*.jar);
JAVA_OPTS="${JAVA_PRE_OPTS}";
JAVA_OPTS="-DAJSC_HOME=$PROJECT_HOME";
@@ -40,6 +40,14 @@ execute_spring_jar(){
JAVA_OPTS="$JAVA_OPTS -Dlogback.configurationFile=${logbackFile}";
JAVA_OPTS="${JAVA_OPTS} ${JAVA_POST_OPTS}";
+ export SOURCE_NAME=$(grep '^schema.source.name=' ${PROJECT_HOME}/resources/application.properties | cut -d"=" -f2-);
+ # Needed for the schema ingest library beans
+ eval $(grep '^schema\.' ${PROJECT_HOME}/resources/application.properties | \
+ sed 's/^\(.*\)$/JAVA_OPTS="$JAVA_OPTS -D\1"/g' | \
+ sed 's/${server.local.startpath}/${PROJECT_HOME}\/resources/g'| \
+ sed 's/${schema.source.name}/'${SOURCE_NAME}'/g'\
+ )
+
${JAVA_HOME}/bin/java ${JVM_OPTS} ${JAVA_OPTS} -jar ${EXECUTABLE_JAR} "$@"
}
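The block added to execute_spring_jar above turns every schema.* entry in application.properties into a -D system property so the schema ingest library beans can resolve their configuration when the jar is launched from these scripts. A rough sketch of what the grep/sed/eval pipeline produces for a single property line (the property value, SOURCE_NAME and PROJECT_HOME shown here are illustrative assumptions):

    # Given this line in ${PROJECT_HOME}/resources/application.properties:
    #   schema.nodes.location=${server.local.startpath}/schema/${schema.source.name}/oxm
    # and SOURCE_NAME=onap, PROJECT_HOME=/opt/app/aai-traversal, the eval executes a statement
    # equivalent to the following, appending the resolved path as a -D option:
    JAVA_OPTS="$JAVA_OPTS -Dschema.nodes.location=/opt/app/aai-traversal/resources/schema/onap/oxm"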
diff --git a/aai-traversal/src/main/scripts/install/instutils.sh b/aai-traversal/src/main/scripts/install/instutils.sh
deleted file mode 100644
index ab94a9c..0000000
--- a/aai-traversal/src/main/scripts/install/instutils.sh
+++ /dev/null
@@ -1,725 +0,0 @@
-#!/bin/ksh
-#
-# ============LICENSE_START=======================================================
-# org.onap.aai
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-ECHO=${ECHO:-echo}
-
-q_flags="-qq -k$$"
-
-show_install=${PROJECT_HOME}/install/show_install
-
-############################################################################
-# checkgroup groupname gid
-# checks if group is already in /etc/group and if it has the right gid
-# if it's not there, it adds it
-# gid can be DC if you don't care
-############################################################################
-checkgroup () {
- ecode=0
- OFILE=/tmp/group.$$
- getent group "$1" > $OFILE
- if [ $? -eq 0 ]
- then
- CHECKGID=$( grep "^$1:" $OFILE | cut -f3 -d: 2>/dev/null )
- CHECKGROUPPRESENT=$( grep "^$1:" $OFILE | cut -f1 -d: 2>/dev/null )
- CHECKGROUP=$( grep ":$2:" $OFILE | cut -f1 -d: 2>/dev/null )
- fi
- if [ "${CHECKGID}" = "" -a "${CHECKGROUP}" = "" ]
- then
- ${ECHO} "Adding $1 group ..."
- if [ "$2" = "DC" ]
- then
- groupadd $1
- else
- groupadd -g $2 $1
- fi
- if [ "$?" != "0" ]
- then
- ${ECHO} "Cannot add group $1, continuing..."
- ecode=1
- fi
- else
- if [ "${CHECKGROUPPRESENT}" = "$1" ]
- then
- if [ "$2" != "DC" ]
- then
- if [ "${CHECKGID}" != "$2" ]
- then
- ${ECHO} "ERROR: $1 group added but with wrong gid \"${CHECKGID}\"; should be $2"
- ecode=1
- fi
- if [ "${CHECKGROUP}" != "$1" ]
- then
- ${ECHO} "ERROR: wrong group \"${CHECKGROUP}\" for gid $2, group should be $1"
- ecode=1
- fi
- else
- ${ECHO} "$1 group has already been added"
- fi
- fi
- fi
- rm -f $OFILE
- return ${ecode}
-}
-
-############################################################################
-# checkuser username uid homedir shell group
-# checks if the username/uid/homedir/shell combo is already in /etc/passwd
-# if not, it adds it
-# if the login is there and the uid belongs to a different user, it errors
-# if the login is there and the shell is not correct, it errors
-# uid may be DC for don't care
-############################################################################
-checkuser () {
- ecode=0
- OFILE=/tmp/user.$$
- getent passwd $1 > $OFILE
- if [ $? -eq 0 ]
- then
- CHECKUID=$( grep "^$1:" $OFILE | cut -f3 -d: 2>/dev/null )
- CHECKLOGIN=$( grep ":x:$2:" $OFILE | cut -f1 -d: 2>/dev/null )
- CHECKLOGINPRESENT=$( grep "^$1:" $OFILE | cut -f1 -d: 2>/dev/null )
- CHECKSHELL=$( grep "^$1:" $OFILE | cut -f7 -d: 2>/dev/null )
- CHECKHOME=$( grep "^$1:" $OFILE | cut -f6 -d: 2>/dev/null )
- fi
-
- if [ ! -d $3 ]
- then
- mkdir -p $3
- if [ "$?" != "0" ]
- then
- ${ECHO} "mkdir -p $3 failed"
- ecode=1
- fi
- chmod -R 755 $3
- fi
- if [ "${CHECKUID}" = "" -a "${CHECKLOGIN}" = "" ]
- then
- ${ECHO} "Adding $1 login ..."
- if [ "$2" = "DC" ]
- then
- useradd -g $5 -d $3 -s $4 -c "$1 LOGIN" -m $1
- else
- useradd -u $2 -g $5 -d $3 -s $4 -c "$1 LOGIN" -m $1
- fi
- if [ "$?" != "0" ]
- then
- ${ECHO} "Cannot add $1 login, continuing..."
- ecode=1
- fi
- elif [ "${CHECKLOGINPRESENT}" = "$1" -a "$2" = "DC" -a "${CHECKSHELL}" = "$4" -a "${CHECKHOME}" = "$3" ]
- then
- ${ECHO} "The '$1' login has already been added to system with UID ${CHECKUID}."
-
- elif [ "${CHECKUID}" = "$2" -a "${CHECKLOGIN}" = "$1" -a "${CHECKSHELL}" = "$4" -a "${CHECKHOME}" = "$3" ]
- then
- ${ECHO} "The '$1' login has already been added to system."
- else
- if [ "$2" != "DC" -a "${CHECKUID}" != "$2" ]
- then
- ${ECHO} "ERROR: $1 login added but with wrong uid \"${CHECKUID}\"; should be $2"
- ecode=1
- fi
- if [ "$2" != "DC" -a "${CHECKLOGIN}" != "$1" ]
- then
- ${ECHO} "ERROR: wrong login \"${CHECKLOGIN}\" for uid $2, login should be $1"
- ecode=1
- fi
- if [ "${CHECKHOME}" != "$3" ]
- then
- ${ECHO} "ERROR: wrong home directory \"${CHECKHOME}\" for login $1, should be $3"
- ecode=1
- fi
- if [ "${CHECKSHELL}" != "$4" ]
- then
- ${ECHO} "ERROR: $1 login not set up with $4"
- ecode=1
- fi
- fi
- rm -f $OFILE
- return ${ecode}
-}
-
-############################################################################
-# checkhome username homedir action
-# if the user doesn't exist, it errors
-# checks if the username has homedir as its home directory
-# if not and action is null, it modifies it
-# if not and action is mod, it modifies it
-# if not and action is error, it errors
-############################################################################
-checkhome () {
- ecode=0
- OFILE=/tmp/user.$$
- getent passwd $1 > $OFILE
- if [ $? -eq 0 ]
- then
- CHECKUID=$( grep "^$1:" $OFILE | cut -f3 -d: 2>/dev/null )
- CHECKGID=$( grep "^$1:" $OFILE | cut -f4 -d: 2>/dev/null )
- CHECKHOME=$( grep "^$1:" $OFILE | cut -f6 -d: 2>/dev/null )
-
- if [ "${CHECKHOME}" = "$2" ]
- then
- if [ ! -d $2 ]
- then
- mkdir -p $2
- if [ "$?" != "0" ]
- then
- ${ECHO} "mkdir -p $2 failed"
- ecode=1
- fi
- chown ${CHECKUID}:${CHECKGID} $2
- chmod -R 755 $2
- fi
- else
- # modify the user to set the new home dir and move any current home dir there
- usermod -d $2 -m $1
- if [ "$?" != "0" ]
- then
- ${ECHO} "usermod -d $2 -m $1 failed"
- ecode=1
- fi
- fi
- else
- ${ECHO} "user $1 doesn't exist"
- ecode=1
- fi
-
- rm -f $OFILE
- return ${ecode}
-}
-
-##################################################################
-# checkloginsforpwds checks /etc/shadow for logins without passwords
-# the first argument is a list of logins to check
-##################################################################
-checkloginsforpwds () {
- for i in $1
- do
- CHECK_LOGIN=$( grep "^${i}:" /etc/shadow | grep "!!" )
- if [ "${CHECK_LOGIN}" != "" ]
- then
- NOPWD="${NOPWD} ${i}"
- fi
- done
-
- if [ "${NOPWD}" != "" ]
- then
- ${ECHO} ""
- ${ECHO} "REMINDER: The following logins must have a passwords assigned to them.\n"
- ${ECHO} "##############################################################"
- ${ECHO} " ${NOPWD} "
- ${ECHO} "##############################################################"
- ${ECHO} ""
- ${ECHO} " This must be done by executing the following command:"
- ${ECHO} ""
- ${ECHO} " $ passwd <login>"
- ${ECHO} ""
- ${ECHO} " After typing the \"passwd\" command you will be prompted for"
- ${ECHO} " the password for the login."
- ${ECHO} ""
- fi
-}
-
-##################################################################
-# checkassignpasswords checks /etc/shadow for logins without passwords
-# and then asks the user to assign one
-# the first argument is a list of logins to check
-##################################################################
-checkassignpasswords () {
- for i in $1
- do
- CHECK_LOGIN=$( grep "^${i}:" /etc/shadow | grep LK )
- if [ "${CHECK_LOGIN}" != "" ]
- then
- ${ECHO} "Please assign a password for the '${i}' login"
- passwd ${i}
- ${ECHO}
- fi
- done
-}
-
-############################################################################
-# copywithperms origfile destfile owner group perms [save suffix]
-# copies origfile to destfile, giving destfile ownership and permissions
-# from owner, group, and perms. If the sixth argument is "save", the
-# original is saved in the same place with the seventh argument as the
-# suffix. If the seventh arg is null, $$ is used
-############################################################################
-copywithperms () {
- SAVE=0
- ECODE=0
- if [ "$6" = "save" -a -f "$2" ]
- then
- if [ "$7" = "" ]
- then
- cp $2 $2.$$
- else
- cp $2 $2.$7
- fi
- fi
- if [ -f $1 ]
- then
- cp $1 $2
- ECODE=$?
- chown ${3}:${4} $2
- chmod $5 $2
- else
- ${ECHO} "$1 is not a file. No copy done!"
- fi
- return ${ECODE}
-}
-
-############################################################################
-# mkdirwithperms dirname owner group perms ifExist
-# makes directory dirname, giving dirname ownership and permissions
-# from owner, group, and perms.
-# perms can be DC if you don't care
-# ifExist can be rm, error, dontcreate
-############################################################################
-mkdirwithperms () {
- ECODE=0
- if [ -f $1 ]
- then
- ECODE=1
- ${ECHO} "$1 exists but is a file. No mkdir done!"
- elif [ -d $1 ]
- then
- if [ "$5" = "rm" ]
- then
- rm -rf $1
- mkdir -p $1
- if [ "$?" != "0" ]
- then
- ${ECHO} "mkdir -p $1 failed"
- ECODE=1
- fi
- elif [ "$5" = "error" ]
- then
- ECODE=1
- ${ECHO} "$1 is a directory. No mkdir done!"
- elif [ "$5" != "dontcreate" ]
- then
- mkdir -p $1
- if [ "$?" != "0" ]
- then
- ${ECHO} "mkdir -p $1 failed"
- ECODE=1
- fi
- fi
- else
- mkdir -p $1
- if [ "$?" != "0" ]
- then
- ${ECHO} "mkdir -p $1 failed"
- ECODE=1
- fi
- fi
- if [ "${ECODE}" = "0" ]
- then
- chown ${2}:${3} $1
- if [ "$4" != "DC" ]
- then
- chmod $4 $1
- fi
- fi
- return ${ECODE}
-}
-
-
-############################################################################
-# chownwithperms owner group file mode
-# changes the ownership and mode for the specified file
-############################################################################
-chownwithperms () {
- chown ${1}:${2} $3
- chmod $4 $3
-}
-
-verifywhosrunning () {
- userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
- if [ "${userid}" != "$1" ]
- then
- ${ECHO} "You must be $1 to run $0"
- exit 1
- fi
-}
-
-replaceline() {
-
- name=$1
- value=$2
- file=$3
-
- if [ -z "${file}" ]
- then
- ${ECHO} "replaceline: ERROR: insufficient arguments: $1 $2" >&2
- return 1
- fi
-
- if [ -n "$4" ]
- then
- ${ECHO} "replaceline: ERROR: too many arguments: $1 $2 $3 $4" >&2
- return 1
- fi
-
- if [ -f ${file} ]
- then
- grep -v "^${name}=" ${file} > ${file}.$$
- ${ECHO} "${name}=${value}" >> ${file}.$$
- mv -f ${file}.$$ ${file}
- else
- ${ECHO} "${name}=${value}" > ${file}
- fi
-}
-
-replaceline_with_quotes() {
-
- name=$1
- value=$2
- file=$3
-
- if [ -z "${file}" ]
- then
- ${ECHO} "replaceline: ERROR: insufficient arguments: $1 $2" >&2
- return 1
- fi
-
- if [ -n "$4" ]
- then
- ${ECHO} "replaceline: ERROR: too many arguments: $1 $2 $3 $4" >&2
- return 1
- fi
-
- if [ -f ${file} ]
- then
- grep -v "^${name}=" ${file} > ${file}.$$
- ${ECHO} "${name}=\"${value}\"" >> ${file}.$$
- mv -f ${file}.$$ ${file}
- else
- ${ECHO} "${name}=\"${value}\"" > ${file}
- fi
-}
-
-# this deleteline will not actually delete the entry,
-# but only delete the value, leaving "name=".
-# When siteconf.pl went from Boilerplate to Fillin,
-# we changed this because Fillin can handle null values.
-
-deleteline() {
-
- name=$1
- file=$2
-
- if [ -z "${file}" ]
- then
- ${ECHO} "deleteline: ERROR: insufficient arguments" >&2
- return 1
- fi
-
- if [ -f ${file} ]
- then
- cp ${file} ${file}.$$
- lno=$( grep -n "^${name}=" ${file} | cut -d: -f1 )
- if [ "${lno}" != "" ]
- then
- sed "${lno}d" ${file} > ${file}.$$
- fi
- ${ECHO} "${name}=" >> ${file}.$$
- mv -f ${file}.$$ ${file}
- else
- ${ECHO} "${name}=" > ${file}
- fi
-}
-
-# dropline will drop the line from the file
-# unlike the deleteline function above
-
-dropline() {
-
- name=$1
- file=$2
-
- if [ -z "${file}" ]
- then
- ${ECHO} "dropline: ERROR: insufficient arguments" >&2
- return 1
- fi
-
- if [ -f ${file} ]
- then
- grep -v "^${name}=" ${file} > ${file}.$$
- mv -f ${file}.$$ ${file}
- fi
-}
-
-pause_install() {
-
- if [ "${Pause}" = "1" ]
- then
- if ${chkyn} -y "Continue with ${Itype}?"
- then
- return 0
- else
- ${ECHO} "${PNAME}: quitting" >&2
- exit 1
- fi
- fi
-}
-
-get_ITYPE() {
- ITYPE=$( ${chkyn} -fer ${q_flags} -h\? ${ITYPE:+-D"${ITYPE}"} -H \
-" If you are doing a fresh install, answer 'I' or answer 'U' for upgrade." \
-"Is this a fresh 'install' or 'upgrade' (I or U):${ITYPE:+ [${ITYPE}]}" \
- '^[IU]$' \
-'*** ERROR *** Entry must be I or U.' )
-}
-
-
-###
-# Change an /etc/group entry to allow a given user to change group into it.
-# arg1 = comma-sep group list (e.g., sylantro,other)
-# arg2 = user
-###
-addUserToGroup()
-{
- if [ -z "$1" -o -z "$2" ]
- then
- ${ECHO} "addUserToGroup failed, need two args, group and user"
- return 1
- else
- usermod -G $1 $2
- fi
- return 0
-}
-
-################### BACKUP AND RESTORE METHODS ########################
-################### VARIABLES ##########################
-################### VARIABLES ##########################
-################### VARIABLES ##########################
-################### VARIABLES ##########################
-
-NO_FILE_INDICATOR="__NO_PREVIOUS_FILE__"
-SAVE_SUFFIX=${Project}save
-
-################### SUBROUTINES ##########################
-################### SUBROUTINES ##########################
-################### SUBROUTINES ##########################
-################### SUBROUTINES ##########################
-################### SUBROUTINES ##########################
-
-##############################################################################
-# Purpose: make a backup copy of a file in such a way that the backup
-# won't be lost by re-running your script PLUS give you a predictable name
-# for the most recent backup to use when you roll back.
-#
-# Input:
-# - Arg1 = file to back up
-#
-# Requirement:
-# - Remove $1.save before calling this function or else a copy won't be made.
-# - Make sure to set the value of env value TODAY to use as a suffix.
-#
-# Description:
-# Copy $1 to $1.${SAVE_SUFFIX}.${TODAY}, then link that to $1.save.
-#
-##############################################################################
-make_backup_copy ()
-{
- if [ -z "${TODAY}" ]
- then
- ${ECHO} "make_backup_copy - TODAY variable is unset" >&2
- return 1
- fi
-
- if [ -f $1.${SAVE_SUFFIX}.${TODAY} -a -h $1.save ]
- then
- ${ECHO} "Note: backup already exists for $1"
- else
- # if the file doesn't already exist, set up for later deletion by rollback
- if [ ! -f $1 -a ! -h $1 ]
- then
- ${ECHO} ${NO_FILE_INDICATOR} > $1
- fi
- cp -p $1 $1.${SAVE_SUFFIX}.${TODAY}
- ln -s $1.${SAVE_SUFFIX}.${TODAY} $1.save
- fi
-}
-
-################################################################################
-# Purpose: Find the actual file that belongs to $1, which can be a symbolic
-# link.
-#
-# Input:
-# - Arg1 = path to file or link
-# - Arg2 = true if you want _SRCFILE to be null if no actual file is
-# found. If Arg2 is NOT true, then _SRCFILE is set to Arg1.
-#
-# Side Effect:
-# Sets value of _SRCFILE variable
-################################################################################
-find_source_file ()
-{
- if [ -z "$1" ]
- then
- ${ECHO} "find_source_file - needs at least one argument" >&2
- return 1
- fi
-
- ls -l $1 > /tmp/tls$$
- cat /tmp/tls$$ | sed 's/ */ /g' |cut -f11 > /tmp/cuts$$
- _SRCFILE=$( cat /tmp/cuts$$ )
-
- if [ "$_SRCFILE" = "" ]
- then
- if [ "$2" != "true" ]
- then
- _SRCFILE=$1
- fi
- fi
- rm -f /tmp/tls$$ /tmp/cuts$$
-}
-
-#######################################################################
-# Purpose: Expands template file using data in COPT variable.
-# Diffs expanded template against existing file and installs if different.
-# If arg5 = true, sets _config_changes=1 so you know that changes were installed
-#
-# Makes its own backup copy using make_backup_copy.
-# Does install if different using install_if_different.
-#
-# Input:
-# Arg1 = template path without .tmpl extension
-# Arg2 = install path
-# Arg3 = owner and group (e.g., root:other)
-# Arg4 = permissions (e.g., 750)
-# Arg5 = true/false, if expanded file is different from the installed one.
-# Set _config_changes to 1 if Arg5 is true. Otherwise, don't touch
-# _config_changes
-#
-# Requirement: set COPT to the value of the -c option to siteconf.pl
-#
-# Side Effect: sets _config_changes=1 if changes were installed
-#######################################################################
-install_from_template ()
-{
- if [ -z "${COPT}" ]
- then
- ${ECHO} "install_from_template - COPT is unset" >&2
- return 1
- fi
-
- TMPL=$( basename ${1} )
- OFILE=/tmp/${TMPL}
- if [ -f ${1}.tmpl ]
- then
-
- ${PROJECT_HOME}/bin/siteconf.pl -t ${1}.tmpl -c ${COPT} -o ${OFILE}
- install_if_different ${OFILE} ${2} ${3} ${4} ${5}
-
- else
- ${ECHO} "install_from_template: ERROR: Missing ${TMPL}.tmpl" >&2
- fi
- rm -f ${OFILE}
-}
-
-
-#######################################################################
-# Purpose: Copies source to destination if the two are different.
-# If arg5 = true, sets _config_changes=1 so you know that changes were installed
-#
-# Makes its own backup copy using make_backup_copy.
-#
-# Input:
-# Arg1 = source path
-# Arg2 = install path
-# Arg3 = owner and group (e.g., root:other)
-# Arg4 = permissions (e.g., 750)
-# Arg5 = true/false, if expanded file is different from the installed one.
-# Set _config_changes to 1 if Arg5 is true. Otherwise, don't touch
-# _config_changes
-#
-# Side Effect: sets _config_changes=1 if changes were installed
-#######################################################################
-install_if_different()
-{
- # Take backup before changing.
- # Only change if different.
- if [ -f ${2} ]
- then
- diff ${1} ${2} > /dev/null
- diffrc=$?
- if [ "${diffrc}" != "0" ]
- then
- ${ECHO} "Installing ${2}"
- make_backup_copy ${2}
- mv -f ${1} ${2}
- chown ${3} ${2}
- chmod ${4} ${2}
- if [ "${5}" = "true" ]
- then
- _config_changes=1
- fi
- fi
- else
- # creates backup containing ${NO_FILE_INDICATOR} for rollback removal
- make_backup_copy ${2}
- mv -f ${1} ${2}
- chown ${3} ${2}
- chmod ${4} ${2}
- if [ "${5}" = "true" ]
- then
- _config_changes=1
- fi
- fi
-}
-###################################################################
-# Purpose: rollback a file whose backup was made with make_backup_copy
-#
-# Input:
-# Arg1 is path of installed file. Subroutine will look for ${1}.save
-# Arg2 = true/false; if the expanded file is different from the installed one,
-# set _config_changes to 1 if Arg2 is true. Otherwise, don't touch
-# _config_changes
-#
-# Side Effect: sets _config_changes=1 if changes were rolled back
-###################################################################
-rollback_from_save ()
-{
- if [ -f ${1}.save -o -h ${1}.save ]
- then
- find_source_file ${1}.save false
- ${ECHO} "rollback_from_save: rolling back to $( basename ${_SRCFILE} )"
- grep ${NO_FILE_INDICATOR} ${_SRCFILE} > /dev/null
- if [ $? -eq 0 ]
- then
- rm -f ${_SRCFILE} ${1}
- else
- mv -f ${_SRCFILE} ${1}
- fi
- if [ "${2}" = "true" ]
- then
- _config_changes=1
- fi
- rm -f ${1}.save
- fi
-}
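
The removed instutils.sh collected ksh helpers for install-time user, group, directory and config-file management. As a rough, hypothetical illustration of how an install script might have used a few of them (the user, group, paths and modes below are assumptions taken from the comment headers, not from this repository):

    verifywhosrunning root
    checkgroup aaiadmin DC                        # gid "DC" = don't care
    checkuser aaiadmin DC /opt/app/aai-traversal /bin/ksh aaiadmin
    mkdirwithperms /opt/app/aai-traversal/logs aaiadmin aaiadmin 755 dontcreate
    copywithperms ./aaiconfig.properties /opt/app/aai-traversal/etc/aaiconfig.properties aaiadmin aaiadmin 644 save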
diff --git a/aai-traversal/src/main/scripts/putTool.sh b/aai-traversal/src/main/scripts/putTool.sh
index 6630d8f..593d0c9 100644
--- a/aai-traversal/src/main/scripts/putTool.sh
+++ b/aai-traversal/src/main/scripts/putTool.sh
@@ -50,12 +50,10 @@ display_usage() {
cat <<EOF
Usage: $0 [options]
- 1. Usage: putTool.sh <resource-path> <json payload file> <optional HTTP Response code> <optional -display>
+ 1. Usage: putTool.sh <resource-path> <json payload file> <optional -display>
2. This script requires two arguments, a resource path and a file path to a json file containing the payload.
- 3. Example: query?format=xxxx customquery.json (possible formats are simple, raw, console, count, graphson, id, pathed, resource and resource_and_url)
- 4. Adding the optional HTTP Response code will allow the script to ignore HTTP failure codes that match the input parameter.
- 5. Adding the optional "-display" argument will display all data returned from the request, instead of just a response code.
-
+ 3. Example: the resource-path and payload file for a particular customer are: business/customers/customer/JohnDoe customerpayload.json
+ 4. Adding the optional "-display" argument will display all data returned from the request.
EOF
}
if [ $# -eq 0 ]; then
@@ -130,24 +128,30 @@ else
fi
fi
+fname=$JSONFILE
+if [ -f /tmp/$(basename $JSONFILE) ]; then
+ fname=/tmp/$(basename $JSONFILE)
+elif [ ! -f $JSONFILE ]; then
+ echo "The file $JSONFILE does not exist"
+ exit -1
+fi
+
if [ $MISSING_PROP = false ]; then
if [ $USEBASICAUTH = false ]; then
AUTHSTRING="--cert $PROJECT_HOME/resources/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/resources/etc/auth/aaiClientPrivateKey.pem"
else
AUTHSTRING="-u $CURLUSER:$CURLPASSWORD"
fi
-
if [ $RETURNRESPONSE = true ]; then
- curl --request PUT -sL -k $AUTHSTRING -H "Content-Type: application/json" -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -T $JSONFILE $RESTURL$RESOURCE | python -mjson.tool
+ curl --request PUT -sL -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -H "Content-Type: application/json" -T $fname $RESTURL$RESOURCE | jq '.'
RC=$?
else
- result=`curl --request PUT -sL -w "%{http_code}" -o /dev/null -k $AUTHSTRING -H "Content-Type: application/json" -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -T $JSONFILE $RESTURL$RESOURCE`
+ result=`curl --request PUT -w "%{http_code}" -o /dev/null -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -H "Content-Type: application/json" -T $fname $RESTURL$RESOURCE`
#echo "result is $result."
RC=0;
if [ $? -eq 0 ]; then
case $result in
+([0-9])?)
- #if [[ "$result" -eq 412 || "$result" -ge 200 && $result -lt 300 ]]
if [[ "$result" -ge 200 && $result -lt 300 ]]
then
echo "PUT result is OK, $result"
@@ -184,4 +188,4 @@ else
fi
echo `date` " Done $0, returning $RC"
-exit $RC
+exit $RC
\ No newline at end of file
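
Taken together with the new /tmp fallback added above, a typical invocation matching the updated usage text would look like the sketch below (the resource path and payload file are the illustrative values from the usage message); if a file with the same basename exists under /tmp, for example because execTool.sh copied it into the container, that copy is used instead:

    ./putTool.sh business/customers/customer/JohnDoe customerpayload.json -display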
diff --git a/aai-traversal/src/main/swm/package/nix/common/deinstall.env b/aai-traversal/src/main/swm/package/nix/common/deinstall.env
deleted file mode 100644
index fb0a9f5..0000000
--- a/aai-traversal/src/main/swm/package/nix/common/deinstall.env
+++ /dev/null
@@ -1,10 +0,0 @@
-
-# This file is used to set the environment which the install_*.sh files
-# will use when executing. Only set variables that must be derived at
-# installation time here. For variables that should be set by the installer
-# in SWM, add VariableDescriptor elements to the descriptor.xml. Place
-# logical steps in the install_preproc.sh or install_postproc.sh.
-
-. `dirname $0`/common.env
-
-AAI_USER=aaiadmin;export AAI_USER
diff --git a/aai-traversal/src/main/swm/package/nix/common/install_postproc.sh b/aai-traversal/src/main/swm/package/nix/common/install_postproc.sh
deleted file mode 100644
index ab5f2b7..0000000
--- a/aai-traversal/src/main/swm/package/nix/common/install_postproc.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/sh
-
-###
-# ============LICENSE_START=======================================================
-# org.onap.aai
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-###
-
-##############################################################################
-# - SCLD GRM SERVICE
-# - Copyright 2009 AT&T Intellectual Properties
-##############################################################################
-
-
-. `dirname $0`/install.env
-
-cd ${ROOT_DIR};
-
-TEMPLATE_YAML_FILE=${ROOT_DIR}/docker-compose.template.yaml
-YAML_FILE=${ROOT_DIR}/docker-compose.yaml
-
-sh ${UTILPATH}/findreplace.sh ${TEMPLATE_YAML_FILE} ${YAML_FILE} || exit 200
-rm ${TEMPLATE_YAML_FILE}
-
-exit 0
diff --git a/aai-traversal/src/main/swm/package/nix/deinstall/preproc/pre_proc b/aai-traversal/src/main/swm/package/nix/deinstall/preproc/pre_proc
deleted file mode 100644
index 5f88c41..0000000
--- a/aai-traversal/src/main/swm/package/nix/deinstall/preproc/pre_proc
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-
-cd ../../common
-exec sh -x ./deinstall_preproc.sh
diff --git a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/docker-compose.template.yaml b/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/docker-compose.template.yaml
deleted file mode 100644
index 7d31b8b..0000000
--- a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/docker-compose.template.yaml
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-# ============LICENSE_START=======================================================
-# org.onap.aai
-# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-# SCLD_ENV is for the environment context for dme2
-# AAI_CHEF_ENV is used both for dme2 properties and
-# by chef to generate appropriate properties
-
-version: '2'
-services:
- aai-traversal:
- image: __REGISTRY__/__NAMESPACE__/aai-traversal:__IMAGE_VERSION__
- network_mode: host
- environment:
- - LOCAL_USER_ID=__LOCAL_USER_ID__
- - LOCAL_GROUP_ID=__LOCAL_GROUP_ID__
- volumes:
- - /opt/aai/logroot/AAI-GQ:/opt/aai/logroot/AAI-GQ
- - /opt/app/aai-traversal/appconfig/aai-client-cert.p12:/opt/app/aai-traversal/resources/etc/auth/aai-client-cert.p12
- - /opt/app/aai-traversal/appconfig/tomcat_keystore:/opt/app/aai-traversal/resources/etc/auth/tomcat_keystore
- - /opt/app/aai-traversal/appconfig/aai_policy.json:/opt/app/aai-traversal/resources/etc/auth/aai_policy.json
- - /opt/app/aai-traversal/appconfig/aaiconfig.properties:/opt/app/aai-traversal/resources/etc/appprops/aaiconfig.properties
- - /opt/app/aai-traversal/appconfig/application.properties:/opt/app/aai-traversal/resources/application.properties
- - /opt/app/aai-traversal/appconfig/dme2.properties:/opt/app/aai-traversal/resources/dme2.properties
- - /opt/app/aai-traversal/appconfig/localhost-access-logback.xml:/opt/app/aai-traversal/resources/localhost-access-logback.xml
- - /opt/app/aai-traversal/appconfig/logback.xml:/opt/app/aai-traversal/resources/logback.xml
- - /opt/app/aai-traversal/appconfig/janusgraph-cached.properties:/opt/app/aai-traversal/resources/etc/appprops/janusgraph-cached.properties
- - /opt/app/aai-traversal/appconfig/janusgraph-realtime.properties:/opt/app/aai-traversal/resources/etc/appprops/janusgraph-realtime.properties
- logging:
- driver: "json-file"
- options:
- max-size: "30m"
- max-file: "5"
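
The __REGISTRY__, __NAMESPACE__, __IMAGE_VERSION__, __LOCAL_USER_ID__ and __LOCAL_GROUP_ID__ tokens in this removed template were filled in at install time by findreplace.sh (see the removed install_postproc.sh above). A hypothetical substituted image line, with the registry and version values assumed purely for illustration:

    image: nexus3.onap.org:10001/onap/aai-traversal:1.2.0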
diff --git a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/execTool.sh b/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/execTool.sh
deleted file mode 100644
index ab634c0..0000000
--- a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/execTool.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/bin/bash
-
-export WORKING_DIR="$( cd "$(dirname "$0")" ; pwd -P )/"
-
-DOCKER_COMPOSE_CMD="docker-compose -f ${WORKING_DIR}/docker-compose.yaml";
-
-ARG=$1;
-
-if [ -z "$ARG" ]; then
- echo "Error: You need to at least provide one argument which is the script to execute";
- exit 1;
-fi;
-
-if [ "${ARG}" = "--debug" ]; then
-
- SCRIPT_NAME=$2;
-
- if [ -z "$SCRIPT_NAME" ]; then
- echo "Error: You need to provide the tool name after specifying the --debug flag";
- exit 1;
- fi;
-
- shift 2;
-
-else
- SCRIPT_NAME=$1;
- shift 1;
-fi;
-
-CONTAINER_NAME=$(${DOCKER_COMPOSE_CMD} ps -q aai-traversal);
-
-if [ $? -ne 0 ]; then
- echo "Error: seems like the container is not running, please run the commands to start aai-traversal";
- exit 1;
-fi;
-
-if [ ${SCRIPT_NAME} = "putTool.sh" ]; then
-
- PAYLOAD_FILE=$2;
-
- if [ ! -z "${PAYLOAD_FILE}" ] && [ -f "${PAYLOAD_FILE}" ]; then
- docker cp ${PAYLOAD_FILE} ${CONTAINER_NAME}:/tmp/$(basename ${PAYLOAD_FILE})
- fi;
-fi;
-
-${DOCKER_COMPOSE_CMD} exec --user aaiadmin aai-traversal ls /opt/app/aai-traversal/scripts/${SCRIPT_NAME} && {
-
- if [ "${ARG}" = "--debug" ]; then
- ${DOCKER_COMPOSE_CMD} exec --user aaiadmin aai-traversal bash -x /opt/app/aai-traversal/scripts/${SCRIPT_NAME} "$@"
- else
- ${DOCKER_COMPOSE_CMD} exec --user aaiadmin aai-traversal /opt/app/aai-traversal/scripts/${SCRIPT_NAME} "$@"
- fi;
-
- exit 0;
-} || {
- echo "Unable to find the tool in the /opt/app/aai-traversal/scripts";
- exit 1;
-}
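
The removed execTool.sh wrapped the traversal tools so they ran inside the running aai-traversal container, copying a putTool.sh payload into the container's /tmp first. A hedged sketch of two invocations, with the resource path and payload file names assumed only for illustration:

    ./execTool.sh putTool.sh business/customers/customer/JohnDoe ./customerpayload.json
    ./execTool.sh --debug putTool.sh business/customers/customer/JohnDoe ./customerpayload.json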
diff --git a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/kill_resources.sh b/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/kill_resources.sh
deleted file mode 100644
index 00855da..0000000
--- a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/kill_resources.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-. /etc/profile.d/aai.sh
-PROJECT_HOME=/opt/app/aai-traversal
-
-docker-compose -f ${PROJECT_HOME}/docker-compose.yaml stop && \
- docker-compose -f ${PROJECT_HOME}/docker-compose.yaml rm -f
diff --git a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/start_resources.sh b/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/start_resources.sh
deleted file mode 100644
index 61819d4..0000000
--- a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/start_resources.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-. /etc/profile.d/aai.sh
-PROJECT_HOME=/opt/app/aai-traversal
-
-docker-compose -f ${PROJECT_HOME}/docker-compose.yaml up -d || exit 200
diff --git a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/stop_resources.sh b/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/stop_resources.sh
deleted file mode 100644
index 009e597..0000000
--- a/aai-traversal/src/main/swm/package/nix/dist_files/opt/app/aai-traversal/stop_resources.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-. /etc/profile.d/aai.sh
-PROJECT_HOME=/opt/app/aai-traversal
-
-docker-compose -f ${PROJECT_HOME}/docker-compose.yaml stop || exit 200
diff --git a/aai-traversal/src/main/swm/package/nix/initinst/postproc/post_proc b/aai-traversal/src/main/swm/package/nix/initinst/postproc/post_proc
deleted file mode 100644
index d017750..0000000
--- a/aai-traversal/src/main/swm/package/nix/initinst/postproc/post_proc
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-
-cd ../../common
-exec sh -x ./install_postproc.sh
diff --git a/aai-traversal/src/main/swm/package/nix/install/postproc/post_proc b/aai-traversal/src/main/swm/package/nix/install/postproc/post_proc
deleted file mode 100644
index d017750..0000000
--- a/aai-traversal/src/main/swm/package/nix/install/postproc/post_proc
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-
-cd ../../common
-exec sh -x ./install_postproc.sh
diff --git a/aai-traversal/src/main/swm/package/nix/install/preproc/pre_proc b/aai-traversal/src/main/swm/package/nix/install/preproc/pre_proc
deleted file mode 100644
index 3f1b26f..0000000
--- a/aai-traversal/src/main/swm/package/nix/install/preproc/pre_proc
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-
-cd ../../common
-exec sh -x ./install_preproc.sh