author     Venkata Harish K Kajur <vk250x@att.com>    2017-09-28 17:44:52 -0400
committer  Venkata Harish K Kajur <vk250x@att.com>    2017-09-28 17:44:57 -0400
commit     bed3675b3616e089d8581b7cb755adb754e2f3f9 (patch)
tree       c9be7f3df10e18079a8e1e83330652086771fd8b /aai-traversal/src/main/java/org/onap
parent     e76326c68fb9b1dff847275fde0f2cc78ce2f8a2 (diff)
Change all the packages from openecomp to onap
Issue-ID: AAI-61 AAI-82
Change-Id: Iba7de4bd0dea1398d8608c9c514d78ad69201d0a
Signed-off-by: Venkata Harish K Kajur <vk250x@att.com>
Diffstat (limited to 'aai-traversal/src/main/java/org/onap')
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/ajsc_aai/JaxrsErrorMessageLookupService.java  99
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/ajsc_aai/JaxrsUserService.java  55
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesListener.java  21
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesMap.java  127
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertyService.java  21
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/ajsc_aai/util/ServicePropertiesMapBean.java  38
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/config/DmaapConfig.java  43
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ModelBasedProcessing.java  3759
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ResultSet.java  169
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/dbgraphmap/SearchGraph.java  1143
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java  27
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/AAILogJAXRSInInterceptor.java  286
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/AAILogJAXRSOutInterceptor.java  303
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/PostAaiAjscInterceptor.java  64
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/interceptors/PreAaiAjscInterceptor.java  55
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/ExceptionHandler.java  130
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/QueryConsumer.java  197
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java  144
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/retired/V3ThroughV7Consumer.java  29
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/retired/V7V8NamedQueries.java  29
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java  231
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinGroovyShellSingleton.java  89
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerImpl.java  77
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerSingleton.java  131
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyQueryBuilderSingleton.java  97
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java  48
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/ModelAndNamedQueryRestProvider.java  200
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/QueryProcessorType.java  28
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/search/SearchProvider.java  254
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/util/EchoResponse.java  122
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/util/LogFormatTools.java  37
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/rest/util/ValidateEncoding.java  161
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/transforms/Converter.java  26
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/transforms/LowerCamelToLowerHyphenConverter.java  35
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/transforms/LowerHyphenToLowerCamelConverter.java  82
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/transforms/MapTraverser.java  88
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java  110
-rw-r--r--  aai-traversal/src/main/java/org/onap/aai/util/MakeNamedQuery.java  254
38 files changed, 8809 insertions, 0 deletions
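
The change itself is a mechanical namespace move: each file's package declaration and imports go from org.openecomp to org.onap (only the new org/onap side appears above because the diffstat is limited to that path, so the listing shows insertions only). A hypothetical before/after for one of the files listed, purely to illustrate the rename pattern -- the org.openecomp lines are assumed and are not part of this diff:

-package org.openecomp.aai.ajsc_aai.util;
+package org.onap.aai.ajsc_aai.util;

-import org.openecomp.aai.ajsc_aai.filemonitor.ServicePropertiesMap;
+import org.onap.aai.ajsc_aai.filemonitor.ServicePropertiesMap;
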
diff --git a/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/JaxrsErrorMessageLookupService.java b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/JaxrsErrorMessageLookupService.java
new file mode 100644
index 0000000..6aa1f29
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/JaxrsErrorMessageLookupService.java
@@ -0,0 +1,99 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.ajsc_aai;
+
+//import java.util.HashMap;
+//import java.util.Map;
+
+//import javax.ws.rs.GET;
+//import javax.ws.rs.HeaderParam;
+//import javax.ws.rs.Path;
+//import javax.ws.rs.PathParam;
+//import javax.ws.rs.Produces;
+
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//import org.springframework.web.context.ContextLoader;
+//import org.springframework.web.context.WebApplicationContext;
+
+//import ajsc.ErrorMessageLookupService;
+
+//@Path("/errormessage")
+//public class JaxrsErrorMessageLookupService {
+
+ //private final static Logger logger = LoggerFactory
+ //.getLogger(ErrorMessageLookupService.class);
+
+ //*
+ //Gets the message.
+
+ //@param input the input
+ //@param errorCode the error code
+ //@param appId the app id
+ //@param operation the operation
+ //@param messageText the message text
+ //@param isRESTService the is REST service
+ //@param faultEntity the fault entity
+ //@param ConvID the conv ID
+ //@return the message
+ //@GET
+ //@Path("/emls")
+ //@Produces("text/plain")
+ //public String getMessage(@PathParam("input") String input,
+ //@HeaderParam("errorCode") String errorCode,
+ //@HeaderParam("appId") String appId,
+ //@HeaderParam("operation") String operation,
+ //@HeaderParam("messageText") String messageText,
+ //@HeaderParam("isRESTService") String isRESTService,
+ //@HeaderParam("faultEntity") String faultEntity,
+ //@HeaderParam("ConvID") String ConvID) {
+
+ //Map<String, String> headers = new HashMap<String, String>();
+ //headers.put(errorCode, errorCode);
+ //headers.put(appId, appId);
+ //headers.put(operation, operation);
+ //headers.put(messageText, messageText);
+ //headers.put(isRESTService, isRESTService);
+ //headers.put(faultEntity, faultEntity);
+ //headers.put(ConvID, ConvID);
+
+ //WebApplicationContext applicationContext = ContextLoader
+ //.getCurrentWebApplicationContext();
+
+ //ErrorMessageLookupService e = (ErrorMessageLookupService) applicationContext
+ //.getBean("errorMessageLookupService");
+
+ //String message = e.getExceptionDetails(appId, operation, errorCode,
+ //messageText,isRESTService, faultEntity, ConvID);
+
+ //System.out.println("Error code = " + errorCode);
+ //System.out.println("appId = " + appId);
+ //System.out.println("operation = " + operation);
+ //System.out.println("messageText = " + messageText);
+ //System.out.println("isRESTService = " + isRESTService);
+ //System.out.println("faultEntity = " + faultEntity);
+ //System.out.println("ConvID = " + ConvID);
+ //return "The exception message is:\n " + message;
+ //}
+
+//}
+
diff --git a/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/JaxrsUserService.java b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/JaxrsUserService.java
new file mode 100644
index 0000000..a1cc2ca
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/JaxrsUserService.java
@@ -0,0 +1,55 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.ajsc_aai;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import java.util.Map;
+import java.util.HashMap;
+
+@Path("/user")
+public class JaxrsUserService {
+
+ private static final Map<String,String> userIdToNameMap;
+ static {
+ userIdToNameMap = new HashMap<String,String>();
+ userIdToNameMap.put("userID1","Name1");
+ userIdToNameMap.put("userID2","Name2");
+ }
+
+ /**
+ * Lookup user.
+ *
+ * @param userId the user id
+ * @return the string
+ */
+ @GET
+ @Path("/{userId}")
+ @Produces("text/plain")
+ public String lookupUser(@PathParam("userId") String userId) {
+ String name = userIdToNameMap.get(userId);
+ return name != null ? name : "unknown id";
+ }
+
+}
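
JaxrsUserService above is self-contained (a static two-entry map), so it can be exercised without the AJSC/JAX-RS runtime. A minimal sketch -- the demo class is hypothetical, while the service class and values come from the file above:

import org.onap.aai.ajsc_aai.JaxrsUserService;

public class JaxrsUserServiceDemo {
    public static void main(String[] args) {
        JaxrsUserService svc = new JaxrsUserService();
        System.out.println(svc.lookupUser("userID1")); // "Name1" -- key present in the static map
        System.out.println(svc.lookupUser("userID9")); // "unknown id" -- fallback for unmapped ids
    }
}

When deployed, the same lookup is exposed over HTTP at the JAX-RS path /user/{userId} with a text/plain response.
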
diff --git a/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesListener.java b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesListener.java
new file mode 100644
index 0000000..c806045
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesListener.java
@@ -0,0 +1,21 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
diff --git a/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesMap.java b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesMap.java
new file mode 100644
index 0000000..7274c61
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertiesMap.java
@@ -0,0 +1,127 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.ajsc_aai.filemonitor;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class ServicePropertiesMap
+{
+ private static HashMap<String, HashMap<String, String>> mapOfMaps = new HashMap<String, HashMap<String, String>>();
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(ServicePropertiesMap.class);
+
+ /**
+ * Refresh.
+ *
+ * @param file the file
+ * @throws Exception the exception
+ */
+ public static void refresh(File file) throws Exception
+ {
+ try
+ {
+ LOGGER.info("Loading properties - " + (file != null?file.getName():""));
+
+ //Store .json & .properties files into map of maps
+ String filePath = file.getPath();
+
+ if(filePath.lastIndexOf(".json")>0){
+
+ ObjectMapper om = new ObjectMapper();
+ TypeReference<HashMap<String, String>> typeRef = new TypeReference<HashMap<String, String>>() {};
+ HashMap<String, String> propMap = om.readValue(file, typeRef);
+ HashMap<String, String> lcasePropMap = new HashMap<String, String>();
+ for (String key : propMap.keySet() )
+ {
+ String lcaseKey = ifNullThenEmpty(key);
+ lcasePropMap.put(lcaseKey, propMap.get(key));
+ }
+
+ mapOfMaps.put(file.getName(), lcasePropMap);
+
+
+ }else if(filePath.lastIndexOf(".properties")>0){
+ Properties prop = new Properties();
+ // try-with-resources ensures the input stream is closed even if load() throws
+ try (FileInputStream fis = new FileInputStream(file)) {
+     prop.load(fis);
+ }
+
+ @SuppressWarnings("unchecked")
+ HashMap<String, String> propMap = new HashMap<String, String>((Map)prop);
+
+ mapOfMaps.put(file.getName(), propMap);
+ }
+
+ LOGGER.info("File - " + file.getName() + " is loaded into the map and the corresponding system properties have been refreshed");
+ }
+ catch (Exception e)
+ {
+ LOGGER.error("File " + (file != null?file.getName():"") + " cannot be loaded into the map ", e);
+ throw new Exception("Error reading map file " + (file != null?file.getName():""), e);
+ }
+ }
+
+ /**
+ * Gets the property.
+ *
+ * @param fileName the file name
+ * @param propertyKey the property key
+ * @return the property
+ */
+ public static String getProperty(String fileName, String propertyKey)
+ {
+ HashMap<String, String> propMap = mapOfMaps.get(fileName);
+ return propMap!=null?propMap.get(ifNullThenEmpty(propertyKey)):"";
+ }
+
+ /**
+ * Gets the properties.
+ *
+ * @param fileName the file name
+ * @return the properties
+ */
+ public static HashMap<String, String> getProperties(String fileName){
+ return mapOfMaps.get(fileName);
+ }
+
+ /**
+ * If null then empty.
+ *
+ * @param key the key
+ * @return the string
+ */
+ private static String ifNullThenEmpty(String key) {
+ if (key == null) {
+ return "";
+ } else {
+ return key;
+ }
+ }
+
+}
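
ServicePropertiesMap caches each .json or .properties file under its file name in a static map of maps; the filemonitor classes in this package are presumably what invoke refresh() when a watched file changes. A minimal sketch of driving it by hand -- the file name and property key are hypothetical:

import java.io.File;
import org.onap.aai.ajsc_aai.filemonitor.ServicePropertiesMap;

public class ServicePropertiesMapDemo {
    public static void main(String[] args) throws Exception {
        File f = new File("aai-service.properties");   // assumed file on disk
        ServicePropertiesMap.refresh(f);               // parses and caches it under the key "aai-service.properties"
        String v = ServicePropertiesMap.getProperty("aai-service.properties", "some.key");
        // v is the value from the file, null if the key is absent, or "" if the file was never loaded
        System.out.println(v);
    }
}
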
diff --git a/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertyService.java b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertyService.java
new file mode 100644
index 0000000..c806045
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/filemonitor/ServicePropertyService.java
@@ -0,0 +1,21 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
diff --git a/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/util/ServicePropertiesMapBean.java b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/util/ServicePropertiesMapBean.java
new file mode 100644
index 0000000..71c290b
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/ajsc_aai/util/ServicePropertiesMapBean.java
@@ -0,0 +1,38 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.ajsc_aai.util;
+
+import org.onap.aai.ajsc_aai.filemonitor.ServicePropertiesMap;
+
+public class ServicePropertiesMapBean {
+
+ /**
+ * Gets the property.
+ *
+ * @param propFileName the prop file name
+ * @param propertyKey the property key
+ * @return the property
+ */
+ public static String getProperty(String propFileName, String propertyKey) {
+ return ServicePropertiesMap.getProperty(propFileName, propertyKey);
+ }
+}
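
ServicePropertiesMapBean is a thin static facade over ServicePropertiesMap, so callers do not have to import the filemonitor package directly. A one-line sketch of a call site (file name and key are hypothetical):

String timeout = ServicePropertiesMapBean.getProperty("aai-service.properties", "query.timeout");
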
diff --git a/aai-traversal/src/main/java/org/onap/aai/config/DmaapConfig.java b/aai-traversal/src/main/java/org/onap/aai/config/DmaapConfig.java
new file mode 100644
index 0000000..247ecf7
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/config/DmaapConfig.java
@@ -0,0 +1,43 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.config;
+
+import org.apache.activemq.broker.BrokerService;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class DmaapConfig {
+
+ @Bean(destroyMethod = "stop")
+ public BrokerService brokerService() throws Exception {
+
+ BrokerService broker = new BrokerService();
+ broker.addConnector("tcp://localhost:61446");
+ broker.setPersistent(false);
+ broker.setUseJmx(false);
+ broker.setSchedulerSupport(false);
+ broker.start();
+
+ return broker;
+ }
+}
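
DmaapConfig starts an embedded, non-persistent ActiveMQ broker on tcp://localhost:61446 with JMX and scheduler support disabled; the bean's destroyMethod stops it when the Spring context shuts down. A minimal sketch of a JMS client pointed at that broker -- the connector URL comes from the config above, while the queue name and the test class are assumptions:

import javax.jms.Connection;
import javax.jms.MessageProducer;
import javax.jms.Session;
import org.apache.activemq.ActiveMQConnectionFactory;

public class EmbeddedBrokerSmokeTest {
    public static void main(String[] args) throws Exception {
        ActiveMQConnectionFactory cf = new ActiveMQConnectionFactory("tcp://localhost:61446");
        Connection conn = cf.createConnection();
        try {
            conn.start();
            Session session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
            MessageProducer producer = session.createProducer(session.createQueue("AAI-EVENT")); // queue name assumed
            producer.send(session.createTextMessage("hello"));
        } finally {
            conn.close();
        }
    }
}
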
diff --git a/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ModelBasedProcessing.java b/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ModelBasedProcessing.java
new file mode 100644
index 0000000..5e7f9d9
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ModelBasedProcessing.java
@@ -0,0 +1,3759 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.dbgraphgen;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.onap.aai.db.DbMethHelper;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbgen.PropertyLimitDesc;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.query.builder.QueryBuilder;
+import org.onap.aai.schema.enums.PropertyMetadata;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.db.EdgeRules;
+import org.onap.aai.serialization.db.EdgeType;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.util.AAIConfig;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
+import com.google.common.util.concurrent.SimpleTimeLimiter;
+import com.google.common.util.concurrent.TimeLimiter;
+import com.google.common.util.concurrent.UncheckedTimeoutException;
+
+/**
+ * Utility class that uses Model/Named-Query definitions to navigate the graph.
+ */
+public class ModelBasedProcessing{
+
+ private EELFLogger LOGGER = EELFManager.getInstance().getLogger(ModelBasedProcessing.class);
+ private final int MAX_LEVELS = 50; // max depth allowed for our model - to protect against infinite loop problems
+
+ private TransactionalGraphEngine engine;
+ private Loader loader;
+ private DBSerializer serializer;
+ private DbMethHelper dbMethHelper;
+
+ protected ModelBasedProcessing() {
+
+ }
+ public ModelBasedProcessing(Loader loader, TransactionalGraphEngine engine, DBSerializer serializer) {
+ this.loader = loader;
+ this.engine = engine;
+ this.serializer = serializer;
+ dbMethHelper = new DbMethHelper(loader, engine);
+ }
+ /**
+ * Gets the start nodes and model-ver's.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param passedModelVersionId the passed model-version-id -- optional (unique id for a model-ver)
+ * @param passedModelInvId the passed model-invariant-id -- optional
+ * @param passedModelName the passed model-name -- optional
+ * @param passedTopNodeType the passed top node type -- optional (needed if neither model-invariant-id nor model-version-id is passed)
+ * @param startNodeFilterArrayOfHashes the start node filter array of hashes -- optional (used to locate the first node(s) of instance data)
+ * @param apiVer the api ver
+ * @return HashMap of startNodes and their corresponding model-version-id's
+ * @throws AAIException the AAI exception
+ */
+ public Map<String,String> getStartNodesAndModVersionIds( String transId, String fromAppId,
+ String passedModelVersionId,
+ String passedModelInvId,
+ String passedModelName,
+ String passedTopNodeType,
+ List<Map<String,Object>> startNodeFilterArrayOfHashes,
+ String apiVer )
+ throws AAIException{
+ // ----------------------------------------------------------------------------------------------------
+ // Get a hash for all start-nodes (key = vtxId, val = modelVersionId that applies)
+ // If no start-node-key info is passed, then use either the passed modelVersion or
+ // the passed model-invariant-id or model-name to collect them.
+ // If start-node-key info is given, use it instead to look for start-nodes.
+ // Note: if ONLY start-node-key info is given, then it would have to map to nodes which
+ // have persona data. Otherwise we'd have no way to know what model to collect data with.
+ // ----------------------------------------------------------------------------------------------------
+
+ Iterator<Vertex> startVerts = null;
+ Map<String, String> startVertInfo = new HashMap<>();
+
+ if( startNodeFilterArrayOfHashes.isEmpty() ){
+ // Since they did not give any data to find start instances, we will have to find them
+ // using whatever model-info they provided so we can use it to map to persona-data in the db.
+ if( (passedModelVersionId == null || passedModelVersionId.equals(""))
+ && (passedModelInvId == null || passedModelInvId.equals(""))
+ && (passedModelName == null || passedModelName.equals(""))){
+ throw new AAIException("AAI_6118", "ModelInvariantId or ModelName or ModelVersionId required if no startNodeFilter data passed.");
+ }
+ else {
+ // Use whatever model info they pass to find start-node instances
+ // Get the first/top named-query-element used by this query
+ if( passedModelVersionId != null && !passedModelVersionId.equals("") ){
+ // Need to look up the model-invariant-id and model-version to check against persona data
+ Vertex modVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
+ "model-version-id", passedModelVersionId);
+ Vertex modVtx = getModelGivenModelVer( modVerVtx, "" );
+ String calcModId = modVtx.<String>property("model-invariant-id").orElse(null);
+ // Now we can look up instances that match this model's info
+ if( calcModId != null ){
+ startVerts = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(addDBAliasedSuffix("model-invariant-id"),calcModId).has(addDBAliasedSuffix("model-version-id"),passedModelVersionId);
+ }
+ }
+ else if( passedModelInvId != null && !passedModelInvId.equals("") ){
+ // They gave us the model-invariant-id
+ startVerts = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(addDBAliasedSuffix("model-invariant-id"),passedModelInvId);
+ }
+ else if( passedModelName != null && !passedModelName.equals("") ){
+ List<Vertex> modelVerVtxList = getModelVersUsingName(transId, fromAppId, passedModelName);
+ List<Vertex> startVtxList = new ArrayList<>();
+ // Need to look up the model-inv-ids and model-versions to check against persona data
+ if( !modelVerVtxList.isEmpty() ){
+ for( int i = 0; i < modelVerVtxList.size(); i++ ){
+ String calcModVerId = (modelVerVtxList.get(i)).<String>property("model-version-id").orElse(null);
+ Vertex modVtx = getModelGivenModelVer(modelVerVtxList.get(i),"");
+ String calcModInvId = modVtx.<String>property("model-invariant-id").orElse(null);
+ // Now we can look up instances that match this model's info
+ Iterator<Vertex> tmpStartIter = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(addDBAliasedSuffix("model-invariant-id"),calcModInvId).has(addDBAliasedSuffix("model-version-id"),calcModVerId);
+ while( tmpStartIter.hasNext() ){
+ Vertex tmpStartVert = (Vertex) tmpStartIter.next();
+ startVtxList.add(tmpStartVert);
+ }
+ }
+ }
+ if( !startVtxList.isEmpty() ){
+ startVerts = startVtxList.iterator();
+ }
+ }
+ }
+
+ if( startVerts != null ){
+ while( startVerts.hasNext() ){
+ Vertex tmpStartVert = (Vertex) startVerts.next();
+ String vid = tmpStartVert.id().toString();
+ String tmpModId = tmpStartVert.<String>property(addDBAliasedSuffix("model-invariant-id")).orElse(null);
+ String tmpModVerId = tmpStartVert.<String>property(addDBAliasedSuffix("model-version-id")).orElse(null);
+ startVertInfo.put(vid, tmpModVerId);
+ }
+ }
+ if( startVertInfo.isEmpty() ){
+ throw new AAIException("AAI_6114", "Start Node(s) could not be found for model data passed. " +
+ "(modelVersionId = [" + passedModelVersionId +
+ "], modelInvariantId = [" + passedModelInvId +
+ "], modelName = [" + passedModelName +
+ "])");
+ }
+
+ return startVertInfo;
+ }
+ else {
+ // Use start-node filter info to find start-node(s) - Note - there could also be model info passed that we'll need
+ // to use to trim down the set of start-nodes that we find based on the startNodeFilter data.
+ String modTopNodeType ="";
+ String modInfoStr = "";
+ if( passedModelVersionId != null && !passedModelVersionId.equals("") ){
+ modTopNodeType = getModelVerTopWidgetType( transId, fromAppId, passedModelVersionId, "", "" );
+ modInfoStr = "modelVersionId = (" + passedModelVersionId + ")";
+ }
+ else if( passedModelInvId != null && !passedModelInvId.equals("") ){
+ modTopNodeType = getModelVerTopWidgetType( transId, fromAppId,"", passedModelInvId, "" );
+ modInfoStr = "modelId = (" + passedModelInvId + ")";
+ }
+ else if( passedModelName != null && !passedModelName.equals("") ){
+ modTopNodeType = getModelVerTopWidgetType( transId, fromAppId,"", "", passedModelName );
+ modInfoStr = "modelName = (" + passedModelName + ")";
+ }
+
+ if( modTopNodeType.equals("") ){
+ if( (passedTopNodeType == null) || passedTopNodeType.equals("") ){
+ String msg = "Could not determine the top-node nodeType for this request. modelInfo: [" + modInfoStr + "]";
+ throw new AAIException("AAI_6118", msg);
+ }
+ else {
+ // We couldn't find a top-model-type based on passed in model info, but they
+ // gave us a type to use -- so use it.
+ modTopNodeType = passedTopNodeType;
+ }
+ }
+ else {
+ // we did get a topNode type based on model info - make sure it doesn't contradict
+ // the passed-in one (if there is one)
+ if( passedTopNodeType != null && !passedTopNodeType.equals("")
+ && !passedTopNodeType.equals(modTopNodeType) ){
+ throw new AAIException("AAI_6120", "topNodeType passed in [" + passedTopNodeType
+ + "] does not match nodeType derived for model info passed in: ["
+ + modTopNodeType + "]");
+ }
+ }
+
+ List<String> modelVersionIds2Check = new ArrayList<>();
+ if( (passedModelName != null && !passedModelName.equals("")) ){
+ // They passed a modelName, so find all the model UUIDs (model-version-id's) that map to this
+ modelVersionIds2Check = getModelVerIdsUsingName(transId, fromAppId, passedModelName);
+ }
+ if( (passedModelVersionId != null && !passedModelVersionId.equals("")) ){
+ // They passed in a modelVersionId
+ if( modelVersionIds2Check.isEmpty() ){
+ // There was no modelName passed, so we can use the passed modelVersionId
+ modelVersionIds2Check.add(passedModelVersionId);
+ }
+ else if( modelVersionIds2Check.contains(passedModelVersionId) ){
+ // The passed in uuid does not conflict with what we got using the passed-in modelName.
+ // We'll just use the passed in uuid in this case.
+ // Hopefully they would not be passing strange combinations like this, but we'll try to deal with it.
+ modelVersionIds2Check = new ArrayList<>(); // Clear out what we had
+ modelVersionIds2Check.add(passedModelVersionId);
+ }
+ }
+
+ // We should now be OK with our topNodeType for this request, so we can look for the actual startNodes
+ for( int i=0; i < startNodeFilterArrayOfHashes.size(); i++ ){
+ // Locate the starting node that corresponds to this set of filter data
+ Vertex startVtx = null;
+ try {
+ Optional<Vertex> result = dbMethHelper.searchVertexByIdentityMap(modTopNodeType, startNodeFilterArrayOfHashes.get(i));
+ if (!result.isPresent()) {
+ throw new AAIException("AAI_6114", "No Node of type " + modTopNodeType + " found for properties");
+ }
+ startVtx = result.get();
+ }
+ catch( AAIException e ){
+ String msg = "Could not find startNode of type = [" + modTopNodeType + "], given these params: "
+ + startNodeFilterArrayOfHashes.get(i) + ". msg # from getUniqueNode() = " + e.getMessage();
+ throw new AAIException("AAI_6114", msg);
+ }
+
+ String vid = startVtx.id().toString();
+ String personaModInvId = startVtx.<String>property(addDBAliasedSuffix("model-invariant-id")).orElse(null);
+ String personaModVerId = startVtx.<String>property(addDBAliasedSuffix("model-version-id")).orElse(null);
+
+ // Either this start-node has persona info (which should not contradict any passed-in model info)
+ // or they should have passed in the model to use - so we'd just use that.
+ if( personaModVerId != null && !personaModVerId.equals("") ){
+ // There is persona data in this start-node. So make sure it doesn't contradict any "passed" stuff
+ if( modelVersionIds2Check.isEmpty()
+ && (passedModelInvId == null || passedModelInvId.equals("")) ){
+ // They didn't pass any model info, so use the persona one.
+ startVertInfo.put(vid, personaModVerId);
+ }
+ else if( modelVersionIds2Check.isEmpty()
+ && (passedModelInvId != null && !passedModelInvId.equals("")) ){
+ // They passed in just the modelId - so check it
+ if( passedModelInvId.equals(personaModInvId) ){
+ startVertInfo.put(vid, personaModVerId);
+ }
+ }
+ else if( !modelVersionIds2Check.isEmpty()
+ && (passedModelInvId == null || passedModelInvId.equals("")) ){
+ // They passed in just modelVersionId - so check
+ if( modelVersionIds2Check.contains(personaModVerId) ){
+ startVertInfo.put(vid, personaModVerId);
+ }
+ }
+ else if( !modelVersionIds2Check.isEmpty()
+ && (passedModelInvId != null && !passedModelInvId.equals("")) ){
+ // We have BOTH a modelVersionIds and a modelId to check
+ if( passedModelInvId.equals(personaModInvId)
+ && modelVersionIds2Check.contains(personaModVerId) ){
+ startVertInfo.put(vid, personaModVerId);
+ }
+ }
+ }
+ else {
+ // This start node did not have persona info -- so we will use the passed in model info if they passed one
+ if( passedModelVersionId!= null && !passedModelVersionId.equals("") ){
+ // The model-version-id uniquely identifies a model-ver, so we can use it.
+ startVertInfo.put(vid, passedModelVersionId);
+ }
+ else {
+ throw new AAIException("AAI_6118", "Found startNode but since it does not have persona data, the " +
+ " model-version-id is required. ");
+ }
+ }
+ }
+ }
+
+ return startVertInfo;
+
+ }//end of getStartNodesAndModVersionIds()
+
+
+ /**
+ * Query by model. (really model-ver)
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modelVersionId the model-version-id (unique id in model-ver)
+ * @param modelInvariantId the model-invariant-id (unique id in model)
+ * @param modelName the model name
+ * @param topNodeType - optional (needed if neither model-invariant-id nor model-version-id is passed)
+ * @param startNodeFilterArrayOfHashes the start node filter array of hashes -- optional (used to locate the first node(s) of instance data)
+ * @param apiVer the api ver
+ * @return resultSet
+ * @throws AAIException the AAI exception
+ */
+ public List<ResultSet> queryByModel( String transId, String fromAppId,
+ String modelVersionId,
+ String modelInvariantId,
+ String modelName,
+ String topNodeType,
+ List<Map<String,Object>> startNodeFilterArrayOfHashes,
+ String apiVer )
+ throws AAIException{
+
+ final String transId_f = transId;
+ final String fromAppId_f = fromAppId;
+ final String modelVersionId_f = modelVersionId;
+ final String modelInvId_f = modelInvariantId;
+ final String modelName_f = modelName;
+ final String topNodeType_f = topNodeType;
+ final List<Map<String,Object>> startNodeFilterArrayOfHashes_f = startNodeFilterArrayOfHashes;
+ final String apiVer_f = apiVer;
+
+ // Find out what our time-limit should be
+ int timeLimitSec = 0;
+ String timeLimitString = AAIConfig.get("aai.model.query.timeout.sec");
+ if( timeLimitString != null && !timeLimitString.equals("") ){
+ try {
+ timeLimitSec = Integer.parseInt(timeLimitString);
+ }
+ catch ( Exception nfe ){
+ // Don't worry, we will leave the limit as zero - which tells us not to use it.
+ }
+ }
+
+ if( timeLimitSec <= 0 ){
+ // We will NOT be using a timer
+ return queryByModel_Timed( transId, fromAppId,
+ modelVersionId,
+ modelInvariantId,
+ modelName,
+ topNodeType,
+ startNodeFilterArrayOfHashes,
+ apiVer );
+ }
+
+ List<ResultSet> resultList = new ArrayList<>();
+ TimeLimiter limiter = new SimpleTimeLimiter();
+ try {
+ resultList = limiter.callWithTimeout(new Callable <List<ResultSet>>() {
+ public List<ResultSet> call() throws AAIException {
+ return queryByModel_Timed( transId_f, fromAppId_f,
+ modelVersionId_f,
+ modelInvId_f,
+ modelName_f,
+ topNodeType_f,
+ startNodeFilterArrayOfHashes_f,
+ apiVer_f );
+ }
+ }, timeLimitSec, TimeUnit.SECONDS, true);
+ }
+ catch (AAIException ae) {
+ // Re-throw AAIException so we can tell what happened internally
+ throw ae;
+ }
+ catch (UncheckedTimeoutException ute) {
+ throw new AAIException("AAI_6140", "Query Processing Limit exceeded. (limit = " + timeLimitSec + " seconds)");
+ }
+ catch (Exception e) {
+ throw new AAIException("AAI_6128", "Unexpected exception in queryByModel(): " + e.getMessage() );
+ }
+
+ return resultList;
+ }
+
+
+ /**
+ * Query by model (model-ver) timed.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modelVersionId the model-version-id (unique id in model-ver)
+ * @param modelInvariantId the model-invariant-id (unique id in model)
+ * @param modelName the model name
+ * @param topNodeType the top node type
+ * @param startNodeFilterArrayOfHashes the start node filter array of hashes
+ * @param apiVer the api ver
+ * @return the array list
+ * @throws AAIException the AAI exception
+ */
+ public List<ResultSet> queryByModel_Timed( String transId, String fromAppId,
+ String modelVersionId,
+ String modelInvariantId,
+ String modelName,
+ String topNodeType,
+ List<Map<String,Object>> startNodeFilterArrayOfHashesVal,
+ String apiVer )
+ throws AAIException{
+
+ List<ResultSet> resultArray = new ArrayList<>();
+
+ // NOTE: this method can be used for different styles of queries:
+ //      a) They could pass neither a modelVersionId nor a modelInvariantId but just pass a set of data defining start-nodes.
+ // Note - with no model info, we need them to pass the startNodeType for us to be able to use the
+ // start-node-filter data. We would look at each start node and ensure that each has persona-model info.
+ // Then use whatever model corresponds to each instance to pull that instance's data.
+ // b) They could pass a modelInvariantId, but no modelVersionId and no startNode info. In this case, we
+ //       would look in the database for all nodes that have a model-invariant-id-local that matches what was
+ // passed, and then for each of those instances, pull the data based on the corresponding model.
+ // c) They could pass a model-version-id, but no startNode info. We'd make sure that if a
+ // model-invariant-id was also passed, that it does not conflict - but it really should be null if they
+ // are passing a full model-version-id. Like case -b-, we'd do a query for all nodes
+ // that have persona info that corresponds to the model-version-id passed and then
+ // collect data for each one.
+ // d) They could pass either modelVersionId or modelInvariantId AND startNodeFilter info. In this case we
+ // would look at the model info to figure out what the top-node-type is, then look at the
+ // top-node instances based on the startNodeFilter. We'd only collect data for each instance if
+ //       its persona model info matches what was passed in.
+
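+ // Illustrative only -- a case -d- style call; the processor wiring, ids, filter key/value and api version below are hypothetical:
+ //     ModelBasedProcessing processor = new ModelBasedProcessing(loader, engine, serializer);
+ //     List<Map<String,Object>> filters = new ArrayList<>();
+ //     Map<String,Object> oneFilter = new HashMap<>();
+ //     oneFilter.put("vnf-id", "example-vnf-001");
+ //     filters.add(oneFilter);
+ //     List<ResultSet> sets = processor.queryByModel(transId, fromAppId,
+ //             "example-model-version-id", null, null, "generic-vnf", filters, "v11");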
+
+ // Sorry to do this, but code that gets called with an empty hash as the first array element was causing errors
+ List<Map<String,Object>> startNodeFilterArrayOfHashes = new ArrayList <Map<String,Object>>();
+ if( !startNodeFilterArrayOfHashesVal.isEmpty() ){
+ Map<String,Object> tmpH = startNodeFilterArrayOfHashesVal.get(0);
+ if( !tmpH.isEmpty() ){
+ for( int i=0; i < startNodeFilterArrayOfHashesVal.size(); i++ ){
+ startNodeFilterArrayOfHashes.add( startNodeFilterArrayOfHashesVal.get(i) );
+ }
+ }
+ }
+
+ // ----------------------------------------------------------------------------------------------------------
+ // Get a Hash of all the start-nodes (top instance-data node for a model-ver where we will
+ // start collecting data) for startNode2ModelVerHash:
+ // key = vertex-id for the startNode,
+ // value = model-version-id for the corresponding model-ver
+ // ----------------------------------------------------------------------------------------------------------
+ Map<String, String> startNode2ModelVerHash = getStartNodesAndModVersionIds( transId, fromAppId,
+ modelVersionId, modelInvariantId, modelName, topNodeType,
+ startNodeFilterArrayOfHashes, apiVer );
+
+ //System.out.println("\nDEBUG -- Here's a dump of the startnodes/model-vers: " + startNode2ModelVerHash.toString());
+
+ // --------------------------------------------------------------------------------------------------------
+ // Figure out what-all models (model-ver nodes) we will be dealing with
+ // Note - Instances must all use the same type of start-node, but do not have to all use the same model-ver.
+ // --------------------------------------------------------------------------------------------------------
+ Map<String, Vertex> distinctModelVersHash = new HashMap<>();
+ // For distinctModelVersHash: key = modelVersionId, val= modelVerVertex
+ String startNodeType = "";
+ if( topNodeType != null && !topNodeType.equals("") ){
+ startNodeType = topNodeType;
+ }
+
+ List<String> skipModelVerIdList = new ArrayList<>();
+ List<String> skipStartVertVerIdList = new ArrayList<>();
+ Set <String> snKeySet = startNode2ModelVerHash.keySet();
+ Iterator<String> startNodeIterator = snKeySet.iterator();
+ while( startNodeIterator.hasNext() ){
+ String modVerIdKey = (String) startNodeIterator.next();
+ String modVerId = startNode2ModelVerHash.get(modVerIdKey);
+ if( !distinctModelVersHash.containsKey(modVerId) ){
+ // First time seeing this model-version-id
+ Vertex modVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
+ "model-version-id", modVerId);
+ String tmpNodeType = "";
+ try {
+ tmpNodeType = getModelVerTopWidgetType( modVerVtx, "" );
+ }
+ catch( AAIException ae ){
+ // There must be some old bad data in the db - we will skip over this model-ver since its
+ // model is not good anymore - but will log that this is happening.
+ skipModelVerIdList.add(modVerId);
+ skipStartVertVerIdList.add(modVerIdKey);
+ System.out.println(">>> WARNING - will not collect model data for this vertex since " +
+ "it uses an inconsistent model-ver model. Model-version-id = " + modVerId );
+ }
+
+ if( tmpNodeType != null && !tmpNodeType.equals("") ){
+ if( startNodeType.equals("") ){
+ startNodeType = tmpNodeType;
+ }
+ else if( !startNodeType.equals(tmpNodeType) ){
+ String msg = "Conflict between startNode types for models involved: [" + startNodeType
+ + "], [" + tmpNodeType + "]";
+ throw new AAIException("AAI_6125", msg);
+ }
+ distinctModelVersHash.put(modVerId, modVerVtx);
+ }
+ }
+ }
+
+ //System.out.println("\nDEBUG -- Here's a dump of the DISTINCT model-ver hash: " + distinctModelVersHash.toString() );
+
+ // ------------------------------------------------------------------------------------------------------
+ // Get the "valid-next-step" hash for each distinct model-ver
+ // While we're at it, get a mapping of model-invariant-id|model-version to model-version-id for
+ // the model-vers being used
+ // ------------------------------------------------------------------------------------------------------
+ Map<String, Multimap<String, String>> validNextStepHash = new HashMap<>();
+ // validNextStepHash: key = modelVerId, value = nextStepMap
+ Set <String> keySet = distinctModelVersHash.keySet();
+ Iterator<String> modelVerIterator = keySet.iterator();
+ while( modelVerIterator.hasNext() ){
+ String modVerKey = (String) modelVerIterator.next();
+ if( ! skipModelVerIdList.contains(modVerKey) ){
+ Vertex modelVerVtx = (Vertex)distinctModelVersHash.get(modVerKey);
+ Multimap<String, String> tmpTopoMap = genTopoMap4ModelVer( transId, fromAppId,
+ modelVerVtx, modVerKey);
+ validNextStepHash.put(modVerKey, tmpTopoMap);
+ }
+ }
+
+ // -------------------------------------------------------------------------------------------------
+ // Figure out what the "start-node" for each instance will be (plus the info we will use to
+ // represent that in our topology)
+ // -------------------------------------------------------------------------------------------------
+ List<String> failedPersonaCheckVids = new ArrayList<>();
+ Map<String, String> firstStepInfoHash = new HashMap<>();
+ // For firstStepInfoHash: key = startNodeVtxId, val=topNodeType plus personaData if applicable
+ // ie. the value is what we'd use as the "first-step" for this model.
+ if( !nodeTypeSupportsPersona( startNodeType) ){
+ // This node type doesn't have persona info, so we just use startNodeType for the first-step-info
+ snKeySet = startNode2ModelVerHash.keySet();
+ startNodeIterator = snKeySet.iterator();
+ while( startNodeIterator.hasNext() ){
+ String vtxKey = (String) startNodeIterator.next();
+ firstStepInfoHash.put(vtxKey,startNodeType);
+ }
+ }
+ else {
+ // Need to check that this node's persona data is good and if it is - use it for the first step info
+ snKeySet = startNode2ModelVerHash.keySet();
+ startNodeIterator = snKeySet.iterator();
+ while( startNodeIterator.hasNext() ){
+ String vtxKey = (String) startNodeIterator.next();
+ Iterator<Vertex> vtxIterator = this.engine.asAdmin().getReadOnlyTraversalSource().V(vtxKey);
+ Vertex tmpVtx = (Vertex)vtxIterator.next();
+ String thisVtxModelVerId = startNode2ModelVerHash.get(vtxKey);
+ if( skipModelVerIdList.contains(thisVtxModelVerId) ){
+ // Skip this vertex because it uses a model-ver that is bad
+ continue;
+ }
+ Vertex modelVerVtx = (Vertex)distinctModelVersHash.get(thisVtxModelVerId);
+ Vertex modelVtx = getModelGivenModelVer( modelVerVtx, "" );
+ String modInvId = modelVtx.<String>property("model-invariant-id").orElse(null);
+ String personaModInvId = tmpVtx.<String>property(addDBAliasedSuffix("model-invariant-id")).orElse(null);
+ String personaModVerId = tmpVtx.<String>property(addDBAliasedSuffix("model-version-id")).orElse(null);
+ if( modInvId.equals(personaModInvId) && thisVtxModelVerId.equals(personaModVerId) ){
+ String tmpPersonaInfoStr = startNodeType + "," + personaModInvId + "," + personaModVerId;
+ firstStepInfoHash.put(vtxKey, tmpPersonaInfoStr );
+ }
+ else {
+ // we won't use this start node below when we collect data because it should have
+ // had persona data that matched its model - but it did not.
+ failedPersonaCheckVids.add(vtxKey);
+ }
+ }
+ }
+
+ //System.out.println("\nDEBUG -- Here's a dump of the firstStepInfoHash hash: " + firstStepInfoHash.toString() );
+
+ // ------------------------------------------------------------------------------------------------
+ // Loop through each start-node, collect it's data using collectInstanceData() and put the
+ // resultSet onto the resultArray.
+ // ------------------------------------------------------------------------------------------------
+
+ // Make sure they're not bringing back too much data
+ String maxString = AAIConfig.get("aai.model.query.resultset.maxcount");
+ if( maxString != null && !maxString.equals("") ){
+ int maxSets = 0;
+ try {
+ maxSets = Integer.parseInt(maxString);
+ }
+ catch ( Exception nfe ){
+ // Don't worry, we will leave the max as zero - which tells us not to use it.
+ }
+
+ if( maxSets > 0 && (startNode2ModelVerHash.size() > maxSets) ){
+ String msg = " Query returns " + startNode2ModelVerHash.size() + " resultSets. Max allowed is: " + maxSets;
+ throw new AAIException("AAI_6141", msg);
+ }
+ }
+
+ snKeySet = startNode2ModelVerHash.keySet();
+ startNodeIterator = snKeySet.iterator();
+ while( startNodeIterator.hasNext() ){
+ String topNodeVtxId = (String) startNodeIterator.next();
+ if( failedPersonaCheckVids.contains(topNodeVtxId) ){
+ // Skip this vertex because it failed its persona-data check above
+ continue;
+ }
+ if( skipStartVertVerIdList.contains(topNodeVtxId) ){
+ // Skip this vertex because it uses a model-ver that is bad
+ continue;
+ }
+
+ Iterator<Vertex> vtxIterator = this.engine.asAdmin().getReadOnlyTraversalSource().V(topNodeVtxId);
+ Vertex tmpStartVtx = (Vertex)vtxIterator.next();
+ String elementLocationTrail = firstStepInfoHash.get(topNodeVtxId);
+ String modelVerId = startNode2ModelVerHash.get(topNodeVtxId);
+ Multimap<String, String> validNextStepMap = validNextStepHash.get(modelVerId);
+
+ List<String> vidsTraversed = new ArrayList<>();
+ Map<String,String> emptyDelKeyHash = new HashMap<>();
+ Map<String,String> emptyNQElementHash = new HashMap<>(); // Only applies to Named Queries
+ ResultSet tmpResSet = collectInstanceData( transId, fromAppId,
+ tmpStartVtx, elementLocationTrail,
+ validNextStepMap, vidsTraversed, 0, emptyDelKeyHash, emptyNQElementHash, apiVer );
+
+ resultArray.add(tmpResSet);
+ }
+
+ return resultArray;
+
+ }// queryByModel_Timed()
+
+
+
+ /**
+ * Run delete by model-ver.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modelVersionId the model version id -- unique id for a model-ver node
+ * @param topNodeTypeVal the top node type val -- required if no model-version-id is passed
+ * @param startNodeFilterHash the start node filter hash -- used to locate the first node of instance data
+ * @param apiVer the api ver
+ * @param resVersion the res version -- resourceVersion of the top/first widget in the model instance
+ * @return HashMap (keys = vertexIds that were deleted)
+ * @throws AAIException the AAI exception
+ */
+ public Map<String,String> runDeleteByModel( String transId, String fromAppId,
+ String modelVersionId, String topNodeTypeVal, Map<String,Object> startNodeFilterHash, String apiVer, String resVersion )
+ throws AAIException{
+
+ Map<String,String> retHash = new HashMap<>();
+
+ // Locate the Model-ver node to be used
+ Vertex modelVerVtx = null;
+ if( modelVersionId != null && !modelVersionId.equals("") ){
+ modelVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
+ "model-version-id", modelVersionId);
+ }
+ else {
+ // if they didn't pass the modelVersionId, then we need to use the startNode to figure it out
+ // Locate the starting node based on the start node params
+ if( topNodeTypeVal == null || topNodeTypeVal.equals("") ){
+ throw new AAIException("AAI_6118", "If no model info is passed, then topNodeType is required. ");
+ }
+
+ Optional<Vertex> result = dbMethHelper.searchVertexByIdentityMap(topNodeTypeVal, startNodeFilterHash);
+ if (!result.isPresent()) {
+ throw new AAIException("AAI_6114", "No Node of type " + topNodeTypeVal + " found for properties");
+ }
+ Vertex startVtx = result.get();
+
+ String startVertModVerId = startVtx.<String>property(addDBAliasedSuffix("model-version-id")).orElse(null);
+ modelVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
+ "model-version-id", startVertModVerId);
+ }
+
+ if( modelVerVtx == null ){
+ throw new AAIException("AAI_6114", "Could not determine the model-ver for the given input parameters. ");
+ }
+
+ String topNType = "unknown";
+ String modelType = getModelTypeFromModelVer( modelVerVtx, "" );
+
+ if( modelType.equals("widget") ){
+ // If they want to delete using a widget-level model, that is just a delete of the one
+ // instance of one of our nodes.
+ String widgModNodeType = modelVerVtx.<String>property("model-name").orElse(null);
+ if( (widgModNodeType == null) || widgModNodeType.equals("") ){
+ String msg = "Could not find model-name for the widget model [" + modelVersionId + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+ Optional<Vertex> result = dbMethHelper.locateUniqueVertex(widgModNodeType, startNodeFilterHash);
+ if (!result.isPresent()) {
+ throw new AAIException("AAI_6114", "No Node of type " + topNType + " found for properties");
+ }
+ Vertex widgetVtx = result.get();
+ String widgId = widgetVtx.id().toString();
+ serializer.delete(widgetVtx, resVersion, true);
+ retHash.put(widgId, widgModNodeType);
+ return retHash;
+ }
+
+ // ---------------------------------------------------------------------------------
+ // If we got to here, this must be either a service or resource model.
+ // So, we'll need to get a Hash of which parts of the model to delete.
+ // NOTE- deleteByModel is deleting data based on one specific version of a model.
+ // ---------------------------------------------------------------------------------
+ String chkFirstNodePersonaModInvId = "";
+ String chkFirstNodePersonaModVerId = "";
+ String personaData = "";
+ Vertex firstModElementVertex = getTopElementForSvcOrResModelVer( modelVerVtx, "" );
+ topNType = getModElementWidgetType( firstModElementVertex, "" );
+ if( (topNType == null) || topNType.equals("") ){
+ String msg = "Could not determine the top-node nodeType for model-version-id: [" + modelVersionId + "]";
+ throw new AAIException("AAI_6132", msg);
+ }
+ if( nodeTypeSupportsPersona(topNType) ){
+ Vertex modelVtx = getModelGivenModelVer(modelVerVtx,"");
+ chkFirstNodePersonaModInvId = modelVtx.<String>property("model-invariant-id").orElse(null);
+ chkFirstNodePersonaModVerId = modelVerVtx.<String>property("model-version-id").orElse(null);
+ personaData = "," + chkFirstNodePersonaModInvId + "," + chkFirstNodePersonaModVerId;
+ }
+
+ // Get the deleteKeyHash for this model
+ String incomingTrail = "";
+ Map<String, String> currentHash = new HashMap<>();
+ Map<String, Vertex> modConHash = new HashMap<>();
+ ArrayList <String> vidsTraversed = new ArrayList<>();
+ Map<String, String> delKeyHash = collectDeleteKeyHash( transId, fromAppId,
+ firstModElementVertex, incomingTrail, currentHash, vidsTraversed,
+ 0, modConHash,
+ chkFirstNodePersonaModInvId, chkFirstNodePersonaModVerId );
+
+
+ System.out.println("\n ----DEBUG -----: Delete Hash for model: [" + modelVersionId + "] looks like: ");
+ for( Map.Entry<String, String> entry : delKeyHash.entrySet() ){
+ System.out.println("key = [" + entry.getKey() + "], val = [" + entry.getValue() + "]");
+ }
+ System.out.println("\n -----");
+ // Locate the starting node that we'll use to start looking for instance data
+ Optional<Vertex> result = dbMethHelper.searchVertexByIdentityMap(topNType, startNodeFilterHash);
+ if (!result.isPresent()) {
+ throw new AAIException("AAI_6114", "No Node of type " + topNType + " found for properties");
+ }
+ Vertex startVtx = result.get();
+ if( !chkFirstNodePersonaModInvId.equals("") ){
+ // NOTE: For Service or Resource models, if this is a nodeType that supports personas, then
+ // we need to make sure that the start node matches the persona values.
+ String startVertPersonaModInvId = startVtx.<String>property(addDBAliasedSuffix("model-invariant-id")).orElse(null);
+ String startVertPersonaModVerId = startVtx.<String>property(addDBAliasedSuffix("model-version-id")).orElse(null);
+ if( !chkFirstNodePersonaModInvId.equals(startVertPersonaModInvId)
+ || !chkFirstNodePersonaModVerId.equals(startVertPersonaModVerId) ){
+ String msg = "Persona-Model data mismatch for start node (" + topNType + "), " +
+ startNodeFilterHash ;
+ throw new AAIException("AAI_6114", msg);
+ }
+ }
+ String topVid = startVtx.id().toString();
+
+ // Read the model-ver into a Map for processing
+ Multimap <String, String> validNextStepMap = genTopoMap4ModelVer(transId, fromAppId,
+ modelVerVtx, modelVersionId);
+
+ // Collect the data
+ String elementLocationTrail = topNType + personaData;
+ vidsTraversed = new ArrayList<>();
+ Map<String,String> emptyHash = new HashMap<>();
+
+ // Pass emptyHash for the NQElement hash since that parameter only applies to Named Queries
+ ResultSet retResSet = collectInstanceData( transId, fromAppId,
+ startVtx, elementLocationTrail,
+ validNextStepMap, vidsTraversed, 0, delKeyHash, emptyHash, apiVer );
+
+ // Note: the new ResultSet will have each element tagged with the del flag so we'll know whether
+ // it should be deleted or not. Loop through the results in a try-block and try to remove each one,
+ // since some things will get auto-deleted by parents before we get to them.
+ String vidToResCheck = topVid;
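+ // (only this top/start vertex will have its resource-version checked during the delete pass below)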
+
+ retHash = deleteAsNeededFromResultSet( transId, fromAppId, retResSet,
+ vidToResCheck, apiVer, resVersion, emptyHash );
+ //String msgStr = "processed deletes for these vids: (\n"+ retHash.keySet().toString() + ").";
+
+ return retHash;
+
+ }// End of runDeleteByModel()
+
+
+
+ /**
+ * Delete as needed from result set.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param resSet the res set
+ * @param vidToResCheck -- this vertex will need to have its resource-version checked
+ * @param apiVer the api ver
+ * @param resVersion the res version
+ * @param hashSoFar the hash so far -- hash of what's been deleted so far
+ * @return Map of the vertex-ids (and descriptions) deleted so far
+ * @throws AAIException the AAI exception
+ */
+ public Map<String,String> deleteAsNeededFromResultSet( String transId, String fromAppId,
+ ResultSet resSet, String vidToResCheck, String apiVer, String resVersion, Map<String,String> hashSoFar )
+ throws AAIException
+ {
+ Map<String,String> retHash = new HashMap<>();
+ retHash.putAll( hashSoFar );
+ Boolean deleteIt = false;
+
+ if( resSet.getVert() == null ){
+ return retHash;
+ }
+
+ Vertex thisVtx = resSet.getVert();
+ String thisGuyId = "";
+ String thisNT = "";
+ String thisGuyStr = "";
+
+ Boolean gotVtxOK = false;
+ try {
+ if( thisVtx != null ){
+ thisGuyId = thisVtx.id().toString();
+ thisNT = thisVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ thisGuyStr = thisGuyId + "[" + thisNT + " found at:" + resSet.getLocationInModelSubGraph() + "]";
+
+ // NOTE -- will try to set the NodeType to itself to see if the node has been deleted already in
+ // this transaction. It lets you get properties from nodes being deleted where the
+ // delete hasn't been committed yet. This check used to be accomplished with a call to
+ // "vtx.isRemoved()" but that was a Titan-only feature and is not available anymore since
+ // we no longer use Titan vertices.
+ // If we don't do this check, we get errors later when we try to delete the node.
+ thisVtx.property(AAIProperties.NODE_TYPE, thisNT);
+ gotVtxOK = true;
+ }
+ }
+ catch (Exception ex) {
+ // Sometimes things have already been deleted by the time we get to them - just log it.
+ LOGGER.warn("Exception when trying to delete: " + thisGuyStr + ". msg = " + ex.getMessage(), ex);
+ }
+
+ if( !gotVtxOK ){
+ // The vertex must have already been removed. Just return.
+ // Note - We need to catch this because the DB sometimes can still have the vtx
+ // and be able to get its ID but it is flagged internally as removed already.
+ return retHash;
+ }
+ else {
+ if( resSet.getNewDataDelFlag() != null && resSet.getNewDataDelFlag().equals("T") ){
+ LOGGER.info(">> will try to delete this one >> " + thisGuyStr);
+
+ try {
+ Boolean requireResourceVersion = false;
+ if( thisGuyId.equals(vidToResCheck) ){
+ // This is the one vertex that we want to check the resourceId before deleting
+ requireResourceVersion = true;
+ }
+ this.serializer.delete(thisVtx, resVersion, requireResourceVersion);
+ }
+ catch (AAIException ae) {
+ String errorCode = ae.getErrorObject().getErrorCode();
+ if ( errorCode.equals("6130") || errorCode.equals("6131") ) {
+ // They didn't pass the correct resource-version for the top node.
+ throw ae;
+ }
+ else {
+ String errText = ae.getErrorObject().getErrorText();
+ String errDetail = ae.getMessage();
+ LOGGER.warn("Exception when deleting " + thisGuyStr + ". ErrorCode = " + errorCode +
+ ", errorText = " + errText + ", details = " + errDetail);
+ }
+ }
+ catch( Exception e ){
+ // We'd expect to get a "node not found" here sometimes depending on the order that
+ // the model has us finding / deleting nodes.
+ // Ignore the exception - but log it so we can see what happened.
+ LOGGER.warn("Exception when deleting " + thisGuyStr + e.getMessage(), e);
+ }
+
+ // We can't depend on a thrown exception to tell us if a node was deleted since it may
+ // have been auto-deleted before this delete attempt.
+ // --- Not sure if we would want to check anything here -- because the graph.commit() is done outside of this call.
+
+ deleteIt = true;
+ }
+ else {
+ // --- DEBUG ----
+ LOGGER.debug(">>>>>>> NOT DELETING THIS ONE >>>> " + thisGuyStr );
+ List<String> retArr = dbMethHelper.getVertexProperties(thisVtx);
+ for( String info : retArr ){ LOGGER.debug(info); }
+ // --- DEBUG ----
+ }
+ }
+
+ // Now call this routine for the sub-resultSets
+ List <ResultSet> subResultSetList = resSet.getSubResultSet();
+ Iterator <ResultSet> subResSetIter = subResultSetList.iterator();
+ while( subResSetIter.hasNext() ){
+ ResultSet tmpSubResSet = subResSetIter.next();
+ retHash = deleteAsNeededFromResultSet( transId, fromAppId, tmpSubResSet,
+ vidToResCheck, apiVer, resVersion, retHash );
+ }
+
+ if( deleteIt ){
+ retHash.put(thisGuyId, thisGuyStr);
+ }
+
+ return retHash;
+
+ }// deleteAsNeededFromResultSet()
+
+
+
+ /**
+ * Query by named query (old version).
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param namedQueryUuid the named query uuid
+ * @param startNodeFilterArrayOfHashes the start node filter array of hashes --used to locate the first nodes of instance data
+ * @param apiVer the api ver
+ * @return resultSet
+ * @throws AAIException the AAI exception
+ */
+ public List<ResultSet> queryByNamedQuery( String transId, String fromAppId,
+ String namedQueryUuid,
+ ArrayList <Map<String,Object>> startNodeFilterArrayOfHashes,
+ String apiVer )
+ throws AAIException{
+
+ String dummyCutPoint = null;
+ Map<String,Object> dummySecondaryFilterHash = null;
+
+ return queryByNamedQuery( transId, fromAppId,
+ namedQueryUuid,
+ startNodeFilterArrayOfHashes,
+ apiVer,
+ dummyCutPoint,
+ dummySecondaryFilterHash );
+ }
+
+
+ /**
+ * Query by named query.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param namedQueryUuid the named query uuid
+ * @param startNodeFilterArrayOfHashes the start node filter array of hashes --used to locate the first nodes of instance data
+ * @param apiVer the api ver
+ * @param secondaryFilterCutPoint nodeType where we will prune if the secondary filter is not met
+ * @param secondaryFilterHash secondary filter params
+ * @return resultSet
+ * @throws AAIException the AAI exception
+ */
+ public List<ResultSet> queryByNamedQuery( String transId, String fromAppId,
+ String namedQueryUuid,
+ List<Map<String,Object>> startNodeFilterArrayOfHashes,
+ String apiVer,
+ String secondaryFilterCutPoint,
+ Map<String,Object> secondaryFilterHash )
+ throws AAIException{
+
+ final String transId_f = transId;
+ final String fromAppId_f = fromAppId;
+ final String namedQueryUuid_f = namedQueryUuid;
+ final List<Map<String,Object>> startNodeFilterArrayOfHashes_f = startNodeFilterArrayOfHashes;
+ final String apiVer_f = apiVer;
+ final String secondaryFilterCutPoint_f = secondaryFilterCutPoint;
+ final Map<String,Object> secondaryFilterHash_f = secondaryFilterHash;
+
+ // Find out what our time-limit should be
+ int timeLimitSec = 0;
+ String timeLimitString = AAIConfig.get("aai.model.query.timeout.sec");
+ if( timeLimitString != null && !timeLimitString.equals("") ){
+ try {
+ timeLimitSec = Integer.parseInt(timeLimitString);
+ }
+ catch ( NumberFormatException nfe ){
+ // Don't worry, we will leave the limit as zero - which tells us not to use it.
+ }
+ }
+
+ if( timeLimitSec <= 0 ){
+ // We will NOT be using a timer
+ return queryByNamedQuery_Timed( transId, fromAppId,
+ namedQueryUuid,
+ startNodeFilterArrayOfHashes,
+ apiVer,
+ secondaryFilterCutPoint_f,
+ secondaryFilterHash_f );
+ }
+
+ List<ResultSet> resultList = new ArrayList<>();
+ TimeLimiter limiter = new SimpleTimeLimiter();
+ try {
+ resultList = limiter.callWithTimeout(new Callable <List<ResultSet>>() {
+ public List<ResultSet> call() throws AAIException {
+ return queryByNamedQuery_Timed( transId_f, fromAppId_f,
+ namedQueryUuid_f,
+ startNodeFilterArrayOfHashes_f,
+ apiVer_f,
+ secondaryFilterCutPoint_f,
+ secondaryFilterHash_f );
+ }
+ }, timeLimitSec, TimeUnit.SECONDS, true);
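+ // (the final 'true' argument tells the Guava time limiter it may interrupt the query thread when the limit is hit)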
+
+ }
+ catch (AAIException ae) {
+ // Re-throw AAIException so we can tell what happened internally
+ throw ae;
+ }
+ catch (UncheckedTimeoutException ute) {
+ throw new AAIException("AAI_6140", "Query Processing Limit exceeded. (limit = " + timeLimitSec + " seconds)");
+ }
+ catch (Exception e) {
+ throw new AAIException("AAI_6128", "Unexpected exception in queryByNamedQuery(): " + e.getMessage() );
+ }
+
+ return resultList;
+ }
+
+
+ /**
+ * Query by named query timed.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param namedQueryUuid the named query uuid
+ * @param startNodeFilterArrayOfHashes the start node filter array of hashes --used to locate the first nodes of instance data
+ * @param apiVer the api ver
+ * @param secondaryFilterCutPoint the nodeType at which we will apply the secondary filter (prune point)
+ * @param secondaryFilterHash the secondary filter hash
+ * @return resultSet
+ * @throws AAIException the AAI exception
+ */
+ public List<ResultSet> queryByNamedQuery_Timed( String transId, String fromAppId,
+ String namedQueryUuid,
+ List<Map<String,Object>> startNodeFilterArrayOfHashes,
+ String apiVer,
+ String secondaryFilterCutPoint,
+ Map<String,Object> secondaryFilterHash
+ )
+ throws AAIException{
+
+ // Locate the Query to be used
+ Vertex queryVtx = getNodeUsingUniqueId(transId, fromAppId, "named-query",
+ "named-query-uuid", namedQueryUuid);
+
+ // Get the first/top named-query-element used by this query
+ Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, queryVtx, "named-query-element");
+ Vertex firstNqElementVert = null;
+ int count = 0;
+ String topNType = "";
+ while( vertI != null && vertI.hasNext() ){
+ firstNqElementVert = vertI.next();
+ count++;
+ topNType = getNqElementWidgetType( transId, fromAppId, firstNqElementVert, "" );
+ }
+
+ if( count < 1 ){
+ // A named query must start with a single top element
+ throw new AAIException("AAI_6133", "No top-node defined for named-query-uuid = [" + namedQueryUuid + "]");
+ }
+ else if( count > 1 ){
+ // A named query should start with a single top element
+ throw new AAIException("AAI_6133", "More than one top-node defined for named-query-uuid = [" + namedQueryUuid + "]");
+ }
+ if( (topNType == null) || topNType.equals("") ){
+ String msg = "Could not determine the top-node nodeType for Named Query: [" + namedQueryUuid + "]";
+ throw new AAIException("AAI_6133", msg);
+ }
+
+ // Read the topology into a hash for processing
+ Multimap <String, String> validNextStepMap = genTopoMap4NamedQ(transId, fromAppId, queryVtx, namedQueryUuid);
+
+ List<Vertex> startVertList = new ArrayList<>();
+ if( startNodeFilterArrayOfHashes.size() == 1 ){
+ // If there is only one set of startFilter info given, then allow it to match
+ // more than one start node.
+ Map<String, Object> cleanHash = new HashMap<>();
+ Map<String, Object> tmpHash = startNodeFilterArrayOfHashes.get(0);
+ Set <String> propKeySet = tmpHash.keySet();
+ Iterator<String> propIter = propKeySet.iterator();
+ Introspector obj = loader.introspectorFromName(topNType);
+ Set<String> keys = obj.getKeys();
+ boolean foundIndexedField = false;
+ int propertiesSet = 0;
+ while( propIter.hasNext() ){
+ String oldVtxKey = (String) propIter.next();
+ String newKey = oldVtxKey;
+ String [] parts = oldVtxKey.split("\\.");
+ if( parts.length == 2 ){
+ newKey = parts[1];
+ }
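+ // (e.g. an illustrative key like "generic-vnf.vnf-id" would be reduced to just "vnf-id")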
+ Object obVal = tmpHash.get(oldVtxKey);
+ if (obj.hasProperty(newKey)) {
+ if (keys.contains(newKey)) {
+ foundIndexedField = true;
+ }
+ obj.setValue(newKey, obVal);
+ propertiesSet++;
+ }
+ }
+ //we found all the properties in the startNodeType
+ if (propertiesSet == propKeySet.size()) {
+ if (foundIndexedField) {
+ QueryBuilder builder = this.engine.getQueryBuilder().exactMatchQuery(obj);
+ startVertList = builder.toList();
+ } else {
+ //force a filter from aai-node-type
+ QueryBuilder builder = this.engine.getQueryBuilder().createContainerQuery(obj).exactMatchQuery(obj);
+ startVertList = builder.toList();
+ }
+ } else {
+ Optional<Vertex> tmpVtx = dbMethHelper.searchVertexByIdentityMap(topNType, startNodeFilterArrayOfHashes.get(0));
+ // If we found one, just use it.
+ if (tmpVtx.isPresent()) {
+ startVertList.add(tmpVtx.get());
+ }
+ }
+ }
+ else {
+ // Since they give an array of startNodeFilterHash info, we expect each one
+ // to just point to one node.
+ for( int i = 0; i < startNodeFilterArrayOfHashes.size(); i++ ){
+ // Locate the starting node for each set of data
+ Optional<Vertex> tmpVtx = dbMethHelper.searchVertexByIdentityMap(topNType, startNodeFilterArrayOfHashes.get(i));
+ if (tmpVtx.isPresent()) {
+ startVertList.add(tmpVtx.get());
+ }
+ }
+ }
+
+ if (startVertList.isEmpty()) {
+ throw new AAIException("AAI_6114", "No Node of type " + topNType + " found for properties");
+ }
+ // Make sure they're not bringing back too much data
+ String maxString = AAIConfig.get("aai.model.query.resultset.maxcount");
+ if( maxString != null && !maxString.equals("") ){
+ int maxSets = Integer.parseInt(maxString);
+ if( startVertList.size() > maxSets ){
+ String msg = " Query returns " + startVertList.size() + " resultSets. Max allowed is: " + maxSets;
+ throw new AAIException("AAI_6141", msg);
+ }
+ }
+
+ // Loop through each start node and get its data
+ List<ResultSet> resSetList = new ArrayList<>();
+ for( int i = 0; i < startVertList.size(); i++ ){
+ Vertex startVtx = startVertList.get(i);
+ // Collect the data
+ String elementLocationTrail = topNType;
+ ArrayList <String> vidsTraversed = new ArrayList<>();
+ Map<String,String> emptyDelKeyHash = new HashMap<>(); // Does not apply to Named Queries
+
+ // Get the mapping of namedQuery elements to our widget topology for this namedQuery
+ String incomingTrail = "";
+ Map<String, String> currentHash = new HashMap<>();
+
+ Map<String,String> namedQueryElementHash = collectNQElementHash( transId, fromAppId,
+ firstNqElementVert, incomingTrail, currentHash, vidsTraversed, 0 );
+
+ vidsTraversed = new ArrayList<>();
+ ResultSet tmpResSet = collectInstanceData( transId, fromAppId,
+ startVtx, elementLocationTrail,
+ validNextStepMap, vidsTraversed, 0, emptyDelKeyHash, namedQueryElementHash, apiVer );
+ resSetList.add(tmpResSet);
+ }
+
+ // If a secondary filter was defined, we will prune the collected instance data result set(s) based on it.
+ List<ResultSet> prunedResSetList = new ArrayList<>();
+ if( resSetList != null && !resSetList.isEmpty() ){
+ for( int i = 0; i < resSetList.size(); i++ ){
+ if( secondaryFilterCutPoint == null || secondaryFilterCutPoint.equals("") || secondaryFilterHash == null ){
+ // They didn't want to do any pruning, so just use the results we already had
+ prunedResSetList.add(resSetList.get(i));
+ }
+ else {
+ ResultSet tmpResSet = pruneResultSet(resSetList.get(i), secondaryFilterCutPoint, secondaryFilterHash);
+ if( tmpResSet != null ){
+ prunedResSetList.add(tmpResSet);
+ }
+ }
+ }
+ }
+
+ // Since a NamedQuery can mark some nodes as "do-not-display", we need to collapse our resultSet so
+ // that it does not display those nodes.
+ List<ResultSet> collapsedResSetList = new ArrayList<>();
+ if( prunedResSetList != null && !prunedResSetList.isEmpty() ){
+ for( int i = 0; i < prunedResSetList.size(); i++ ){
+ // Note - a single resultSet could be collapsed into many smaller ones if they
+ // marked all the "top" node-elements as do-not-output. I.e. the query may
+ // have had a top-node of "generic-vnf" which joins down to different l-interfaces.
+ // If they only want to see the l-interfaces, then a single result set
+ // would be "collapsed" into many separate resultSets - each of which is
+ // just a single l-interface.
+ List<ResultSet> tmpResSetList = collapseForDoNotOutput(prunedResSetList.get(i));
+ if( tmpResSetList != null && !tmpResSetList.isEmpty() ){
+ for( int x = 0; x < tmpResSetList.size(); x++ ){
+ //showResultSet( tmpResSetList.get(x), 0 ); //DEBUG-- this was just for testing
+ collapsedResSetList.add(tmpResSetList.get(x));
+ }
+ }
+ }
+ }
+
+ return collapsedResSetList;
+
+ }// End of queryByNamedQuery()
+
+
+ /**
+ * Prune a result set as per a secondary filter.
+ *
+ * @param resSetVal the res set val
+ * @param cutPoint the nodeType where the trim will happen
+ * @param secFilterHash hash of properties and values to use as the secondary filter
+ * @return pruned result set
+ * @throws AAIException the AAI exception
+ */
+ public ResultSet pruneResultSet( ResultSet resSetVal, String cutPointType, Map<String,Object> secFilterHash )
+ throws AAIException {
+
+ // Given a ResultSet and some secondary filter info, do pruning as needed
+ ResultSet pResSet = new ResultSet();
+
+ // For this ResultSet, we will see if we are on a node of the type that is our cutPoint;
+ // then only keep it if we peek "below" and see a match for our filter.
+
+ String nt = resSetVal.getVert().<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( nt != null && nt.equals(cutPointType) ){
+ // We are on the type of node that may need to be "pruned" along with its sub-results
+ if( ! satisfiesFilters(resSetVal, secFilterHash) ){
+ // Return an empty result set since we are pruning at this level.
+ return pResSet;
+ }
+ }
+
+ // If we made it to here, we will not be pruning at this level, so we will
+ // be returning a copy of this resultSet that has its subResults pruned (as needed).
+ pResSet.setVert(resSetVal.getVert());
+ pResSet.setDoNotOutputFlag(resSetVal.getDoNotOutputFlag());
+ pResSet.setExtraPropertyHash(resSetVal.getExtraPropertyHash());
+ pResSet.setLocationInModelSubGraph(resSetVal.getLocationInModelSubGraph());
+ pResSet.setNewDataDelFlag(resSetVal.getNewDataDelFlag());
+ pResSet.setPropertyLimitDesc(resSetVal.getPropertyLimitDesc());
+ pResSet.setPropertyOverRideHash(resSetVal.getPropertyOverRideHash());
+
+ if( !resSetVal.getSubResultSet().isEmpty() ){
+ ListIterator<ResultSet> listItr = resSetVal.getSubResultSet().listIterator();
+ List<ResultSet> newSubSetList = new ArrayList<>();
+ while( listItr.hasNext() ){
+ ResultSet tmpSubResSet = pruneResultSet( listItr.next(), cutPointType, secFilterHash );
+ if( tmpSubResSet.getVert() != null ){
+ // This one wasn't pruned - so keep it.
+ newSubSetList.add(tmpSubResSet);
+ }
+ }
+ pResSet.setSubResultSet(newSubSetList);
+ }
+
+ return pResSet;
+
+ }// End pruneResultSet()
+
+
+ /**
+ * Satisfies hash of filters.
+ *
+ * @param resSet the res set
+ * @param filterHash the filter hash
+ * @return true, if successful
+ * @throws AAIException the AAI exception
+ */
+ public boolean satisfiesFilters( ResultSet resSet, Map<String,Object> filterHash )
+ throws AAIException {
+
+ if( filterHash.isEmpty() ){
+ // Nothing to look for, so no, we didn't find it.
+ return false;
+ }
+
+ Iterator <?> it = filterHash.entrySet().iterator();
+ while( it.hasNext() ){
+ Map.Entry<?,?> filtEntry = (Map.Entry<?,?>) it.next();
+ String propNodeTypeDotName = (filtEntry.getKey()).toString();
+ String fpv = (filtEntry.getValue()).toString();
+
+ int periodLoc = propNodeTypeDotName.indexOf(".");
+ if( periodLoc <= 0 ){
+ String emsg = "Bad filter param key passed in: [" + propNodeTypeDotName + "]. Expected format = [nodeName.paramName]\n";
+ throw new AAIException("AAI_6120", emsg);
+ }
+ else {
+ String fnt = propNodeTypeDotName.substring(0,periodLoc);
+ String fpn = propNodeTypeDotName.substring(periodLoc + 1);
+ if( filterMetByThisSet( resSet, fnt, fpn, fpv ) ){
+ //System.out.println(" DEBUG -- satisfied/matched filter: [" + fnt + "|" + fpn + "|" + fpv + "].");
+ }
+ else {
+ //System.out.println(" DEBUG -- NOT satisfied/matched filter: [" + fnt + "|" + fpn + "|" + fpv + "].");
+ return false;
+ }
+ }
+ }
+
+ // Made it through all the filters -- it found what we were looking for.
+ return true;
+
+ }// end of satisfiesFilters()
+
+
+ /**
+ * Filter met by this set.
+ *
+ * @param resSet the res set
+ * @param filtNodeType the filt node type
+ * @param filtPropName the filt prop name
+ * @param filtPropVal the filt prop val
+ * @return true, if successful
+ */
+ public boolean filterMetByThisSet( ResultSet resSet, String filtNodeType, String filtPropName, String filtPropVal ) {
+ // Note - we are just looking for a positive match for one filter for this resultSet.
+ // The filter arrives already split apart: the original key had the format "nodeType.parameterName"
+ // and the value is the parameterValue we need to match.
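+ // (illustrative example values: filtNodeType="vserver", filtPropName="prov-status", filtPropVal="ACTIVE")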
+
+ Vertex vert = resSet.getVert();
+ if( vert == null ){
+ return false;
+ }
+ else {
+ String nt = resSet.getVert().<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( nt != null && nt.equals( filtNodeType ) ){
+ if( filtPropName.equals("vertex-id") ){
+ // vertex-id can't be gotten the same way as other properties
+ String thisVtxId = vert.id().toString();
+ if( thisVtxId.equals(filtPropVal) ){
+ return true;
+ }
+ }
+ else {
+ Object thisValObj = vert.property(filtPropName).orElse(null);
+ if( thisValObj != null ){
+ String thisVal = thisValObj.toString();
+ if( thisVal.equals(filtPropVal) ){
+ return true;
+ }
+ }
+ }
+ }
+ }
+
+ // Didn't find a match at this level, so check whether the sets below it meet the criteria
+ if( resSet.getSubResultSet() != null ){
+ ListIterator<ResultSet> listItr = resSet.getSubResultSet().listIterator();
+ while( listItr.hasNext() ){
+ if( filterMetByThisSet(listItr.next(), filtNodeType, filtPropName, filtPropVal) ){
+ return true;
+ }
+ }
+ }
+
+ return false;
+
+ }// end of filterMetByThisSet()
+
+
+
+ /**
+ * Collapse for do not output.
+ *
+ * @param resSetVal the res set val
+ * @return the array list
+ * @throws AAIException the AAI exception
+ */
+ public List<ResultSet> collapseForDoNotOutput( ResultSet resSetVal )
+ throws AAIException {
+
+ // Given a ResultSet -- if it is tagged to NOT be output, then replace it with
+ // its sub-ResultSets if it has any.
+ List<ResultSet> colResultSet = new ArrayList<>();
+
+ if( resSetVal.getDoNotOutputFlag().equals("true") ){
+ // This ResultSet isn't to be displayed, so replace it with its sub-ResultSets
+ List<ResultSet> subResList = resSetVal.getSubResultSet();
+ for( int k = 0; k < subResList.size(); k++ ){
+ List<ResultSet> newSubResList = collapseForDoNotOutput(subResList.get(k));
+ colResultSet.addAll(newSubResList);
+ }
+ }
+ else {
+ // This set will be displayed
+ colResultSet.add(resSetVal);
+ }
+
+ // For each result set now at this level, call this same routine to collapse their sub-resultSets
+ for( int i = 0; i < colResultSet.size(); i++ ){
+ List<ResultSet> newSubSet = new ArrayList<>();
+ List<ResultSet> subResList = colResultSet.get(i).getSubResultSet();
+ for( int n = 0; n < subResList.size(); n++ ){
+ List<ResultSet> newSubResList = collapseForDoNotOutput(subResList.get(n));
+ newSubSet.addAll(newSubResList);
+ }
+ // Replace the old subResultSet with the collapsed set
+ colResultSet.get(i).setSubResultSet(newSubSet);
+ }
+
+ return colResultSet;
+
+ }// End collapseForDoNotOutput()
+
+
+
+ /**
+ * Collect instance data.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param thisLevelElemVtx the element vtx at this level
+ * @param thisVertsTrail the trail of nodeTypes that got us here (to this element vertex) from the top
+ * @param validNextStepMap the valid next step map -- hash of valid next steps (node types) for this model
+ * @param vidsTraversed the vids traversed -- ArrayList of vertexId's that we traversed to get to this point
+ * @param levelCounter the level counter
+ * @param delKeyHash -- hashMap of which spots on our topology should be deleted during a modelDelete
+ * @param namedQueryElementHash - hashMap which maps each spot in our widget topology to the NamedQueryElement that it corresponds to
+ * @param apiVer the api ver
+ * @return resultSet
+ * @throws AAIException the AAI exception
+ */
+ public ResultSet collectInstanceData( String transId, String fromAppId,
+ Vertex thisLevelElemVtx,
+ String thisVertsTrail,
+ Multimap <String,String> validNextStepMap,
+ List<String> vidsTraversed,
+ int levelCounter,
+ Map<String,String> delKeyHash, // only applies when collecting data using the default model for delete
+ Map<String,String> namedQueryElementHash, // only applies to named-query data collecting
+ String apiVer
+ ) throws AAIException {
+
+ levelCounter++;
+
+ String thisElemVid = thisLevelElemVtx.id().toString();
+
+ if( levelCounter > MAX_LEVELS ) {
+ throw new AAIException("AAI_6125", "collectInstanceData() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
+ }
+
+ ResultSet rs = new ResultSet();
+ if( namedQueryElementHash.containsKey(thisVertsTrail) ){
+ // We're collecting data for a named-query, so need to see if we need to do anything special
+ String nqElUuid = namedQueryElementHash.get(thisVertsTrail);
+ Vertex nqElementVtx = getNodeUsingUniqueId(transId, fromAppId, "named-query-element",
+ "named-query-element-uuid", nqElUuid);
+
+ String tmpDoNotShow = nqElementVtx.<String>property("do-not-output").orElse(null);
+ if( tmpDoNotShow != null && tmpDoNotShow.equals("true") ){
+ rs.setDoNotOutputFlag("true");
+ }
+
+ if( namedQueryConstraintSaysStop(transId, fromAppId, nqElementVtx, thisLevelElemVtx, apiVer) ){
+ // There was a property constraint which says they do not want to collect this vertex or whatever
+ // might be below it. Just return the empty rs here.
+ return rs;
+ }
+
+ String propLimDesc = nqElementVtx.<String>property("property-limit-desc").orElse(null);
+ if( (propLimDesc != null) && !propLimDesc.equals("") ){
+ if (propLimDesc.equalsIgnoreCase("show-all")) {
+ rs.setPropertyLimitDesc(PropertyLimitDesc.SHOW_ALL);
+ } else if (propLimDesc.equalsIgnoreCase("show-none")) {
+ rs.setPropertyLimitDesc(PropertyLimitDesc.SHOW_NONE);
+ }else if (propLimDesc.equalsIgnoreCase("name-and-keys-only")) {
+ rs.setPropertyLimitDesc(PropertyLimitDesc.SHOW_NAME_AND_KEYS_ONLY);
+ }
+ }
+
+ // Look to see if we need to use an Override of the normal properties
+ Map<String,Object> tmpPropertyOverRideHash = getNamedQueryPropOverRide(transId, fromAppId, nqElementVtx, thisLevelElemVtx, apiVer);
+ //System.out.println(" DEBUG --- USING this propertyOverride data set on ResSet [" + tmpPropertyOverRideHash.toString() + "]");
+ rs.setPropertyOverRideHash(tmpPropertyOverRideHash);
+
+ // See if we need to look up any "unconnected" data that needs to be associated with this result set
+ Map<String,Object> tmpExtraPropHash = getNamedQueryExtraDataLookup(transId, fromAppId, nqElementVtx, thisLevelElemVtx, apiVer);
+ //System.out.println(" DEBUG --- ADDING this EXTRA Lookup data to the ResSet [" + tmpExtraPropHash.toString() + "]");
+ rs.setExtraPropertyHash(tmpExtraPropHash);
+ }
+
+ rs.setVert(thisLevelElemVtx);
+ rs.setLocationInModelSubGraph(thisVertsTrail);
+ if( delKeyHash.containsKey(thisVertsTrail) && delKeyHash.get(thisVertsTrail).equals("T") ){
+ rs.setNewDataDelFlag("T");
+ }
+ else {
+ rs.setNewDataDelFlag("F");
+ }
+
+ // Use Gremlin-pipeline to just look for edges that go to a valid "next-steps"
+ Collection <String> validNextStepColl = validNextStepMap.get(thisVertsTrail);
+
+ // Because of how we process linkage-points, we may have duplicate node-types in our next-stepMap (for one step)
+ // So, to keep from looking (and bringing back) the same data twice, we need to make sure our next-steps are unique
+ Set<String> validNextStepHashSet = new HashSet<>(validNextStepColl);
+
+ List<String> tmpVidsTraversedList = new ArrayList<>(vidsTraversed);
+ tmpVidsTraversedList.add(thisElemVid);
+
+ Iterator <String> ntItr = validNextStepHashSet.iterator();
+ while( ntItr.hasNext() ){
+ String targetStep = ntItr.next();
+ // NOTE: NextSteps can either be just a nodeType, or can be a nodeType plus
+ // model-invariant-id-local and model-version-id-local (the two persona properties)
+ // if those need to be checked also.
+ // When the persona stuff is part of the step, it is a comma separated string.
+ // Ie. "nodeType,model-inv-id-local,model-version-id-local" (the two "persona" props)
+ //
+ String targetNodeType = "";
+ String pmid = "";
+ String pmv = "";
+ Boolean stepIsJustNT = true;
+ if( targetStep.contains(",") ){
+ stepIsJustNT = false;
+ String[] pieces = targetStep.split(",");
+ if( pieces.length != 3 ){
+ throw new AAIException("AAI_6128", "Unexpected format for nextStep in model processing = ["
+ + targetStep + "]. ");
+ }
+ else {
+ targetNodeType = pieces[0];
+ pmid = pieces[1];
+ pmv = pieces[2];
+ }
+ }
+ else {
+ // It's just the nodeType with no other info
+ targetNodeType = targetStep;
+ }
+
+ GraphTraversal<Vertex, Vertex> modPipe = null;
+ if( stepIsJustNT ){
+ modPipe = this.engine.asAdmin().getReadOnlyTraversalSource().V(thisLevelElemVtx).both().has(AAIProperties.NODE_TYPE, targetNodeType);
+ }
+ else {
+ modPipe = this.engine.asAdmin().getReadOnlyTraversalSource().V(thisLevelElemVtx).both().has(AAIProperties.NODE_TYPE, targetNodeType).has(addDBAliasedSuffix("model-invariant-id"),pmid).has(addDBAliasedSuffix("model-version-id"),pmv);
+ }
+
+ if( modPipe == null || !modPipe.hasNext() ){
+ //System.out.println("DEBUG - didn't find any [" + targetStep + "] connected to this guy (which is ok)");
+ }
+ else {
+ while( modPipe.hasNext() ){
+ Vertex tmpVert = (Vertex) modPipe.next();
+ String tmpVid = tmpVert.id().toString();
+ String tmpTrail = thisVertsTrail + "|" + targetStep;
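+ // (trails are "|"-separated steps; an illustrative value might be "generic-vnf|l-interface")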
+ if( !vidsTraversed.contains(tmpVid) ){
+ // This is one we would like to use - so we'll include the result set we get for it
+ ResultSet tmpResSet = collectInstanceData( transId, fromAppId,
+ tmpVert, tmpTrail,
+ validNextStepMap, tmpVidsTraversedList,
+ levelCounter, delKeyHash, namedQueryElementHash, apiVer );
+
+ rs.getSubResultSet().add(tmpResSet);
+ }
+ }
+ }
+ }
+
+ return rs;
+
+ } // End of collectInstanceData()
+
+
+ /**
+ * Gen topo map 4 model.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modelVerVertex the model-ver vertex
+ * @param modelVerId the model-version-id
+ * @return MultiMap of valid next steps for each potential model-element
+ * @throws AAIException the AAI exception
+ */
+ public Multimap<String, String> genTopoMap4ModelVer( String transId, String fromAppId,
+ Vertex modelVerVertex, String modelVerId)
+ throws AAIException {
+
+ if( modelVerVertex == null ){
+ throw new AAIException("AAI_6114", "null modelVerVertex passed to genTopoMap4ModelVer()");
+ }
+
+ Multimap <String, String> initialEmptyMap = ArrayListMultimap.create();
+ List<String> vidsTraversed = new ArrayList<>();
+ String modelType = getModelTypeFromModelVer( modelVerVertex, "" );
+ if( modelType.equals("widget") ){
+ // A widget model by itself does not have a topology. That is - it has no "model-elements" which
+ // define how it is connected to other things. All it has is a name which ties it to
+ // an aai-node-type
+ Iterator<Vertex> vertI= this.traverseIncidentEdges(EdgeType.TREE, modelVerVertex, "model-element");
+ if( vertI != null && vertI.hasNext() ){
+ throw new AAIException("AAI_6132", "Bad Model Definition: Widget Model has a startsWith edge to a model-element. "
+ + " model-version-id = " + modelVerId);
+ }
+ else {
+ return initialEmptyMap;
+ }
+ }
+
+ String firstModelVerId = modelVerVertex.<String>property("model-version-id").orElse(null);
+ String firstModelVersion = modelVerVertex.<String>property("model-version").orElse(null);
+ if( firstModelVerId == null || firstModelVerId.equals("") || firstModelVersion == null || firstModelVersion.equals("") ){
+ throw new AAIException("AAI_6132", "Bad Model Definition: Bad model-version-id or model-version. model-version-id = "
+ + modelVerId);
+ }
+
+ Vertex firstElementVertex = getTopElementForSvcOrResModelVer( modelVerVertex, "" );
+ Vertex firstEleModVerVtx = getModelVerThatElementRepresents( firstElementVertex, "" );
+ String firstElemModelType = getModelTypeFromModelVer( firstEleModVerVtx, "" );
+ if( ! firstElemModelType.equals("widget") ){
+ throw new AAIException("AAI_6132", "Bad Model Definition: First element must correspond to a widget type model. Model UUID = "
+ + modelVerId);
+ }
+
+ Vertex firstModVtx = getModelGivenModelVer( modelVerVertex, "" );
+ String firstModelInvId = firstModVtx.<String>property("model-invariant-id").orElse(null);
+ if( firstModelInvId == null || firstModelInvId.equals("") ){
+ throw new AAIException("AAI_6132", "Bad Model Definition: Could not find model.model-invariant-id given model-ver.model-version-id = "
+ + modelVerId);
+ }
+
+ Multimap <String, String> collectedMap = collectTopology4ModelVer( transId, fromAppId,
+ firstElementVertex, "", initialEmptyMap, vidsTraversed, 0, null, firstModelInvId, firstModelVersion );
+
+ return collectedMap;
+
+ } // End of genTopoMap4ModelVer()
+
+
+ public List<String> makeSureItsAnArrayList( String listStringVal ){
+ // We're sometimes getting a String back on db properties that should be ArrayList<String>
+ // Seems to be how they're defined in OXM - whether they use a "xml-wrapper" or not
+ // Need to translate them into ArrayLists sometimes...
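+ // (e.g. an illustrative input of ["valA", "valB"] becomes an ArrayList containing "valA" and "valB")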
+
+ List<String> retArrList = new ArrayList<>();
+ String listString = listStringVal;
+ listString = listString.replace(" ", "");
+ listString = listString.replace("\"", "");
+ listString = listString.replace("[", "");
+ listString = listString.replace("]", "");
+ String [] pieces = listString.split(",");
+ if( pieces != null && pieces.length > 0 ){
+ for( int i = 0; i < pieces.length; i++ ){
+ retArrList.add(pieces[i]);
+ }
+ }
+
+ return retArrList;
+ }
+
+
+ /**
+ * Gets the mod constraint hash.
+ *
+ * @param modelElementVtx the model element vtx
+ * @param currentHash -- the current ModelConstraints that this routine will add to if it finds any.
+ * @return HashMap of model-constraints that will be looked at for this model-element and what's "below" it.
+ * @throws AAIException the AAI exception
+ */
+ public Map<String, Vertex> getModConstraintHash( Vertex modelElementVtx, Map<String, Vertex> currentHash )
+ throws AAIException {
+
+ // For a given model-element vertex, look to see if there are any "model-constraint" elements that it has
+ // an OUT "uses" edge to. If it does, then get any "constrained-element-set" nodes that are pointed to
+ // by the "model-constraint". That will be the replacement "constrained-element-set". The UUID of the
+ // "constrained-element-set" that it is supposed to replace is found in the property:
+ // model-constraint.constrained-element-set-uuid-to-replace
+ //
+ // For now, that is the only type of model-constraint allowed, so that is all we will look for.
+ // Pass back any of these "constrained-element-set" nodes along with any that were passed in by
+ // the "currentHash" parameter.
+
+ if( modelElementVtx == null ){
+ String msg = " null modelElementVtx passed to getModConstraintHash() ";
+ throw new AAIException("AAI_6114", msg);
+ }
+
+ String modelType = modelElementVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( modelType == null || (!modelType.equals("model-element")) ){
+ String msg = " getModConstraintHash() called with wrong type model: [" + modelType + "]. ";
+ throw new AAIException("AAI_6114", msg);
+ }
+
+ Map<String, Vertex> thisHash = new HashMap<>();
+ if( currentHash != null ){
+ thisHash.putAll(currentHash);
+ }
+
+ int count = 0;
+ List<Vertex> modelConstraintArray = new ArrayList<>();
+ Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, modelElementVtx, "model-constraint");
+ while( vertI != null && vertI.hasNext() ){
+ Vertex tmpVert = vertI.next();
+ String connectToType = tmpVert.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( (connectToType != null) && connectToType.equals("model-constraint") ){
+ // We need to find the constrained element set pointed to by this and add it to the Hash to return
+ modelConstraintArray.add(tmpVert);
+ count++;
+ }
+ }
+
+ if( count > 0 ) {
+ for( int i = 0; i < count; i++ ){
+ Vertex vtxOfModelConstraint = modelConstraintArray.get(i);
+ String uuidOfTheOneToBeReplaced = vtxOfModelConstraint.<String>property("constrained-element-set-uuid-2-replace").orElse(null);
+ // We have the UUID of the constrained-element-set that will be superseded, now find the
+ // constrained-element-set to use in its place
+ Iterator<Vertex> mvertI = this.traverseIncidentEdges(EdgeType.TREE, vtxOfModelConstraint, "constrained-element-set");
+ while( mvertI != null && mvertI.hasNext() ){
+ // There better only be one...
+ Vertex tmpVert = mvertI.next();
+ String connectToType = tmpVert.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( (connectToType != null) && connectToType.equals("constrained-element-set") ){
+ // This is the "constrained-element-set" that we want to use as the Replacement
+ thisHash.put(uuidOfTheOneToBeReplaced, tmpVert );
+ }
+ }
+ }
+ return thisHash;
+ }
+ else {
+ // Didn't find anything to add, so just return what they passed in.
+ return currentHash;
+ }
+
+ } // End of getModConstraintHash()
+
+
+ /**
+ * Gets the top element vertex for service or resource model.
+ *
+ * @param modelVerVtx the model-ver vertex
+ * @return first element pointed to by this model-ver
+ * @throws AAIException the AAI exception
+ */
+ public Vertex getTopElementForSvcOrResModelVer( Vertex modelVerVtx, String trail )
+ throws AAIException {
+
+ // For a "resource" or "service" type model, return the "top" element in that model
+ if( modelVerVtx == null ){
+ String msg = " null modelVertex passed to getTopoElementForSvcOrResModelVer() at [" + trail + "]. ";
+ throw new AAIException("AAI_6114", msg);
+ }
+
+ String modelVerId = modelVerVtx.<String>property("model-version-id").orElse(null);
+ if( modelVerId == null ){
+ String nt = modelVerVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( nt != null && !nt.equals("model-ver") ){
+ String msg = "Illegal model defined: model element pointing to nodeType: ["
+ + nt + "], should be pointing to: [model-ver] at [" + trail + "]. ";
+ throw new AAIException("AAI_6132", msg);
+ }
+ }
+
+ Vertex firstElementVertex = null;
+
+ Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, modelVerVtx, "model-element");
+ int elCount = 0;
+ while( vertI != null && vertI.hasNext() ){
+ elCount++;
+ firstElementVertex = vertI.next();
+ }
+
+ if( elCount > 1 ){
+ String msg = "Illegal model defined: More than one first element defined for model-ver-id = " +
+ modelVerId + " at [" + trail + "]. ";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ if( firstElementVertex == null ){
+ String msg = "Could not find first model element for model-ver-id = "
+ + modelVerId + " at [" + trail + "]. ";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ return firstElementVertex;
+
+ } // End of getTopElementForSvcOrResModelVer()
+
+
+
+ /**
+ * Gets the named query prop over ride.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param namedQueryElementVertex the named query element vertex
+ * @param instanceVertex the instance vertex
+ * @param apiVer the api ver
+ * @return HashMap of alternate properties to return for this element
+ * @throws AAIException the AAI exception
+ */
+ public Map<String,Object> getNamedQueryPropOverRide( String transId, String fromAppId,
+ Vertex namedQueryElementVertex, Vertex instanceVertex, String apiVer )
+ throws AAIException {
+
+ // If this model-element says that they want an alternative set of properties returned, then pull that
+ // data out of the instance vertex.
+
+ Map<String,Object> altPropHash = new HashMap<>();
+
+ if( namedQueryElementVertex == null ){
+ String msg = " null namedQueryElementVertex passed to getNamedQueryPropOverRide() ";
+ throw new AAIException("AAI_6114", msg);
+ }
+
+ List<String> propCollectList = new ArrayList<>();
+ Iterator <VertexProperty<Object>> vpI = namedQueryElementVertex.properties("property-collect-list");
+ while( vpI.hasNext() ){
+ propCollectList.add((String)vpI.next().value());
+ }
+
+ for( int i = 0; i < propCollectList.size(); i++ ){
+ String thisPropName = propCollectList.get(i);
+ Object instanceVal = instanceVertex.<Object>property(thisPropName).orElse(null);
+ altPropHash.put(thisPropName, instanceVal);
+ }
+
+ return altPropHash;
+
+ } // End of getNamedQueryPropOverRide()
+
+
+ /**
+ * Named query constraint says stop.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param namedQueryElementVertex the named query element vertex
+ * @param instanceVertex the instance vertex
+ * @param apiVer the api ver
+ * @return true - if a constraint was defined that has not been met by the passed instanceVertex
+ * @throws AAIException the AAI exception
+ */
+ public Boolean namedQueryConstraintSaysStop( String transId, String fromAppId,
+ Vertex namedQueryElementVertex, Vertex instanceVertex, String apiVer )
+ throws AAIException {
+
+ // For each (if any) property-constraint defined for this named-query-element, we will evaluate if
+ // the constraint is met or not-met. if there are constraints and any are not-met, then
+ // we return "true".
+
+ if( namedQueryElementVertex == null ){
+ String msg = " null namedQueryElementVertex passed to namedQueryConstraintSaysStop() ";
+ throw new AAIException("AAI_6114", msg);
+ }
+ if( instanceVertex == null ){
+ String msg = " null instanceVertex passed to namedQueryConstraintSaysStop() ";
+ throw new AAIException("AAI_6114", msg);
+ }
+
+ Iterator<Vertex> constrPipe = this.traverseIncidentEdges(EdgeType.TREE, namedQueryElementVertex, "property-constraint");
+ if( constrPipe == null || !constrPipe.hasNext() ){
+ // There's no "property-constraint" defined for this named-query-element. No problem.
+ return false;
+ }
+
+ while( constrPipe.hasNext() ){
+ Vertex constrVtx = (Vertex) constrPipe.next();
+ // We found a property constraint that we will need to check
+ String conType = constrVtx.<String>property("constraint-type").orElse(null);
+ if( (conType == null) || conType.equals("")){
+ String msg = " Bad property-constraint (constraint-type) found in Named Query definition. ";
+ throw new AAIException("AAI_6133", msg);
+ }
+ String propName = constrVtx.<String>property("property-name").orElse(null);
+ if( (propName == null) || propName.equals("")){
+ String msg = " Bad property-constraint (property-name) found in Named Query definition. ";
+ throw new AAIException("AAI_6133", msg);
+ }
+ String propVal = constrVtx.<String>property("property-value").orElse(null);
+ if( (propVal == null) || propVal.equals("")){
+ String msg = " Bad property-constraint (propVal) found in Named Query definition. ";
+ throw new AAIException("AAI_6133", msg);
+ }
+
+ // See if that constraint is met or not
+ String val = instanceVertex.<String>property(propName).orElse(null);
+ if( val == null ){
+ val = "";
+ }
+
+ if( conType.equals("EQUALS") ){
+ if( !val.equals(propVal) ){
+ // This constraint was not met
+ return true;
+ }
+ }
+ else if( conType.equals("NOT-EQUALS") ){
+ if( val.equals(propVal) ){
+ // This constraint was not met
+ return true;
+ }
+ }
+ else {
+ String msg = " Bad property-constraint (constraint-type) found in Named Query definition. ";
+ throw new AAIException("AAI_6133", msg);
+ }
+ }
+
+ return false;
+
+ } // End of namedQueryConstraintSaysStop()
+
+
+ /**
+ * Gets the named query extra data lookup.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param namedQueryElementVertex the named query element vertex
+ * @param instanceVertex the instance vertex
+ * @param apiVer the api ver
+ * @return HashMap of alternate properties to return for this element
+ * @throws AAIException the AAI exception
+ */
+ public Map<String,Object> getNamedQueryExtraDataLookup( String transId, String fromAppId,
+ Vertex namedQueryElementVertex, Vertex instanceVertex, String apiVer )
+ throws AAIException {
+
+ // For each (if any) related-lookup defined for this named-query-element, we will go and
+ // and try to find it. All the related-lookup data will get put in a hash and returned.
+
+ if( namedQueryElementVertex == null ){
+ String msg = " null namedQueryElementVertex passed to getNamedQueryExtraDataLookup() ";
+ throw new AAIException("AAI_6114", msg);
+ }
+ if( instanceVertex == null ){
+ String msg = " null instanceVertex passed to getNamedQueryExtraDataLookup() ";
+ throw new AAIException("AAI_6114", msg);
+ }
+
+ Map<String,Object> retHash = new HashMap<>();
+
+ Iterator<Vertex> lookPipe = this.traverseIncidentEdges(EdgeType.TREE, namedQueryElementVertex, "related-lookup");
+ if( lookPipe == null || !lookPipe.hasNext() ){
+ // There's no "related-lookup" defined for this named-query-element. No problem.
+ return retHash;
+ }
+
+ while( lookPipe.hasNext() ){
+ Vertex relLookupVtx = (Vertex) lookPipe.next();
+ // We found a related-lookup record to try and use
+ String srcProp = relLookupVtx.<String>property("source-node-property").orElse(null);
+ String srcNodeType = relLookupVtx.<String>property("source-node-type").orElse(null);
+ srcProp = getPropNameWithAliasIfNeeded(srcNodeType, srcProp);
+
+ if( (srcProp == null) || srcProp.equals("")){
+ String msg = " Bad related-lookup (source-node-property) found in Named Query definition. ";
+ throw new AAIException("AAI_6133", msg);
+ }
+ String targetNodeType = relLookupVtx.<String>property("target-node-type").orElse(null);
+ if( (targetNodeType == null) || targetNodeType.equals("")){
+ String msg = " Bad related-lookup (targetNodeType) found in Named Query definition. ";
+ throw new AAIException("AAI_6133", msg);
+ }
+ String targetProp = relLookupVtx.<String>property("target-node-property").orElse(null);
+ targetProp = getPropNameWithAliasIfNeeded(targetNodeType, targetProp);
+
+ if( (targetProp == null) || targetProp.equals("")){
+ String msg = " Bad related-lookup (target-node-property) found in Named Query definition. ";
+ throw new AAIException("AAI_6133", msg);
+ }
+
+ List<String> propCollectList = new ArrayList<>();
+ Iterator <VertexProperty<Object>> vpI = relLookupVtx.properties("property-collect-list");
+ while( vpI.hasNext() ){
+ propCollectList.add((String)vpI.next().value());
+ }
+
+ // Use the value from the source to see if we can find ONE target record using the
+ // value from the source
+ String valFromInstance = instanceVertex.<String>property(srcProp).orElse(null);
+ if( valFromInstance == null ){
+ // if there is no key to use to go look up something, we should end it here and just
+ // note what happened - no need to try to look something up by an empty key
+ LOGGER.debug("WARNING - the instance data node of type [" + srcNodeType
+ + "] did not have a value for property [" + srcProp
+ + "], so related-lookup is being abandoned.");
+ return retHash;
+ }
+
+ Map<String,Object> propHash = new HashMap<String,Object>();
+ propHash.put(targetProp, valFromInstance);
+
+ Optional<Vertex> result = dbMethHelper.locateUniqueVertex(targetNodeType, propHash);
+ if (!result.isPresent()) {
+ // If it can't find the lookup node, don't fail, just log that it couldn't be found ---
+ LOGGER.debug("WARNING - Could not find lookup node that corresponds to nodeType ["
+ + targetNodeType + "] propertyName = [" + srcProp
+ + "], propVal = [" + valFromInstance
+ + "] so related-lookup is being abandoned.");
+ return retHash;
+ }
+ else {
+ Vertex tmpVtx = result.get();
+ // Pick up the properties from the target vertex that they wanted us to get
+ for( int j = 0; j < propCollectList.size(); j++ ){
+ String tmpPropName = propCollectList.get(j);
+ tmpPropName = getPropNameWithAliasIfNeeded(targetNodeType, tmpPropName);
+ Object valObj = tmpVtx.<Object>property(tmpPropName).orElse(null);
+ String lookupKey = targetNodeType + "." + tmpPropName;
+ retHash.put(lookupKey, valObj);
+
+ }
+ }
+ }
+
+ return retHash;
+
+ } // End of getNamedQueryExtraDataLookup()
+
+ /**
+ * Collect NQ element hash.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param thisLevelElemVtx the element vertex for this level
+ * @param incomingTrail the incoming trail -- trail of nodeTypes that got us here (this nq-element vertex) from the top
+ * @param currentHash the current hash -- the Map that got us to this point (used as the base of the map we will return)
+ * @param vidsTraversed the vids traversed -- ArrayList of vertexId's that we traversed to get to this point
+ * @param levelCounter the level counter
+ * @return HashMap of all widget-points on a namedQuery topology with the value being the "named-query-element-uuid" for that spot.
+ * @throws AAIException the AAI exception
+ */
+ public Map<String, String> collectNQElementHash( String transId, String fromAppId,
+ Vertex thisLevelElemVtx, String incomingTrail,
+ Map<String,String> currentHash, ArrayList <String> vidsTraversed,
+ int levelCounter ) throws AAIException {
+
+ levelCounter++;
+
+ Map<String, String> thisHash = new HashMap<>();
+ thisHash.putAll(currentHash);
+
+ if( levelCounter > MAX_LEVELS ) {
+ throw new AAIException("AAI_6125", "collectNQElementHash() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
+ }
+ String thisGuysTrail = "";
+ String thisElemVid = thisLevelElemVtx.id().toString();
+
+ // Find out what widget (and thereby what aai-node-type) this element represents.
+ String thisElementNodeType = getNqElementWidgetType( transId, fromAppId, thisLevelElemVtx, incomingTrail );
+
+ if( incomingTrail == null || incomingTrail.equals("") ){
+ // This is the first one
+ thisGuysTrail = thisElementNodeType;
+ }
+ else {
+ thisGuysTrail = incomingTrail + "|" + thisElementNodeType;
+ }
+ vidsTraversed.add(thisElemVid);
+
+ String nqElementUuid = thisLevelElemVtx.<String>property("named-query-element-uuid").orElse(null);
+ if( nqElementUuid == null || nqElementUuid.equals("") ){
+ String msg = " named-query element UUID not found at trail = [" + incomingTrail + "].";
+ throw new AAIException("AAI_6133", msg);
+ }
+ thisHash.put(thisGuysTrail, nqElementUuid );
+
+ // Now go "down" and look at the sub-elements pointed to so we can get their data.
+ Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, thisLevelElemVtx, "named-query-element");
+ while( vertI != null && vertI.hasNext() ){
+ Vertex tmpVert = vertI.next();
+ String vid = tmpVert.id().toString();
+ Map<String,Object> elementHash = new HashMap<String, Object>();
+
+ String connectToType = tmpVert.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( connectToType != null && connectToType.equals("named-query-element") ){
+ // This is what we would expect
+ elementHash.put(vid, tmpVert);
+ }
+ else {
+ String msg = " named query element has [connectedTo] edge to improper nodeType= ["
+ + connectToType + "] trail = [" + incomingTrail + "].";
+ throw new AAIException("AAI_6133", msg);
+ }
+ for( Map.Entry<String, Object> entry : elementHash.entrySet() ){
+ Vertex elVert = (Vertex)(entry.getValue());
+ String tmpElVid = elVert.id().toString();
+ if( !vidsTraversed.contains(tmpElVid) ){
+ // This is one we would like to use - so we'll recursively get its result set to add to ours
+ Map<String, String> tmpHash = collectNQElementHash( transId, fromAppId,
+ elVert, thisGuysTrail, currentHash, vidsTraversed, levelCounter);
+ thisHash.putAll(tmpHash);
+ }
+ }
+ }
+ return thisHash;
+
+ } // End of collectNQElementHash()
+
+
+ /**
+ * Collect delete key hash.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param thisLevelElemVtx the element vertex at this level
+ * @param incomingTrail the incoming trail -- trail of nodeTypes that got us here (this vertex) from the top
+ * @param currentHash the current hash -- the Map that got us to this point (used as the base of the map we will return)
+ * @param vidsTraversed the vids traversed ---- ArrayList of vertexId's that we traversed to get to this point
+ * @param levelCounter the level counter
+ * @param modConstraintHash the mod constraint hash
+ * @param overRideModelId the over ride model id
+ * @param overRideModelVersionId the over ride model version id
+ * @return HashMap of all widget-points on a model topology with the value being the "newDataDelFlag" for that spot.
+ * @throws AAIException the AAI exception
+ */
+ public Map<String, String> collectDeleteKeyHash( String transId, String fromAppId,
+ Vertex thisLevelElemVtx, String incomingTrail,
+ Map<String,String> currentHash, ArrayList <String> vidsTraversed,
+ int levelCounter, Map<String, Vertex> modConstraintHash,
+ String overRideModelId, String overRideModelVersionId )
+ throws AAIException {
+
+ levelCounter++;
+
+ Map<String, String> thisHash = new HashMap<>();
+ thisHash.putAll(currentHash);
+
+ if( levelCounter > MAX_LEVELS ) {
+ throw new AAIException("AAI_6125", "collectDeleteKeyHash() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
+ }
+ String thisGuysTrail = "";
+ String thisElemVid = thisLevelElemVtx.id().toString();
+ Map<String, Vertex> modConstraintHash2Use = null;
+
+ // If this element represents a resource or service model, then we will replace this element with
+ // the "top" element of that resource or service model. That model-element already points to its
+ // topology, so it will graft in that model's topology.
+ // EXCEPT - if this element has "linkage-points" defined, then we need to do some extra
+ // processing for how we join to that model and will not try to go any "deeper".
+ List<String> linkagePtList = new ArrayList<>();
+ Iterator <VertexProperty<Object>> vpI = thisLevelElemVtx.properties("linkage-points");
+
+ // Because "linkage-points" is an xml-element-wrapper in the OXM definition, the whole array of
+ // Strings comes back as a single String. We still use "vtx.properties()" to get it, but only
+ // look at the first thing returned by the iterator.
+ if( vpI.hasNext() ){
+ String tmpLinkageThing = (String)vpI.next().value();
+ linkagePtList = makeSureItsAnArrayList( tmpLinkageThing );
+ }
+
+ if( linkagePtList != null && !linkagePtList.isEmpty() ){
+ // Whatever this element is - we are connecting to it via a linkage-point
+ // We will figure out what to do and then return without going any deeper
+ String elemFlag = thisLevelElemVtx.<String>property("new-data-del-flag").orElse(null);
+
+ Set<String> linkageConnectNodeTypes = getLinkageConnectNodeTypes( linkagePtList );
+ Iterator <?> linkNtIter = linkageConnectNodeTypes.iterator();
+ String incTrail = "";
+ if( incomingTrail != null && !incomingTrail.equals("") ){
+ incTrail = incomingTrail + "|";
+ }
+
+ while( linkNtIter.hasNext() ){
+ // The 'trail' (or trails) for this element should just be to the first contact on the linkage point
+ String linkTrail = incTrail + linkNtIter.next();
+ Boolean alreadyTaggedFalse = false;
+ if( thisHash.containsKey(linkTrail) && thisHash.get(linkTrail).equals("F") ){
+ // some other path with a matching trail has the deleteFlag set to "F", so we do not want
+ // to override that since our model code only uses nodeTypes to know where it is - and we
+ // would rather do less deleting than needed instead of too much deleting.
+ alreadyTaggedFalse = true;
+ }
+ if( elemFlag != null && elemFlag.equals("T") && !alreadyTaggedFalse ){
+ // This trail should be marked with a "T"
+ thisHash.put(linkTrail, "T");
+ }
+ else {
+ thisHash.put(linkTrail, "F");
+ }
+ }
+ return thisHash;
+ }
+
+ // ----------------------------------------------------------------------------
+ // If we got to here, then this was not an element that used a linkage-point
+ // ----------------------------------------------------------------------------
+
+ // Find out what widget-model (and thereby what aai-node-type) this element represents.
+ // Even if this element is pointing to a service or resource model, it must have a
+ // first element which is a single widget-type model.
+ String thisElementNodeType = getModElementWidgetType( thisLevelElemVtx, incomingTrail );
+ String firstElementModelInfo = "";
+
+ vidsTraversed.add(thisElemVid);
+ Vertex elementVtxForThisLevel = null;
+ Vertex thisElementsModelVerVtx = getModelVerThatElementRepresents( thisLevelElemVtx, incomingTrail );
+ Vertex thisElementsModelVtx = getModelGivenModelVer( thisElementsModelVerVtx, incomingTrail );
+ String modType = getModelTypeFromModel( thisElementsModelVtx, incomingTrail );
+ String subModelFirstModInvId = thisElementsModelVtx.<String>property("model-invariant-id").orElse(null);
+ String subModelFirstVerId = thisElementsModelVerVtx.<String>property("model-version-id").orElse(null);
+ if( modType.equals("widget") ){
+ if( overRideModelId != null && !overRideModelId.equals("") ){
+ // Note - this is just to catch the correct model for the TOP node in a model since
+ // it will have an element which will always be a widget even though the model
+ // could be a resource or service model.
+ firstElementModelInfo = "," + overRideModelId + "," + overRideModelVersionId;
+ }
+ }
+ else if( nodeTypeSupportsPersona(thisElementNodeType) ){
+ firstElementModelInfo = "," + subModelFirstModInvId + "," + subModelFirstVerId;
+ }
+
+ if( incomingTrail.equals("") ){
+ // This is the first one
+ thisGuysTrail = thisElementNodeType + firstElementModelInfo;
+ }
+ else {
+ thisGuysTrail = incomingTrail + "|" + thisElementNodeType + firstElementModelInfo;
+ }
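+ // A minimal sketch of what thisGuysTrail can look like (node types and ids are hypothetical):
+ // a plain widget step just appends its node type, e.g. "generic-vnf|vserver", while a step whose
+ // node type supports persona also carries model info, e.g. "generic-vnf,<model-invariant-id>,<model-version-id>|vserver".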
+
+ String tmpFlag = "F";
+ Boolean stoppedByASvcOrResourceModelElement = false;
+ if( modType.equals("widget") ){
+ elementVtxForThisLevel = thisLevelElemVtx;
+ // For the element-model for the widget at this level, record its delete flag
+ tmpFlag = elementVtxForThisLevel.<String>property("new-data-del-flag").orElse(null);
+ }
+ else {
+ // For an element that is referring to a resource or service model, we replace
+ // this element with the "top" element for that resource/service model so that the
+ // topology of that resource/service model will be included in this topology.
+ String modelVerId = thisElementsModelVerVtx.<String>property("model-version-id").orElse(null);
+ if( subModelFirstModInvId == null || subModelFirstModInvId.equals("")
+ || subModelFirstVerId == null || subModelFirstVerId.equals("") ){
+ throw new AAIException("AAI_6132", "Bad Model Definition: Bad model-invariant-id or model-version-id. Model-version-id = " +
+ modelVerId + ", at [" + incomingTrail + "]");
+ }
+
+ // BUT -- if the model-element HERE at the resource/service level does NOT have
+ // its new-data-del-flag set to "T", then we do not need to go down into the
+ // sub-model looking for delete-able things.
+
+ tmpFlag = thisLevelElemVtx.<String>property("new-data-del-flag").orElse(null);
+ elementVtxForThisLevel = getTopElementForSvcOrResModelVer(thisElementsModelVerVtx, thisGuysTrail);
+ if( tmpFlag != null && tmpFlag.equals("T") ){
+ modConstraintHash2Use = getModConstraintHash( thisLevelElemVtx, modConstraintHash );
+ }
+ else {
+ stoppedByASvcOrResourceModelElement = true;
+ }
+ // For the top element of the resource/service sub-model, record its delete flag
+ tmpFlag = elementVtxForThisLevel.<String>property("new-data-del-flag").orElse(null);
+ }
+
+ String flag2Use = "F"; // by default we'll use "F" for the delete flag
+ if( ! stoppedByASvcOrResourceModelElement ){
+ // Since we haven't been stopped by a resource/service level "F", we can look at the lower level flag
+ if( thisHash.containsKey(thisGuysTrail) ){
+ // We've seen this spot in the topology before - do not override the delete flag if the older one is "F"
+ // We will only over-ride it if the old one was "T" and the new one is "F" (anything but "T")
+ String oldFlag = thisHash.get(thisGuysTrail);
+ if( oldFlag.equals("T") && (tmpFlag != null) && tmpFlag.equals("T") ){
+ // The old flag was "T" and the new flag is also "T"
+ flag2Use = "T";
+ }
+ else {
+ // Either the old flag or the new flag was not "T", so the result stays "F"
+ flag2Use = "F";
+ }
+ }
+ else if( (tmpFlag != null) && tmpFlag.equals("T") ){
+ // We have not seen this one, so we can set it to "T" if that's what it is.
+ flag2Use = "T";
+ }
+ }
+
+ thisHash.put(thisGuysTrail, flag2Use);
+ if( ! stoppedByASvcOrResourceModelElement ){
+ // Since we haven't been stopped by a resource/service level "F", we will continue to
+ // go "down" and look at the elements pointed to so we can get their data.
+ Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, elementVtxForThisLevel, "model-element", "constrained-element-set");
+ while( vertI != null && vertI.hasNext() ){
+ Vertex tmpVert = vertI.next();
+ String vid = tmpVert.id().toString();
+ Map<String,Object> elementHash = new HashMap<String, Object>();
+
+ String connectToType = tmpVert.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( connectToType != null && connectToType.equals("model-element") ){
+ // A nice, regular old model-element
+ elementHash.put(vid, tmpVert);
+ }
+ else if( (connectToType != null) && connectToType.equals("constrained-element-set") ){
+ // translate the constrained-element-set into a hash of model-element Vertex's
+ String constrainedElementSetUuid = tmpVert.<String>property("constrained-element-set-uuid").orElse(null);
+ if( (modConstraintHash2Use != null) && modConstraintHash2Use.containsKey(constrainedElementSetUuid) ){
+ // This constrained-element-set is being superseded by a different one
+ Vertex replacementConstraintVert = modConstraintHash.get(constrainedElementSetUuid);
+ elementHash = getNextStepElementsFromSet( replacementConstraintVert );
+ // Now that we've found and used the replacement constraint, we don't need to carry it along any further
+ modConstraintHash.remove(constrainedElementSetUuid);
+ }
+ else {
+ elementHash = getNextStepElementsFromSet( tmpVert );
+ }
+ }
+ else {
+ String msg = " model-element has [connectedTo] edge to improper nodeType= ["
+ + connectToType + "] trail = [" + incomingTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ for( Map.Entry<String, Object> entry : elementHash.entrySet() ){
+ Vertex elVert = (Vertex)(entry.getValue());
+ String tmpElVid = elVert.id().toString();
+ String tmpElNT = getModElementWidgetType( elVert, thisGuysTrail );
+ check4EdgeRule(tmpElNT, thisElementNodeType);
+ if( !vidsTraversed.contains(tmpElVid) ){
+ // This is one we would like to use - so we'll recursively get its result set to add to ours
+ Map<String, String> tmpHash = collectDeleteKeyHash( transId, fromAppId,
+ elVert, thisGuysTrail,
+ currentHash, vidsTraversed, levelCounter, modConstraintHash2Use,
+ "", "" );
+ thisHash.putAll(tmpHash);
+ }
+ }
+ }
+ }
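+ // Illustrative sketch of the returned hash (trails and flags are hypothetical): each key is a
+ // nodeType-trail and each value is the delete flag for that spot, e.g.
+ // { "generic-vnf|vserver" -> "T", "generic-vnf|vserver|l-interface" -> "F" }.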
+ return thisHash;
+
+ } // End of collectDeleteKeyHash()
+
+
+ /**
+ * Gets the linkage connect node types.
+ *
+ * @param linkagePtList the linkage pt list
+ * @return the linkage connect node types
+ * @throws AAIException the AAI exception
+ */
+ public Set<String> getLinkageConnectNodeTypes(List<String> linkagePtList )
+ throws AAIException {
+ // linkage points are a path from the top of a model to where we link in.
+ // This method wants to just bring back a list of distinct last items.
+ // For example, for an input with these two: "pserver|lag-link|l-interface" and "pserver|p-interface|l-interface"
+ // it would just return a single item, "l-interface" since both linkage points end in that same node-type.
+
+ Set<String> linkPtSet = new HashSet<>();
+
+ if( linkagePtList == null ){
+ String detail = " Bad (null) linkagePtList passed to getLinkageConnectNodeTypes() ";
+ throw new AAIException("AAI_6125", detail);
+ }
+
+ for( int i = 0; i < linkagePtList.size(); i++ ){
+ String [] trailSteps = linkagePtList.get(i).split("\\|");
+ if( trailSteps == null || trailSteps.length == 0 ){
+ String detail = " Bad linkagePtList passed to getLinkageConnectNodeTypes(): [" + linkagePtList + "] ";
+ throw new AAIException("AAI_6125", detail);
+ }
+ String lastStepNT = trailSteps[trailSteps.length - 1];
+ linkPtSet.add(lastStepNT);
+ }
+
+ return linkPtSet;
+
+ }// End getLinkageConnectNodeTypes()
+
+
+ /**
+ * Collect topology for model-ver.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param thisLevelElemVtx the model-element vertex to collect for at this level
+ * @param incomingTrail the incoming trail -- trail of nodeTypes/personaInfo that got us here (this vertex) from the top
+ * @param currentMap the current map -- map that got us to this point (that we will use as the base of the map we will return)
+ * @param vidsTraversed the vids traversed -- ArrayList of vertexId's that we traversed to get to this point
+ * @param levelCounter the level counter
+ * @param modConstraintHash the mod constraint hash
+ * @param overRideModelInvId the override model-invariant-id
+ * @param overRideModelVersionId the override model-version-id
+ * @return Map of the topology
+ * @throws AAIException the AAI exception
+ */
+ public Multimap<String, String> collectTopology4ModelVer( String transId, String fromAppId,
+ Vertex thisLevelElemVtx, String incomingTrail,
+ Multimap <String,String> currentMap, List<String> vidsTraversed,
+ int levelCounter, Map<String, Vertex> modConstraintHash,
+ String overRideModelInvId, String overRideModelVersionId )
+ throws AAIException {
+
+ levelCounter++;
+
+ Multimap <String, String> thisMap = ArrayListMultimap.create();
+ thisMap.putAll(currentMap);
+
+ if( levelCounter > MAX_LEVELS ) {
+ throw new AAIException("AAI_6125", "collectTopology4ModelVer() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
+ }
+ String thisGuysTrail = "";
+ String thisElemVid = thisLevelElemVtx.id().toString();
+ Map<String, Vertex> modConstraintHash2Use = null;
+
+ // If this element represents a resource or service model, then we will replace this element with
+ // the "top" element of that resource or service model. That model-element already points to its
+ // topology, so it will graft in that model's topology.
+ // EXCEPT - if this element has "linkage-points" defined, then we need to do some extra
+ // processing for how we join to that model.
+
+ // Find out what widget-model (and thereby what aai-node-type) this element represents.
+ // Even if this element is pointing to a service or resource model, it must have a
+ // first element which is a single widget-type model.
+ String firstElementModelInfo = "";
+ String thisElementNodeType = getModElementWidgetType( thisLevelElemVtx, incomingTrail );
+ if( nodeTypeSupportsPersona(thisElementNodeType) && overRideModelInvId != null && !overRideModelInvId.equals("") ){
+ firstElementModelInfo = "," + overRideModelInvId + "," + overRideModelVersionId;
+ }
+
+ Vertex elementVtxForThisLevel = null;
+ Vertex thisElementsModelVerVtx = getModelVerThatElementRepresents( thisLevelElemVtx, incomingTrail );
+ String subModelFirstModInvId = "";
+ String subModelFirstModVerId = "";
+ String modInfo4Trail = "";
+ String modType = getModelTypeFromModelVer( thisElementsModelVerVtx, incomingTrail );
+ if( modType.equals("resource") || modType.equals("service") ){
+ // For an element that is referring to a resource or service model, we replace
+ // this element with the "top" element for that resource/service model so that the
+ // topology of that resource/service model gets included in this topology.
+ // -- Note - since that top element of a service or resource model will point to a widget model,
+ // we have to track what modelId/version it really maps to so we can make our recursive call
+ Vertex thisElementsModelVtx = getModelGivenModelVer(thisElementsModelVerVtx, incomingTrail);
+ subModelFirstModInvId = thisElementsModelVtx.<String>property("model-invariant-id").orElse(null);
+ subModelFirstModVerId = thisElementsModelVerVtx.<String>property("model-version-id").orElse(null);
+
+ if( nodeTypeSupportsPersona(thisElementNodeType) ){
+ modInfo4Trail = "," + subModelFirstModInvId + "," + subModelFirstModVerId;
+ }
+ String modelVerId = thisElementsModelVerVtx.<String>property("model-version-id").orElse(null);
+ if( subModelFirstModInvId == null || subModelFirstModInvId.equals("") || subModelFirstModVerId == null || subModelFirstModVerId.equals("") ){
+ throw new AAIException("AAI_6132", "Bad Model Definition: Bad model-invariant-id or model-version-id. Model-ver-id = " + modelVerId);
+ }
+
+ elementVtxForThisLevel = getTopElementForSvcOrResModelVer(thisElementsModelVerVtx, incomingTrail);
+ modConstraintHash2Use = getModConstraintHash( thisLevelElemVtx, modConstraintHash );
+ }
+ else {
+ elementVtxForThisLevel = thisLevelElemVtx;
+ }
+
+ if( incomingTrail.equals("") ){
+ // This is the first one
+ thisGuysTrail = thisElementNodeType + firstElementModelInfo;
+ }
+ else {
+ thisGuysTrail = incomingTrail + "|" + thisElementNodeType + modInfo4Trail;
+ }
+
+ // We only want to ensure that a particular element does not repeat on a single "branch".
+ // It could show up on other branches in the case where it is a sub-model which is being
+ // used in more than one place.
+ //
+ List<String> thisTrailsVidsTraversed = new ArrayList <String>();
+ thisTrailsVidsTraversed.addAll(vidsTraversed);
+ thisTrailsVidsTraversed.add(thisElemVid);
+
+ // Look at the elements pointed to at this level and add on their data
+ Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, elementVtxForThisLevel, "model-element", "constrained-element-set");
+
+ while( vertI != null && vertI.hasNext() ){
+ Vertex tmpVert = vertI.next();
+ String vid = tmpVert.id().toString();
+ Map<String,Object> elementHash = new HashMap<String, Object>();
+ String connectToType = tmpVert.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( connectToType != null && connectToType.equals("model-element") ){
+ // A nice, regular old model-element
+ elementHash.put(vid, tmpVert);
+ }
+ else if( (connectToType != null) && connectToType.equals("constrained-element-set") ){
+ // translate the constrained-element-set into a hash of model-element Vertex's
+ String constrainedElementSetUuid = tmpVert.<String>property("constrained-element-set-uuid").orElse(null);
+ if( (modConstraintHash2Use != null) && modConstraintHash2Use.containsKey(constrainedElementSetUuid) ){
+ // This constrained-element-set is being superseded by a different one
+ Vertex replacementConstraintVert = modConstraintHash.get(constrainedElementSetUuid);
+ elementHash = getNextStepElementsFromSet( replacementConstraintVert );
+ // Now that we've found and used the replacement constraint, we don't need to carry it along any further
+ modConstraintHash.remove(constrainedElementSetUuid);
+ }
+ else {
+ elementHash = getNextStepElementsFromSet( tmpVert );
+ }
+ }
+ else {
+ String msg = " model element has [connectedTo] edge to improper nodeType= ["
+ + connectToType + "] trail = [" + incomingTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ for( Map.Entry<String, Object> entry : elementHash.entrySet() ){
+ Vertex elVert = (Vertex)(entry.getValue());
+ String tmpElVid = elVert.id().toString();
+ String tmpElNT = getModElementWidgetType( elVert, thisGuysTrail );
+ String tmpElStepName = getModelElementStepName( elVert, thisGuysTrail);
+
+ List<String> linkagePtList = new ArrayList <String>();
+ Iterator <VertexProperty<Object>> vpI = elVert.properties("linkage-points");
+
+ // I am not sure why, but since "linkage-points" is an xml-element-wrapper in the OXM definition,
+ // we get back the whole array of Strings in one String. We still use "vtx.properties()" to
+ // get it, but only look at the first thing returned by the iterator.
+ if( vpI.hasNext() ){
+ String tmpLinkageThing = (String)vpI.next().value();
+ linkagePtList = makeSureItsAnArrayList( tmpLinkageThing );
+ }
+
+ if( linkagePtList != null && !linkagePtList.isEmpty() ){
+ // This is as far as we can go, we will use the linkage point info to define the
+ // rest of this "trail"
+ for( int i = 0; i < linkagePtList.size(); i++ ){
+ Multimap<String, String> tmpMap = collectTopology4LinkagePoint( transId, fromAppId,
+ linkagePtList.get(i), thisGuysTrail, currentMap);
+ thisMap.putAll(tmpMap);
+ }
+ }
+ else {
+ check4EdgeRule(tmpElNT, thisElementNodeType);
+ thisMap.put(thisGuysTrail, tmpElStepName);
+ if( !thisTrailsVidsTraversed.contains(tmpElVid) ){
+ // This is one we would like to use - so we'll recursively get its result set to add to ours
+ Multimap<String, String> tmpMap = collectTopology4ModelVer( transId, fromAppId,
+ elVert, thisGuysTrail,
+ currentMap, thisTrailsVidsTraversed, levelCounter,
+ modConstraintHash2Use, subModelFirstModInvId, subModelFirstModVerId );
+ thisMap.putAll(tmpMap);
+ }
+ else {
+ String modelElementUuid = elVert.<String>property("model-element-uuid").orElse(null);
+ String msg = "Bad Model Definition: looping model-element (model-element-uuid = [" +
+ modelElementUuid + "]) found trying to add step: [" + tmpElStepName + "], " +
+ " on trail = [" + thisGuysTrail + "]. ";
+ System.out.println( msg );
+ throw new AAIException("AAI_6132", msg);
+ }
+ }
+ }
+ }
+
+ return thisMap;
+
+ } // End of collectTopology4ModelVer()
+
+
+ /**
+ * Check 4 edge rule.
+ *
+ * @param nodeTypeA the node type A
+ * @param nodeTypeB the node type B
+ * @throws AAIException the AAI exception
+ */
+ public void check4EdgeRule( String nodeTypeA, String nodeTypeB) throws AAIException {
+ // Throw an exception if there is no defined edge rule for this combination of nodeTypes in DbEdgeRules.
+
+ final EdgeRules edgeRules = EdgeRules.getInstance();
+
+ if( !edgeRules.hasEdgeRule(nodeTypeA, nodeTypeB)
+ && !edgeRules.hasEdgeRule(nodeTypeB, nodeTypeA) ){
+ // There's no EdgeRule for this -- find out if one of the nodeTypes is invalid or if
+ // they are valid, but there's just no edgeRule for them.
+ try {
+ loader.introspectorFromName(nodeTypeA);
+ } catch (AAIUnknownObjectException e) {
+ String emsg = " Unrecognized nodeType A [" + nodeTypeA + "]\n";
+ throw new AAIException("AAI_6115", emsg);
+ }
+ try {
+ loader.introspectorFromName(nodeTypeB);
+ } catch (AAIUnknownObjectException e) {
+ String emsg = " Unrecognized nodeType B [" + nodeTypeB + "]\n";
+ throw new AAIException("AAI_6115", emsg);
+ }
+
+ String msg = " No Edge Rule found for this pair of nodeTypes (order does not matter) ["
+ + nodeTypeA + "], [" + nodeTypeB + "].";
+ throw new AAIException("AAI_6120", msg);
+ }
+
+
+ }
+
+
+ /**
+ * Collect topology 4 linkage point.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param linkagePointStrVal the linkage-point string -- Note it is in reverse order from where we connect to it.
+ * @param incomingTrail -- trail of nodeTypes that got us here (this vertex) from the top
+ * @param currentMap the current map -- that got us to this point (that we will use as the base of the map we will return)
+ * @return Map of the topology
+ * @throws AAIException the AAI exception
+ */
+ public Multimap<String, String> collectTopology4LinkagePoint( String transId, String fromAppId,
+ String linkagePointStrVal, String incomingTrail, Multimap <String,String> currentMap)
+ throws AAIException {
+
+ Multimap <String, String> thisMap = ArrayListMultimap.create();
+ thisMap.putAll(currentMap);
+ String thisGuysTrail = incomingTrail;
+
+ // NOTE - "trails" can have multiple parts now since we track persona info for some.
+ // We just want to look at the node type info - which would be the piece
+ // before any commas (if there are any).
+
+ String [] trailSteps = thisGuysTrail.split("\\|");
+ if( trailSteps == null || trailSteps.length == 0 ){
+ throw new AAIException("AAI_6125", "Bad incomingTrail passed to collectTopology4LinkagePoint(): [" + incomingTrail + "] ");
+ }
+ String lastStepString = trailSteps[trailSteps.length - 1];
+ String [] stepPieces = lastStepString.split(",");
+ String lastStepNT = stepPieces[0];
+
+ // It is assumed that the linkagePoint string will be a pipe-delimited string where each
+ // piece is an AAIProperties.NODE_TYPE. For now, the first thing to connect to is what is on the farthest right.
+ // Example: linkagePoint = "pserver|p-interface|l-interface" would mean that we're connecting to the l-interface
+ // but that after that, we connect to a p-interface followed by a pserver.
+ // It might have been clearer to define it in the other direction, but for now, that is how it is. (16-07)
+ String linkagePointStr = linkagePointStrVal;
+
+ // We are getting these with more than one linkage point in one string.
+ // E.g. "pserver|lag-interface|l-interface, pserver|p-interface|l-interface, vlan|l-interface"
+ linkagePointStr = linkagePointStr.replace("[", "");
+ linkagePointStr = linkagePointStr.replace("]", "");
+ linkagePointStr = linkagePointStr.replace(" ", "");
+
+ String [] linkage = linkagePointStr.split("\\,");
+ for( int x = 0; x < linkage.length; x++ ){
+ lastStepNT = stepPieces[0];
+ String thisStepNT = "";
+ String [] linkageSteps = linkage[x].split("\\|");
+ if( linkageSteps == null || linkageSteps.length == 0 ){
+ throw new AAIException("AAI_6125", "Bad linkagePointStr passed to collectTopology4LinkagePoint(): [" + linkagePointStr + "] ");
+ }
+ for( int i=(linkageSteps.length - 1); i >= 0; i-- ){
+ thisStepNT = linkageSteps[i];
+ check4EdgeRule(lastStepNT, thisStepNT);
+ thisMap.put(thisGuysTrail, thisStepNT);
+ thisGuysTrail = thisGuysTrail + "|" + thisStepNT;
+ lastStepNT = thisStepNT;
+ }
+ }
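+ // Illustrative sketch (trail and linkage values are hypothetical): with incomingTrail "generic-vnf"
+ // and linkage point "pserver|p-interface|l-interface", the loop above walks the linkage steps from
+ // right to left and would add entries like:
+ //   "generic-vnf" -> "l-interface"
+ //   "generic-vnf|l-interface" -> "p-interface"
+ //   "generic-vnf|l-interface|p-interface" -> "pserver"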
+ return thisMap;
+
+ } // End of collectTopology4LinkagePoint()
+
+
+ /**
+ * Gets the next step elements from set.
+ *
+ * @param constrElemSetVtx the constr elem set vtx
+ * @return Hash of the set of model-elements this set represents
+ * @throws AAIException the AAI exception
+ */
+ public Map<String,Object> getNextStepElementsFromSet( Vertex constrElemSetVtx )
+ throws AAIException {
+ // Take a constrained-element-set and figure out the total set of all the possible elements that it
+ // represents and return them as a Hash.
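+ // Sketch of the structure walked below: constrained-element-set --(TREE)--> element-choice-set(s)
+ // --(TREE)--> model-element(s); every model-element found under every choice set is returned keyed by vertex id.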
+
+ Map<String,Object> retElementHash = new HashMap<String, Object>();
+
+ if( constrElemSetVtx == null ){
+ String msg = " getNextStepElementsFromSet() called with null constrElemSetVtx ";
+ throw new AAIException("AAI_6125", msg);
+ }
+
+ String constrNodeType = constrElemSetVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ String constrElemSetUuid = constrElemSetVtx.<String>property("constrained-element-set-uuid").orElse(null);
+ if( constrNodeType == null || !constrNodeType.equals("constrained-element-set") ){
+ String msg = " getNextStepElementsFromSet() called with wrong type model: [" + constrNodeType + "]. ";
+ throw new AAIException("AAI_6125", msg);
+ }
+
+ ArrayList <Vertex> choiceSetVertArray = new ArrayList<Vertex>();
+ Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, constrElemSetVtx, "element-choice-set");
+ int setCount = 0;
+ while( vertI != null && vertI.hasNext() ){
+ Vertex choiceSetVertex = vertI.next();
+ String constrSetType = choiceSetVertex.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( constrSetType != null && constrSetType.equals("element-choice-set") ){
+ choiceSetVertArray.add(choiceSetVertex);
+ setCount++;
+ }
+ }
+
+ if( setCount == 0 ){
+ String msg = "No element-choice-set found under constrained-element-set-uuid = " + constrElemSetUuid;
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ // Loop through each choice-set and grab the model-elements
+ for( int i = 0; i < setCount; i++ ){
+ Vertex choiceSetVert = choiceSetVertArray.get(i);
+ Iterator<Vertex> mVertI = this.traverseIncidentEdges(EdgeType.TREE, choiceSetVert, "model-element");
+ int elCount = 0;
+ while( mVertI != null && mVertI.hasNext() ){
+ Vertex tmpElVertex = mVertI.next();
+ String elNodeType = tmpElVertex.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( elNodeType != null && elNodeType.equals("model-element") ){
+ String tmpVid = tmpElVertex.id().toString();
+ retElementHash.put(tmpVid, tmpElVertex);
+ elCount++;
+ }
+ else {
+ // unsupported node type found for this choice-set
+ String msg = "Unsupported nodeType (" + elNodeType
+ + ") found under choice-set under constrained-element-set-uuid = " + constrElemSetUuid;
+ throw new AAIException("AAI_6132", msg);
+ }
+ }
+
+ if( elCount == 0 ){
+ String msg = "No model-elements found in choice-set under constrained-element-set-uuid = " + constrElemSetUuid;
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ }
+ return retElementHash;
+
+ } // End of getNextStepElementsFromSet()
+
+
+
+ /**
+ * Gen topo map 4 named Q.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param queryVertex the query vertex
+ * @param namedQueryUuid the named query uuid
+ * @return MultiMap of valid next steps for each potential query-element
+ * @throws AAIException the AAI exception
+ */
+ public Multimap<String, String> genTopoMap4NamedQ( String transId, String fromAppId,
+ Vertex queryVertex, String namedQueryUuid )
+ throws AAIException {
+
+ if( queryVertex == null ){
+ throw new AAIException("AAI_6125", "null queryVertex passed to genTopoMap4NamedQ()");
+ }
+
+ Multimap <String, String> initialEmptyMap = ArrayListMultimap.create();
+ List<String> vidsTraversed = new ArrayList<>();
+
+ Vertex firstElementVertex = null;
+ Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, queryVertex, "named-query-element");
+ int elCount = 0;
+ while( vertI != null && vertI.hasNext() ){
+ elCount++;
+ firstElementVertex = vertI.next();
+ }
+
+ if( elCount > 1 ){
+ throw new AAIException("AAI_6133", "Illegal query defined: More than one first element defined for namedQueryUuid = " + namedQueryUuid);
+ }
+
+ if( firstElementVertex == null ){
+ throw new AAIException("AAI_6114", "Could not find first query element for namedQueryUuid = " + namedQueryUuid);
+ }
+
+ Vertex modVtx = getModelThatNqElementRepresents( firstElementVertex, "" );
+ String modelType = getModelTypeFromModel( modVtx, "" );
+ if( ! modelType.equals("widget") ){
+ throw new AAIException("AAI_6133", "Bad Named Query Definition: First element must correspond to a widget type model. Named Query UUID = "
+ + namedQueryUuid);
+ }
+
+ Multimap <String, String> collectedMap = collectTopology4NamedQ( transId, fromAppId,
+ firstElementVertex, "",
+ initialEmptyMap, vidsTraversed, 0);
+
+ return collectedMap;
+
+ } // End of genTopoMap4NamedQ()
+
+
+
+ /**
+ * Collect topology 4 named Q.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param thisLevelElemVtx the named-query element vertex for this level
+ * @param incomingTrail the incoming trail -- trail of nodeTypes that got us here (this vertex) from the top
+ * @param currentMap the current map -- map that got us to this point (used as the base of the map we will return)
+ * @param vidsTraversed the vids traversed -- list of vertexId's that we traversed to get to this point
+ * @param levelCounter the level counter
+ * @return Multimap of the topology for this named-query element and below
+ * @throws AAIException the AAI exception
+ */
+ public Multimap<String, String> collectTopology4NamedQ( String transId, String fromAppId,
+ Vertex thisLevelElemVtx, String incomingTrail,
+ Multimap <String,String> currentMap, List<String> vidsTraversed, int levelCounter )
+ throws AAIException {
+
+ levelCounter++;
+
+ Multimap <String, String> thisMap = ArrayListMultimap.create();
+ thisMap.putAll(currentMap);
+
+ String thisElemVid = thisLevelElemVtx.id().toString();
+ if( levelCounter > MAX_LEVELS ) {
+ throw new AAIException("AAI_6125", "collectTopology4NamedQ() has looped across more levels than allowed: " + MAX_LEVELS + ". ");
+ }
+ String thisGuysTrail = "";
+
+ // find out what widget (and thereby what aai-node-type) this element represents
+ String thisElementNodeType = getNqElementWidgetType( transId, fromAppId, thisLevelElemVtx, incomingTrail );
+
+ if( incomingTrail.equals("") ){
+ // This is the first one
+ thisGuysTrail = thisElementNodeType;
+ }
+ else {
+ thisGuysTrail = incomingTrail + "|" + thisElementNodeType;
+ }
+
+ vidsTraversed.add(thisElemVid);
+
+ // Look at the elements pointed to at this level and add on their data
+ Iterator<Vertex> vertI = this.traverseIncidentEdges(EdgeType.TREE, thisLevelElemVtx, "named-query-element");
+ while( vertI != null && vertI.hasNext() ){
+ Vertex tmpVert = vertI.next();
+ String tmpVid = tmpVert.id().toString();
+ String tmpElNT = getNqElementWidgetType( transId, fromAppId, tmpVert, thisGuysTrail );
+ thisMap.put(thisGuysTrail, tmpElNT);
+ if( !vidsTraversed.contains(tmpVid) ){
+ // This is one we would like to use - so we'll recursively get its result set to add to ours
+ Multimap<String, String> tmpMap = collectTopology4NamedQ( transId, fromAppId,
+ tmpVert, thisGuysTrail,
+ currentMap, vidsTraversed, levelCounter);
+ thisMap.putAll(tmpMap);
+ }
+ }
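+ // Illustrative sketch of the map being built (node types are hypothetical): each key is the trail
+ // to a named-query element and each value is a widget type reachable from it, e.g.
+ //   "generic-vnf" -> "vserver", "generic-vnf|vserver" -> "pserver".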
+
+ return thisMap;
+
+ } // End of collectTopology4NamedQ()
+
+
+ /**
+ * Gets the model that NamedQuery element represents.
+ *
+ * @param elementVtx the NQ element vtx
+ * @param elementTrail the element trail
+ * @return the model that element represents
+ * @throws AAIException the AAI exception
+ */
+ public Vertex getModelThatNqElementRepresents( Vertex elementVtx, String elementTrail )
+ throws AAIException {
+
+ // Get the model that a named-query element represents
+ Vertex modVtx = null;
+ Iterator<Vertex> mvertI = this.traverseIncidentEdges(EdgeType.COUSIN, elementVtx, "model");
+ int modCount = 0;
+ while( mvertI != null && mvertI.hasNext() ){
+ modCount++;
+ modVtx = mvertI.next();
+ }
+
+ if( modCount > 1 ){
+ String msg = "Illegal element defined: More than one model pointed to by a single named-query-element at [" +
+ elementTrail + "].";
+ throw new AAIException("AAI_6125", msg);
+ }
+
+ if( modVtx == null ){
+ String msg = "Bad named-query definition: Could not find model for element. ";
+ if( !elementTrail.equals("") ){
+ msg = "Bad named-query definition: Could not find model for named-query-element at [" + elementTrail + "].";
+ }
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ String nodeType = modVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( (nodeType != null) && nodeType.equals("model") ){
+ return modVtx;
+ }
+ else {
+ String msg = "Illegal Named Query element defined: expecting a 'model', but found 'isA' edge pointing to nodeType = [" +
+ nodeType + "] at [" + elementTrail + "].";
+ throw new AAIException("AAI_6125", msg);
+ }
+
+ }// getModelThatNqElementRepresents()
+
+
+ /**
+ * Gets the model-ver that element represents.
+ *
+ * @param elementVtx the element vtx
+ * @param elementTrail the element trail
+ * @return the model-ver that element represents
+ * @throws AAIException the AAI exception
+ */
+ public Vertex getModelVerThatElementRepresents( Vertex elementVtx, String elementTrail )
+ throws AAIException {
+
+ // Get the model-ver that an element represents
+ Vertex modVerVtx = null;
+ Iterator<Vertex> mvertI = this.traverseIncidentEdges(EdgeType.COUSIN, elementVtx, "model-ver");
+ int modCount = 0;
+ while( mvertI != null && mvertI.hasNext() ){
+ modCount++;
+ modVerVtx = mvertI.next();
+ }
+
+ if( modCount > 1 ){
+ String msg = "Illegal element defined: More than one model-ver pointed to by a single model-element at [" +
+ elementTrail + "].";
+ throw new AAIException("AAI_6125", msg);
+ }
+
+ if( modVerVtx == null ){
+ String msg = "Bad model definition: Could not find model-ver for model-element. ";
+ if( !elementTrail.equals("") ){
+ msg = "Bad model definition: Could not find model-VER for model-element at [" + elementTrail + "].";
+ }
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ String nodeType = modVerVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( (nodeType != null) && nodeType.equals("model-ver") ){
+ return modVerVtx;
+ }
+ else {
+ String msg = "Illegal model-element defined: expecting a 'model-ver', but found 'isA' edge pointing to nodeType = [" +
+ nodeType + "] at [" + elementTrail + "].";
+ throw new AAIException("AAI_6125", msg);
+ }
+
+ }// getModelVerThatElementRepresents()
+
+
+
+ /**
+ * Gets the model that is parent to model-ver node.
+ *
+ * @param modVerVtx the model-ver vtx
+ * @param elementTrail the element trail
+ * @return the model that element represents
+ * @throws AAIException the AAI exception
+ */
+ public Vertex getModelGivenModelVer( Vertex modVerVtx, String elementTrail )
+ throws AAIException {
+
+ // Get the parent model for this "model-ver" node
+ Vertex modVtx = null;
+ Iterator<Vertex> mvertI = this.traverseIncidentEdges(EdgeType.TREE, modVerVtx, "model");
+ int modCount = 0;
+ while( mvertI != null && mvertI.hasNext() ){
+ modCount++;
+ modVtx = mvertI.next();
+ }
+
+ if( modCount > 1 ){
+ String msg = "Illegal model-ver node defined: More than one model points to it with a 'has' edge at [" +
+ elementTrail + "].";
+ throw new AAIException("AAI_6125", msg);
+ }
+
+ if( modVtx == null ){
+ String msg = "Bad model-ver node: Could not find parent model. ";
+ if( !elementTrail.equals("") ){
+ msg = "Bad model-ver node: Could not find parent model. [" + elementTrail + "].";
+ }
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ String nodeType = modVtx.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ if( (nodeType != null) && nodeType.equals("model") ){
+ // Found what we were looking for.
+ return modVtx;
+ }
+ else {
+ // Something is amiss
+ String msg = " Could not find parent model node for model-ver node at [" +
+ elementTrail + "].";
+ throw new AAIException("AAI_6125", msg);
+ }
+
+
+ }// getModelGivenModelVer()
+
+
+
+ /**
+ * Gets the model type.
+ *
+ * @param modelVtx the model vtx
+ * @param elementTrail the element trail
+ * @return the model type
+ * @throws AAIException the AAI exception
+ */
+ public String getModelTypeFromModel( Vertex modelVtx, String elementTrail )
+ throws AAIException {
+
+ // Get the model-type from a model vertex
+ if( modelVtx == null ){
+ String msg = " null modelVtx passed to getModelTypeFromModel() ";
+ throw new AAIException("AAI_6114", msg);
+ }
+
+ String modelType = modelVtx.<String>property("model-type").orElse(null);
+ if( (modelType == null) || modelType.equals("") ){
+ String msg = "Could not find model-type for model encountered at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ if( !modelType.equals("widget") && !modelType.equals("resource") && !modelType.equals("service") ){
+ String msg = "Unrecognized model-type, [" + modelType + "] for model pointed to by element at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ return modelType;
+
+ }// getModelTypeFromModel()
+
+
+
+ /**
+ * Gets the model type given model-ver
+ *
+ * @param modelVerVtx the model-ver vtx
+ * @param elementTrail the element trail
+ * @return the model type
+ * @throws AAIException the AAI exception
+ */
+ public String getModelTypeFromModelVer( Vertex modelVerVtx, String elementTrail )
+ throws AAIException {
+
+ // Get the model-type given a model-ver vertex
+ if( modelVerVtx == null ){
+ String msg = " null modelVerVtx passed to getModelTypeFromModelVer() ";
+ throw new AAIException("AAI_6114", msg);
+ }
+
+ Vertex modVtx = getModelGivenModelVer( modelVerVtx, elementTrail );
+ String modelType = modVtx.<String>property("model-type").orElse(null);
+ if( (modelType == null) || modelType.equals("") ){
+ String msg = "Could not find model-type for model encountered at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ if( !modelType.equals("widget") && !modelType.equals("resource") && !modelType.equals("service") ){
+ String msg = "Unrecognized model-type, [" + modelType + "] for model pointed to by element at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ return modelType;
+
+ }// getModelTypeFromModelVer()
+
+
+
+ /**
+ * Gets the model-element step name.
+ *
+ * @param elementVtx the model-element vtx
+ * @param elementTrail the element trail
+ * @return the element step name
+ * @throws AAIException the AAI exception
+ */
+ public String getModelElementStepName( Vertex elementVtx, String elementTrail)
+ throws AAIException {
+
+ // Get the "step name" for a model-element
+ // Step names look like this for widget-models: "aai-node-type"
+ // Step names look like this for resource/service models: "aai-node-type,model-invariant-id,model-version-id"
+ // NOTE -- if the element points to a resource or service model, then we'll return the
+ // widget-type of the first element (crown widget) for that model.
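+ // E.g. (hypothetical values): a widget step might just be "vserver", while a resource/service step
+ // whose crown widget supports persona might be "generic-vnf,<model-invariant-id>,<model-version-id>".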
+ String thisElementNodeType = "?";
+ Vertex modVerVtx = getModelVerThatElementRepresents( elementVtx, elementTrail );
+ String modelType = getModelTypeFromModelVer( modVerVtx, elementTrail );
+
+ if( modelType == null ){
+ String msg = " could not determine modelType in getModelElementStepName(). elementTrail = [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ if( modelType.equals("widget") ){
+ // NOTE: for models that have model-type = "widget", their "model-name" maps directly to aai-node-type
+ thisElementNodeType = modVerVtx.<String>property("model-name").orElse(null);
+ if( (thisElementNodeType == null) || thisElementNodeType.equals("") ){
+ String msg = "Could not find model-name for the widget model pointed to by element at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+ return thisElementNodeType;
+ }
+ else if( modelType.equals("resource") || modelType.equals("service") ){
+ Vertex modVtx = getModelGivenModelVer( modVerVtx, elementTrail );
+ String modInvId = modVtx.<String>property("model-invariant-id").orElse(null);
+ String modVerId = modVerVtx.<String>property("model-version-id").orElse(null);
+ Vertex relatedTopElementModelVtx = getTopElementForSvcOrResModelVer( modVerVtx, elementTrail );
+ Vertex relatedModelVtx = getModelVerThatElementRepresents( relatedTopElementModelVtx, elementTrail );
+ thisElementNodeType = relatedModelVtx.<String>property("model-name").orElse(null);
+
+ if( (thisElementNodeType == null) || thisElementNodeType.equals("") ){
+ String msg = "Could not find model-name for the widget model pointed to by element at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ String stepName = "";
+ if( nodeTypeSupportsPersona(thisElementNodeType) ){
+ // This nodeType that this resource or service model refers to does support persona-related fields, so
+ // we will use model-invariant-id and model-version-id as part of the step name.
+ stepName = thisElementNodeType + "," + modInvId + "," + modVerId;
+ }
+ else {
+ stepName = thisElementNodeType;
+ }
+ return stepName;
+ }
+ else {
+ String msg = " Unrecognized model-type = [" + modelType + "] pointed to by element at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ }// getModelElementStepName()
+
+
+
+ /**
+ * Node type supports persona.
+ *
+ * @param nodeType the node type
+ * @return the boolean
+ * @throws AAIException the AAI exception
+ */
+ public Boolean nodeTypeSupportsPersona(String nodeType)
+ throws AAIException {
+
+ if( nodeType == null || nodeType.equals("") ){
+ return false;
+ }
+ Introspector obj = null;
+ try {
+ obj = loader.introspectorFromName(nodeType);
+ } catch (AAIUnknownObjectException e) {
+ String emsg = " Unrecognized nodeType [" + nodeType + "]\n";
+ throw new AAIException("AAI_6115", emsg);
+ }
+
+ Collection <String> props4ThisNT = obj.getProperties();
+ if( !props4ThisNT.contains(addDBAliasedSuffix("model-invariant-id")) || !props4ThisNT.contains(addDBAliasedSuffix("model-version-id")) ){
+ return false;
+ }
+ else {
+ return true;
+ }
+
+ }// nodeTypeSupportsPersona()
+
+
+ /**
+ * Gets a Named Query element's widget type.
+ *
+ * @param elementVtx the named-query element vtx
+ * @param elementTrail the element trail
+ * @return the element widget type
+ * @throws AAIException the AAI exception
+ */
+ public String getNqElementWidgetType( String transId, String fromAppId,
+ Vertex elementVtx, String elementTrail )
+ throws AAIException {
+
+ String thisNqElementWidgetType = "";
+ // Get the associated node-type for the model pointed to by a named-query-element.
+ // NOTE -- if the element points to a resource or service model, then we'll return the
+ // widget-type of the first element (crown widget) for that model.
+ Vertex modVtx = getModelThatNqElementRepresents( elementVtx, elementTrail );
+ String modelType = getModelTypeFromModel( modVtx, elementTrail );
+
+ if( modelType == null || !modelType.equals("widget") ){
+ String emsg = " Model Type must be 'widget' for NamedQuery elements. Found [" + modelType + "] at [" +
+ elementTrail + "]\n";
+ throw new AAIException("AAI_6132", emsg);
+ }
+ else {
+ // For a Widget model, the nodeType is just mapped to the model-element.model-name
+ List<Vertex> modVerVtxArr = getModVersUsingModel(transId, fromAppId, modVtx);
+ if( modVerVtxArr != null && !modVerVtxArr.isEmpty() ){
+ thisNqElementWidgetType = (modVerVtxArr.get(0)).<String>property("model-name").orElse(null);
+ }
+ if( thisNqElementWidgetType == null || thisNqElementWidgetType.equals("") ){
+ String emsg = " Widget type could not be determined at [" + elementTrail + "]\n";
+ throw new AAIException("AAI_6132", emsg);
+ }
+ else {
+ return thisNqElementWidgetType;
+ }
+ }
+
+
+ }// End getNqElementWidgetType()
+
+
+ /**
+ * Gets a model-element's top widget type.
+ *
+ * @param elementVtx the model element vtx
+ * @param elementTrail the element trail
+ * @return the element widget type
+ * @throws AAIException the AAI exception
+ */
+ public String getModElementWidgetType( Vertex elementVtx, String elementTrail )
+ throws AAIException {
+
+ // Get the associated node-type for the model-ver pointed to by a model-element.
+ // NOTE -- if the element points to a resource or service model, then we'll return the
+ // widget-type of the first element (crown widget) for that model.
+ Vertex modVerVtx = getModelVerThatElementRepresents( elementVtx, elementTrail );
+ String thisElementNodeType = getModelVerTopWidgetType( modVerVtx, elementTrail );
+ return thisElementNodeType;
+
+ }// End getModElementWidgetType()
+
+
+ /**
+ * Gets the node using unique identifier
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param nodeType the nodeType
+ * @param idPropertyName the property name of the unique identifier
+ * @param uniqueIdVal the UUID value
+ * @return unique vertex found using UUID
+ * @throws AAIException the AAI exception
+ */
+ public Vertex getNodeUsingUniqueId( String transId, String fromAppId,
+ String nodeType, String idPropertyName, String uniqueIdVal )
+ throws AAIException {
+
+ // Given a unique identifier, get the Vertex
+ if( uniqueIdVal == null || uniqueIdVal.equals("") ){
+ String emsg = " Bad uniqueIdVal passed to getNodeUsingUniqueId(): ["
+ + uniqueIdVal + "]\n";
+ throw new AAIException("AAI_6118", emsg);
+ }
+
+ if( idPropertyName == null || idPropertyName.equals("") ){
+ String emsg = " Bad idPropertyName passed to getNodeUsingUniqueId(): ["
+ + idPropertyName + "]\n";
+ throw new AAIException("AAI_6118", emsg);
+ }
+
+ if( nodeType == null || nodeType.equals("") ){
+ String emsg = " Bad nodeType passed to getNodeUsingUniqueId(): ["
+ + nodeType + "]\n";
+ throw new AAIException("AAI_6118", emsg);
+ }
+
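+ // Hedged usage sketch (the uuid value is hypothetical): callers in this class invoke this as, e.g.,
+ // getNodeUsingUniqueId(transId, fromAppId, "model-ver", "model-version-id", someUuid), and the
+ // traversal below is expected to resolve that to exactly one vertex.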
+ Vertex uniqVtx = null;
+ Iterable <?> uniqVerts = engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE,nodeType).has(idPropertyName,uniqueIdVal).toList();
+ if( uniqVerts == null ){
+ String emsg = "Node could not be found for nodeType = [" + nodeType
+ + "], propertyName = [" + idPropertyName
+ + "], propertyValue = [" + uniqueIdVal + "]\n";
+ throw new AAIException("AAI_6114", emsg);
+ }
+ else {
+ int count = 0;
+ Iterator <?> uniqVertsIter = uniqVerts.iterator();
+ if( !uniqVertsIter.hasNext() ){
+ String emsg = "Node could not be found for nodeType = [" + nodeType
+ + "], propertyName = [" + idPropertyName
+ + "], propertyValue = [" + uniqueIdVal + "]\n";
+ throw new AAIException("AAI_6114", emsg);
+ }
+ else {
+ while( uniqVertsIter.hasNext() ){
+ count++;
+ uniqVtx = (Vertex) uniqVertsIter.next();
+ if( count > 1 ){
+ String emsg = "More than one node found for nodeType = [" + nodeType
+ + "], propertyName = [" + idPropertyName
+ + "], propertyValue = [" + uniqueIdVal + "]\n";
+ throw new AAIException("AAI_6132", emsg);
+ }
+ }
+ }
+ }
+
+ return uniqVtx;
+ }// End getNodeUsingUniqueId()
+
+
+ /**
+ * Gets the model-ver nodes using name.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modelName the model name
+ * @return the model-ver's that use this name
+ * @throws AAIException the AAI exception
+ */
+ public List<Vertex> getModelVersUsingName( String transId, String fromAppId,
+ String modelName )
+ throws AAIException {
+
+ // Given a "model-name", find the model-ver vertices that this maps to
+ if( modelName == null || modelName.equals("") ){
+ String emsg = " Bad modelName passed to getModelVersUsingName(): ["
+ + modelName + "]\n";
+ throw new AAIException("AAI_6118", emsg);
+ }
+
+ List<Vertex> retVtxArr = new ArrayList<>();
+ Iterator<Vertex> modVertsIter = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE,"model-ver").has("model-name",modelName);
+ if( !modVertsIter.hasNext() ){
+ String emsg = "Model-ver record(s) could not be found for model-ver data passed. model-name = [" +
+ modelName + "]\n";
+ throw new AAIException("AAI_6132", emsg);
+ }
+ else {
+ while( modVertsIter.hasNext() ){
+ Vertex tmpModelVerVtx = (Vertex) modVertsIter.next();
+ retVtxArr.add(tmpModelVerVtx);
+ }
+ }
+
+ return retVtxArr;
+
+ }// End getModelVersUsingName()
+
+
+ /**
+ * Gets the model-ver nodes using model-invariant-id.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modelInvId the model-invariant-id (uniquely identifies a model)
+ * @return the model-ver's defined for the corresponding model
+ * @throws AAIException the AAI exception
+ */
+ public Iterator<Vertex> getModVersUsingModelInvId( String transId, String fromAppId,
+ String modelInvId )
+ throws AAIException {
+
+ // Given a "model-invariant-id", find the model-ver nodes that this maps to
+ if( modelInvId == null || modelInvId.equals("") ){
+ String emsg = " Bad model-invariant-id passed to getModVersUsingModelInvId(): ["
+ + modelInvId + "]\n";
+ throw new AAIException("AAI_6118", emsg);
+ }
+
+ Vertex modVtx = getNodeUsingUniqueId(transId, fromAppId, "model", "model-invariant-id", modelInvId);
+ List<Vertex> retVtxArr = getModVersUsingModel(transId, fromAppId, modVtx);
+ if( retVtxArr == null || retVtxArr.isEmpty() ){
+ String emsg = " Model-ver record(s) could not be found attached to model with model-invariant-id = [" +
+ modelInvId + "]\n";
+ throw new AAIException("AAI_6132", emsg);
+ }
+
+ return retVtxArr.iterator();
+ }// End getModVersUsingModelInvId()
+
+
+ /**
+ * Gets the model-ver nodes using a model node.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modVtx the model vertex
+ * @return the model-ver's defined for the corresponding model
+ * @throws AAIException the AAI exception
+ */
+ public List<Vertex> getModVersUsingModel( String transId, String fromAppId,
+ Vertex modVtx )
+ throws AAIException {
+
+ if( modVtx == null ){
+ String emsg = " Null model vertex passed to getModVersUsingModel(): ";
+ throw new AAIException("AAI_6118", emsg);
+ }
+
+ List<Vertex> retVtxArr = new ArrayList<>();
+ Iterator<Vertex> modVerVertsIter = this.traverseIncidentEdges(EdgeType.TREE, modVtx, "model-ver");
+ if(!modVerVertsIter.hasNext()){
+ String modelInvId = modVtx.<String>property("model-invariant-id").orElse(null);
+ String emsg = "Model-ver record(s) could not be found attached to model with model-invariant-id = [" +
+ modelInvId + "]\n";
+ throw new AAIException("AAI_6132", emsg);
+ }
+ else {
+ while( modVerVertsIter.hasNext() ){
+ Vertex tmpModelVtx = (Vertex) modVerVertsIter.next();
+ retVtxArr.add(tmpModelVtx);
+ }
+ }
+
+ return retVtxArr;
+
+ }// End getModVersUsingModel()
+
+ /**
+ * Gets the model-version-ids using model-name.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modelName the model name
+ * @return the model uuids using name
+ * @throws AAIException the AAI exception
+ */
+ public List<String> getModelVerIdsUsingName( String transId, String fromAppId,
+ String modelName )
+ throws AAIException {
+
+ // Given a model-name find the model-ver nodes that it maps to
+ if( modelName == null || modelName.equals("") ){
+ String emsg = " Bad modelName passed to getModelVerIdsUsingName(): ["
+ + modelName + "]\n";
+ throw new AAIException("AAI_6118", emsg);
+ }
+
+ List<String> retArr = new ArrayList<>();
+ Iterator<Vertex> modVerVertsIter = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE,"model-ver").has("model-name",modelName);
+ if( !modVerVertsIter.hasNext() ){
+ String emsg = " model-ver record(s) could not be found for model data passed. model-name = [" +
+ modelName + "]\n";
+ throw new AAIException("AAI_6114", emsg);
+ }
+ else {
+ while( modVerVertsIter.hasNext() ){
+ Vertex modelVerVtx = (Vertex) modVerVertsIter.next();
+ String tmpUuid = modelVerVtx.<String>property("model-version-id").orElse(null);
+ if( (tmpUuid != null) && !tmpUuid.equals("") && !retArr.contains(tmpUuid) ){
+ retArr.add(tmpUuid);
+ }
+ }
+ }
+
+ if( retArr.isEmpty() ){
+ String emsg = "No model-ver record found for model-name = ["
+ + modelName + "]\n";
+ throw new AAIException("AAI_6132", emsg);
+ }
+
+ return retArr;
+ }// End getModelVerIdsUsingName()
+
+
+ /**
+ * Gets the model top widget type.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modelVersionId the model-version-id
+ * @param modelInvId the model-invariant-id
+ * @param modelName the model-name
+ * @return the model top widget type
+ * @throws AAIException the AAI exception
+ */
+ public String getModelVerTopWidgetType( String transId, String fromAppId,
+ String modelVersionId, String modelInvId, String modelName )
+ throws AAIException {
+
+ // Could be given a model-ver's key info (model-version-id), OR, just a (non-unique) model-name,
+ // Or just a model-invariant-id (which could have multiple model-ver records under it).
+ // In any case, they should only map to one single "top" node-type for the first element.
+
+ String nodeType = "?";
+ Iterator<Vertex> modVerVertsIter;
+
+ if( modelVersionId != null && !modelVersionId.equals("") ){
+ // this would be the best - we can just look up the model-ver records directly
+ modVerVertsIter = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE,"model-ver").has("model-version-id",modelVersionId);
+ }
+ else if( modelName != null && !modelName.equals("") ){
+ modVerVertsIter = this.engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE,"model-ver").has("model-name",modelName);
+ }
+ else if( modelInvId != null && !modelInvId.equals("") ){
+ modVerVertsIter = getModVersUsingModelInvId(transId, fromAppId, modelInvId);
+ }
+ else {
+ String msg = "Neither modelVersionId, modelInvariantId, nor modelName passed to: getModelVerTopWidgetType() ";
+ throw new AAIException("AAI_6120", msg);
+ }
+
+ if( !modVerVertsIter.hasNext() ){
+ String emsg = "model-ver record(s) could not be found for model data passed: modelInvariantId = [" + modelInvId +
+ "], modelVersionId = [" + modelVersionId + "], modelName = [" + modelName + "]\n";
+ throw new AAIException("AAI_6114", emsg);
+ }
+ else {
+ String lastNT = "";
+ while( modVerVertsIter.hasNext() ){
+ Vertex tmpModVerVtx = (Vertex) modVerVertsIter.next();
+ String tmpNT = getModelVerTopWidgetType( tmpModVerVtx, "" );
+ if( lastNT != null && !lastNT.equals("") ){
+ if( !lastNT.equals(tmpNT) ){
+ String emsg = "Different top-node-types (" + tmpNT + ", " + lastNT
+ + ") found for model data passed. (" +
+ " modelVersionId = [" + modelVersionId +
+ "], modelId = [" + modelInvId +
+ "], modelName = [" + modelName +
+ "])\n";
+ throw new AAIException("AAI_6114", emsg);
+ }
+ }
+ lastNT = tmpNT;
+ nodeType = tmpNT;
+ }
+ }
+
+ return nodeType;
+
+ }// End getModelVerTopWidgetType()
+
+
+ /**
+ * Gets the widget type that this model-ver starts with.
+ *
+ * @param modVerVtx the model-version vtx
+ * @param elementTrail the element trail
+ * @return the widget type of the starting node of this model
+ * @throws AAIException the AAI exception
+ */
+ public String getModelVerTopWidgetType( Vertex modVerVtx, String elementTrail )
+ throws AAIException {
+ // Get the associated nodeType (Ie. aai-node-type / widget-type) for a model-ver.
+ // NOTE -- if the element points to a resource or service model, then we'll return the
+ // widget-type of the first element (crown widget) for that model.
+ String modelType = getModelTypeFromModelVer( modVerVtx, elementTrail );
+ if( modelType == null ){
+ String msg = " Could not determine modelType in getModelVerTopWidgetType(). elementTrail = [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ String thisElementNodeType = "?";
+ if( modelType.equals("widget") ){
+ // NOTE: for models that have model-type = "widget", their child model-ver nodes will
+ // have "model-name" which maps directly to aai-node-type (all model-ver's under one
+ // model should start with the same widget-type, so we only need to look at one).
+ thisElementNodeType = modVerVtx.<String>property("model-name").orElse(null);
+ if( (thisElementNodeType == null) || thisElementNodeType.equals("") ){
+ String msg = "Could not find model-name for the widget model pointed to by element at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+ }
+ else if( modelType.equals("resource") || modelType.equals("service") ){
+ Vertex relatedTopElementVtx = getTopElementForSvcOrResModelVer( modVerVtx, elementTrail );
+ Vertex relatedModVerVtx = getModelVerThatElementRepresents( relatedTopElementVtx, elementTrail );
+ thisElementNodeType = relatedModVerVtx.<String>property("model-name").orElse(null);
+ if( (thisElementNodeType == null) || thisElementNodeType.equals("") ){
+ String msg = "Could not find model-name for the widget model pointed to by element at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+ }
+ else {
+ String msg = " Unrecognized model-type = [" + modelType + "] pointed to by element at [" + elementTrail + "].";
+ throw new AAIException("AAI_6132", msg);
+ }
+
+ return thisElementNodeType;
+
+ }// getModelVerTopWidgetType()
+
+
+ /**
+ * Validate model.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param modelVersionIdVal the model-version-id
+ * @param apiVersion the api version
+ * @throws AAIException the AAI exception
+ */
+ public void validateModel(String transId, String fromAppId, String modelVersionIdVal, String apiVersion )
+ throws AAIException{
+
+ // Note - this will throw an exception if the model either can't be found, or if
+ // we can't figure out its topology map.
+ Vertex modelVerVtx = getNodeUsingUniqueId(transId, fromAppId, "model-ver",
+ "model-version-id", modelVersionIdVal);
+ if( modelVerVtx == null ){
+ String msg = " Could not find model-ver with modelVersionId = [" + modelVersionIdVal + "].";
+ throw new AAIException("AAI_6114", msg);
+ }
+ else {
+ Multimap<String, String> topoMap = genTopoMap4ModelVer( transId, fromAppId,
+ modelVerVtx, modelVersionIdVal);
+ String msg = " modelVer [" + modelVersionIdVal + "] topo multiMap looks like: \n[" + topoMap + "]";
+ System.out.println("INFO -- " + msg );
+ }
+ return;
+
+ }// End validateModel()
+
+
+ /**
+ * Validate named query.
+ *
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param namedQueryUuid the named query uuid
+ * @param apiVersion the api version
+ * @throws AAIException the AAI exception
+ */
+ public void validateNamedQuery(String transId, String fromAppId, String namedQueryUuid, String apiVersion )
+ throws AAIException{
+
+ // Note - this will throw an exception if the named query either can't be found, or if
+ // we can't figure out its topology map.
+ Vertex nqVtx = getNodeUsingUniqueId(transId, fromAppId, "named-query",
+ "named-query-uuid", namedQueryUuid);
+
+ if( nqVtx == null ){
+ String msg = " Could not find named-query with namedQueryUuid = [" + namedQueryUuid + "].";
+ throw new AAIException("AAI_6114", msg);
+ }
+ else {
+ //Multimap<String, String> topoMap = genTopoMap4NamedQ( "junkTransId", "junkFromAppId",
+ // graph, nqVtx, namedQueryUuid );
+ //System.out.println("DEBUG -- for test only : --- ");
+ //System.out.println("DEBUG -- topomap = [" + topoMap + "]");
+ }
+ return;
+
+ }// End validateNamedQuery()
+
+
+ /**
+ * Show result set.
+ *
+ * @param resSet the res set
+ * @param levelCount the level count
+ */
+ public void showResultSet( ResultSet resSet, int levelCount ) {
+
+ levelCount++;
+ String propsStr = "";
+ for( int i= 1; i <= levelCount; i++ ){
+ propsStr = propsStr + "-";
+ }
+ if( resSet.getVert() == null ){
+ return;
+ }
+ String nt = resSet.getVert().<String>property(AAIProperties.NODE_TYPE).orElse(null);
+ propsStr = propsStr + "[" + nt + "] ";
+
+ //propsStr = propsStr + " newDataDelFlag = " + resSet.getNewDataDelFlag() + ", trail = " + resSet.getLocationInModelSubGraph();
+ //propsStr = propsStr + "limitDesc = [" + resSet.getPropertyLimitDesc() + "]";
+ propsStr = propsStr + " trail = " + resSet.getLocationInModelSubGraph();
+
+ Map<String,Object> overrideHash = resSet.getPropertyOverRideHash();
+ if( overrideHash != null && !overrideHash.isEmpty() ){
+ for( Map.Entry<String, Object> entry : overrideHash.entrySet() ){
+ String propName = entry.getKey();
+ Object propVal = entry.getValue();
+ propsStr = propsStr + " [" + propName + " = " + propVal + "]";
+ }
+ }
+ else {
+ Iterator<VertexProperty<Object>> pI = resSet.getVert().properties();
+ while( pI.hasNext() ){
+ VertexProperty<Object> tp = pI.next();
+ if( ! tp.key().startsWith("aai")
+ && ! tp.key().equals("source-of-truth")
+ //&& ! tp.key().equals("resource-version")
+ && ! tp.key().startsWith("last-mod")
+ )
+ {
+ propsStr = propsStr + " [" + tp.key() + " = " + tp.value() + "]";
+ }
+ }
+ }
+ // Show the "extra" lookup values too
+ Map<String,Object> extraPropHash = resSet.getExtraPropertyHash();
+ if( extraPropHash != null && !extraPropHash.isEmpty() ){
+ for( Map.Entry<String, Object> entry : extraPropHash.entrySet() ){
+ String propName = entry.getKey();
+ Object propVal = entry.getValue();
+ propsStr = propsStr + " [" + propName + " = " + propVal.toString() + "]";
+ }
+ }
+
+ System.out.println( propsStr );
+ LOGGER.info(propsStr);
+
+ if( !resSet.getSubResultSet().isEmpty() ){
+ ListIterator<ResultSet> listItr = resSet.getSubResultSet().listIterator();
+ while( listItr.hasNext() ){
+ showResultSet( listItr.next(), levelCount );
+ }
+ }
+
+ }// end of showResultSet()
+
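+ // Illustrative usage (hypothetical call): traverseIncidentEdges(EdgeType.TREE, modelVtx, "model-ver")
+ // would iterate the model-ver vertices tree-connected to a model vertex.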
+ private Iterator<Vertex> traverseIncidentEdges(EdgeType treeType, Vertex startV, String connectedNodeType) throws AAIUnknownObjectException, AAIException {
+ QueryBuilder builder = this.engine.getQueryBuilder(startV).createEdgeTraversal(treeType, startV, loader.introspectorFromName(connectedNodeType));
+ return builder;
+ }
+
+ private Iterator<Vertex> traverseIncidentEdges(EdgeType treeType, Vertex startV, String... connectedNodeType) throws AAIUnknownObjectException, AAIException {
+ QueryBuilder[] builders = new QueryBuilder[connectedNodeType.length];
+ for (int i = 0; i < connectedNodeType.length; i++) {
+ builders[i] = this.engine.getQueryBuilder(startV).createEdgeTraversal(treeType, startV, loader.introspectorFromName(connectedNodeType[i]));
+ }
+ QueryBuilder builder = this.engine.getQueryBuilder(startV).union(builders);
+ return builder;
+ }
+
+ private String addDBAliasedSuffix(String propName) {
+ return propName + AAIProperties.DB_ALIAS_SUFFIX;
+ }
+
+ protected String getPropNameWithAliasIfNeeded(String nodeType, String propName) throws AAIUnknownObjectException {
+
+ String retPropName = propName;
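+ // Illustrative only: if "model-version-id" carried a dbAlias in the schema, the graph property queried
+ // would be "model-version-id" + DB_ALIAS_SUFFIX; actual aliases come from the loaded OXM metadata.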
+ if( loader.introspectorFromName(nodeType).getPropertyMetadata(propName, PropertyMetadata.DB_ALIAS).isPresent() ){
+ return propName + AAIProperties.DB_ALIAS_SUFFIX;
+ }
+ return retPropName;
+ }
+
+}
+
diff --git a/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ResultSet.java b/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ResultSet.java
new file mode 100644
index 0000000..9759f23
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/dbgraphgen/ResultSet.java
@@ -0,0 +1,169 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.dbgraphgen;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.dbgen.PropertyLimitDesc;
+
+public class ResultSet {
+ private Vertex vert;
+ private String newDataDelFlag;
+ private String doNotOutputFlag;
+ private String locationInModelSubGraph;
+ private List<ResultSet> subResultSet;
+ private PropertyLimitDesc propertyLimitDesc;
+ private Map<String,Object> propertyOverRideHash;
+ private Map<String,Object> extraPropertyHash;
+
+ /**
+ * Instantiates a new result set.
+ */
+ public ResultSet(){
+ this.vert = null;
+ this.newDataDelFlag = "";
+ this.doNotOutputFlag = "";
+ this.locationInModelSubGraph = "";
+ this.subResultSet = new ArrayList<>();
+ this.propertyLimitDesc = null;
+ this.propertyOverRideHash = new HashMap<>();
+ this.extraPropertyHash = new HashMap<>();
+ }
+
+
+ public void setPropertyLimitDesc(PropertyLimitDesc pld) {
+ this.propertyLimitDesc = pld;
+ }
+
+ /**
+ * Gets the vert.
+ *
+ * @return the vert
+ */
+ public Vertex getVert(){
+ return this.vert;
+ }
+
+ /**
+ * Gets the sub result set.
+ *
+ * @return the sub result set
+ */
+ public List<ResultSet> getSubResultSet(){
+ return this.subResultSet;
+ }
+
+ /**
+ * Gets the new data del flag.
+ *
+ * @return the new data del flag
+ */
+ public String getNewDataDelFlag(){
+ return this.newDataDelFlag;
+ }
+
+ /**
+ * Gets the do not output flag.
+ *
+ * @return the do not output flag
+ */
+ public String getDoNotOutputFlag(){
+ return this.doNotOutputFlag;
+ }
+
+ /**
+ * Gets the location in model sub graph.
+ *
+ * @return the location in model sub graph
+ */
+ public String getLocationInModelSubGraph(){
+ return this.locationInModelSubGraph;
+ }
+
+ /**
+ * Gets the property limit desc.
+ *
+ * @return the property limit desc
+ */
+ public PropertyLimitDesc getPropertyLimitDesc(){
+ return this.propertyLimitDesc;
+ }
+
+ /**
+ * Gets the property over ride hash.
+ *
+ * @return the property over ride hash
+ */
+ public Map<String,Object> getPropertyOverRideHash(){
+ return this.propertyOverRideHash;
+ }
+
+ /**
+ * Gets the extra property hash.
+ *
+ * @return the extra property hash
+ */
+ public Map<String,Object> getExtraPropertyHash(){
+ return this.extraPropertyHash;
+ }
+
+
+ public void setVert(Vertex vert) {
+ this.vert = vert;
+ }
+
+
+ public void setNewDataDelFlag(String newDataDelFlag) {
+ this.newDataDelFlag = newDataDelFlag;
+ }
+
+
+ public void setDoNotOutputFlag(String doNotOutputFlag) {
+ this.doNotOutputFlag = doNotOutputFlag;
+ }
+
+
+ public void setLocationInModelSubGraph(String locationInModelSubGraph) {
+ this.locationInModelSubGraph = locationInModelSubGraph;
+ }
+
+
+ public void setSubResultSet(List<ResultSet> subResultSet) {
+ this.subResultSet = subResultSet;
+ }
+
+
+ public void setPropertyOverRideHash(Map<String, Object> propertyOverRideHash) {
+ this.propertyOverRideHash = propertyOverRideHash;
+ }
+
+
+ public void setExtraPropertyHash(Map<String, Object> extraPropertyHash) {
+ this.extraPropertyHash = extraPropertyHash;
+ }
+
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/dbgraphmap/SearchGraph.java b/aai-traversal/src/main/java/org/onap/aai/dbgraphmap/SearchGraph.java
new file mode 100644
index 0000000..ab986a2
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/dbgraphmap/SearchGraph.java
@@ -0,0 +1,1143 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.dbgraphmap;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Stream;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilderException;
+import javax.xml.bind.JAXBException;
+
+import org.apache.tinkerpop.gremlin.process.traversal.P;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.eclipse.persistence.dynamic.DynamicEntity;
+import org.eclipse.persistence.dynamic.DynamicType;
+import org.eclipse.persistence.exceptions.DynamicException;
+import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext;
+import org.onap.aai.db.DbMethHelper;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbgen.PropertyLimitDesc;
+import org.onap.aai.dbgraphgen.ModelBasedProcessing;
+import org.onap.aai.dbgraphgen.ResultSet;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.extensions.AAIExtensionMap;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.introspection.MoxyLoader;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.parsers.relationship.RelationshipToURI;
+import org.onap.aai.query.builder.QueryBuilder;
+import org.onap.aai.schema.enums.ObjectMetadata;
+import org.onap.aai.schema.enums.PropertyMetadata;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.db.EdgeRule;
+import org.onap.aai.serialization.db.EdgeRules;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TitanDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.queryformats.exceptions.AAIFormatVertexException;
+import org.onap.aai.serialization.queryformats.utils.UrlBuilder;
+import org.onap.aai.util.StoreNotificationEvent;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.google.common.base.CaseFormat;
+
+import edu.emory.mathcs.backport.java.util.Collections;
+
+/**
+ * Database mapping class which acts as the middleman between the REST interface objects
+ * and the graph database for the Search namespace.
+ */
+public class SearchGraph {
+
+ private final String COMPONENT = "aaidbmap";
+ private AAIExtensionMap aaiExtMap;
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(SearchGraph.class);
+ /**
+ * Get the search result based on the includeNodeTypes and depth provided.
+ *
+ * @param headers the http headers
+ * @param startNodeType the start node type
+ * @param startNodeKeyParams the start node key params
+ * @param includeNodeTypes the include node types
+ * @param depth the depth
+ * @param dbEngine the transactional graph engine
+ * @param loader the loader
+ * @param urlBuilder the url builder
+ * @return Response
+ * @throws AAIException the AAI exception
+ */
+ public Response runGenericQuery (
+ HttpHeaders headers,
+ String startNodeType,
+ List <String> startNodeKeyParams,
+ List <String> includeNodeTypes,
+ final int depth,
+ TransactionalGraphEngine dbEngine,
+ Loader loader,
+ UrlBuilder urlBuilder) throws AAIException {
+ Response response = null;
+ boolean success = true;
+ String result = "";
+ try {
+ dbEngine.startTransaction();
+
+ if( startNodeType == null ){
+ throw new AAIException("AAI_6120", "null start-node-type passed to the generic query");
+ }
+
+ if( startNodeKeyParams == null ){
+ throw new AAIException("AAI_6120", "no key param passed to the generic query");
+ }
+
+ if( includeNodeTypes == null ){
+ throw new AAIException("AAI_6120", "no include params passed to the generic query");
+ }
+
+ if (depth > 6) {
+ throw new AAIException("AAI_6120", "The maximum depth supported by the generic query is 6");
+ }
+ final QueryBuilder queryBuilder;
+
+ // there is an issue with service-instance - it is a unique node but still dependent
+ // for now query it directly without attempting to craft a valid URI
+ if (startNodeType.equalsIgnoreCase("service-instance") && startNodeKeyParams.size() == 1) {
+ Introspector obj = loader.introspectorFromName(startNodeType);
+ // Build a hash with keys to uniquely identify the start Node
+ String keyName = null;
+ String keyValue = null;
+
+ QueryBuilder builder = dbEngine.getQueryBuilder().getVerticesByIndexedProperty(AAIProperties.NODE_TYPE, "service-instance");
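+ // Each key param is expected to look like "<node-type>.<key-name>:<value>",
+ // e.g. "service-instance.service-instance-id:example-id" (illustrative value).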
+ for( String keyData : startNodeKeyParams ){
+ int colonIndex = keyData.indexOf(":");
+ if( colonIndex <= 0 ){
+ throw new AAIException("AAI_6120", "Bad key param passed in: [" + keyData + "]");
+ }
+ else {
+ keyName = keyData.substring(0, colonIndex).split("\\.")[1];
+ keyValue = keyData.substring(colonIndex + 1);
+ builder.getVerticesByProperty(keyName, keyValue);
+ }
+ }
+
+ queryBuilder = builder;
+ } else {
+ URI uri = craftUriFromQueryParams(loader, startNodeType, startNodeKeyParams);
+ queryBuilder = dbEngine.getQueryBuilder().createQueryFromURI(uri).getQueryBuilder();
+ }
+ List<Vertex> results = queryBuilder.toList();
+ if( results.isEmpty()){
+ throw new AAIException("AAI_6114", "No Node of type " +
+ startNodeType +
+ " found for properties: " +
+ startNodeKeyParams.toString());
+ } else if (results.size() > 1) {
+ String detail = "More than one Node found by getUniqueNode for params: " + startNodeKeyParams.toString() + "\n";
+ throw new AAIException("AAI_6112", detail);
+ }
+
+ Vertex startNode = results.get(0);
+
+ Collection <Vertex> ver = new HashSet <>();
+ List<Vertex> queryResults = new ArrayList<>();
+ GraphTraversalSource traversalSource = dbEngine.asAdmin().getReadOnlyTraversalSource();
+ GraphTraversal<Vertex, Vertex> traversal;
+ if (includeNodeTypes.contains(startNodeType) || depth == 0 || includeNodeTypes.contains("all") ) {
+ ver.add(startNode);
+ }
+
+ // Now look for a node of includeNodeType within a given depth
+ traversal = traversalSource.withSideEffect("x", ver).V(startNode)
+ .times(depth).repeat(__.both().store("x")).cap("x").unfold();
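+ // i.e. collect the seeded start node plus every vertex reachable within "depth" hops into side-effect "x",
+ // then (below) keep only the requested node types unless "all" was asked for.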
+
+ if (!includeNodeTypes.contains("all")) {
+ traversal.where(__.has(AAIProperties.NODE_TYPE, P.within(includeNodeTypes)));
+ }
+ queryResults = traversal.toList();
+
+
+ if( queryResults.isEmpty()){
+ LOGGER.warn("No nodes found - query result was null/empty");
+ }
+ else {
+
+ Introspector searchResults = createSearchResults(loader, urlBuilder, queryResults);
+
+ String outputMediaType = getMediaType(headers.getAcceptableMediaTypes());
+ org.onap.aai.introspection.MarshallerProperties properties = new org.onap.aai.introspection.MarshallerProperties.Builder(
+ org.onap.aai.restcore.MediaType.getEnum(outputMediaType)).build();
+
+ result = searchResults.marshal(properties);
+ response = Response.ok().entity(result).build();
+
+ LOGGER.debug(ver.size() + " node(s) traversed, " + queryResults.size() + " found");
+ }
+ success = true;
+ } catch (AAIException e) {
+ success = false;
+ throw e;
+ } catch (Exception e) {
+ success = false;
+ throw new AAIException("AAI_5105", e);
+ } finally {
+ if (dbEngine != null) {
+ if (success) {
+ dbEngine.commit();
+ } else {
+ dbEngine.rollback();
+ }
+ }
+
+ }
+
+ return response;
+ }
+
+ private URI craftUriFromQueryParams(Loader loader, String startNodeType, List<String> startNodeKeyParams) throws UnsupportedEncodingException, IllegalArgumentException, UriBuilderException, AAIException {
+ Introspector relationship = loader.introspectorFromName("relationship");
+
+ relationship.setValue("related-to", startNodeType);
+ List<Object> relationshipDataList = relationship.getValue("relationship-data");
+
+ for( String keyData : startNodeKeyParams ){
+ int colonIndex = keyData.indexOf(":");
+ if( colonIndex <= 0 ){
+ throw new AAIException("AAI_6120", "Bad key param passed in: [" + keyData + "]");
+ }
+ else {
+ Introspector data = loader.introspectorFromName("relationship-data");
+ data.setValue("relationship-key", keyData.substring(0, colonIndex));
+ data.setValue("relationship-value", keyData.substring(colonIndex + 1));
+ relationshipDataList.add(data.getUnderlyingObject());
+ }
+ }
+
+ RelationshipToURI parser = new RelationshipToURI(loader, relationship);
+
+ return parser.getUri();
+ }
+
+ /**
+ * Run nodes query.
+ *
+ * @param headers the http headers
+ * @param targetNodeType the target node type
+ * @param edgeFilterParams the edge filter params
+ * @param filterParams the filter params
+ * @param dbEngine the transactional graph engine
+ * @param loader the loader
+ * @param urlBuilder the url builder
+ * @return Response
+ * @throws AAIException the AAI exception
+ */
+ public Response runNodesQuery (
+ HttpHeaders headers,
+ String targetNodeType,
+ List <String> edgeFilterParams,
+ List <String> filterParams,
+ TransactionalGraphEngine dbEngine,
+ Loader loader,
+ UrlBuilder urlBuilder) throws AAIException {
+
+ Response response = null;
+ boolean success = true;
+ String result = "";
+ final String EQUALS = "EQUALS";
+ final String DOES_NOT_EQUAL = "DOES-NOT-EQUAL";
+ final String EXISTS = "EXISTS";
+ final String DOES_NOT_EXIST = "DOES-NOT-EXIST";
+ try {
+
+ dbEngine.startTransaction();
+
+ Introspector target;
+
+ if( targetNodeType == null || targetNodeType.isEmpty() ){
+ throw new AAIException("AAI_6120", "null or empty target-node-type passed to the node query");
+ }
+
+ try {
+ target = loader.introspectorFromName(targetNodeType);
+ } catch (AAIUnknownObjectException e) {
+ throw new AAIException("AAI_6115", "Unrecognized nodeType [" + targetNodeType + "] passed to node query.");
+ }
+
+ if( filterParams.isEmpty() && edgeFilterParams.isEmpty()){
+ // For now, it's ok to pass no filter params. We'll just return ALL the nodes of the requested type.
+ LOGGER.warn("No filters passed to the node query");
+ }
+
+ StringBuilder queryStringForMsg = new StringBuilder();
+ GraphTraversal<Vertex, Vertex> traversal = dbEngine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE, targetNodeType);
+ queryStringForMsg.append("has(\"aai-node-type\"," + targetNodeType + ")");
+
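+ // Each filter is expected to look like "<prop-name>:<filter-type>[:<value>]",
+ // e.g. "hostname:EQUALS:examplehost" or "in-maint:EXISTS:" (illustrative values).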
+ for( String filter : filterParams ) {
+ String [] pieces = filter.split(":");
+ if( pieces.length < 2 ){
+ throw new AAIException("AAI_6120", "bad filter passed to node query: [" + filter + "]");
+ }
+ else {
+ String propName = this.findDbPropName(target, pieces[0]);
+ String filterType = pieces[1];
+ if( filterType.equals(EQUALS)){
+ if( pieces.length < 3 ){
+ throw new AAIException("AAI_6120", "No value passed for filter: [" + filter + "]");
+ }
+ String value = "?";
+ if( pieces.length == 3 ){
+ value = pieces[2];
+ }
+ else if( pieces.length > 3 ){
+ // When a ipv6 address comes in as a value, it has colons in it which require us to
+ // pull the "value" off the end of the filter differently
+ int startPos4Value = propName.length() + filterType.length() + 3;
+ value = filter.substring(startPos4Value);
+ }
+ queryStringForMsg.append(".has(" + propName + "," + value + ")");
+ traversal.has(propName,value);
+ }
+ else if( filterType.equals(DOES_NOT_EQUAL)){
+ if( pieces.length < 3 ){
+ throw new AAIException("AAI_6120", "No value passed for filter: [" + filter + "]");
+ }
+ String value = "?";
+ if( pieces.length == 3 ){
+ value = pieces[2];
+ }
+ else if( pieces.length > 3 ){
+ // When a ipv6 address comes in as a value, it has colons in it which require us to
+ // pull the "value" off the end of the filter differently
+ int startPos4Value = propName.length() + filterType.length() + 3;
+ value = filter.substring(startPos4Value);
+ }
+ queryStringForMsg.append(".hasNot(" + propName + "," + value + ")");
+ traversal.not(__.has(propName,value));
+ }
+ else if( filterType.equals(EXISTS)){
+ queryStringForMsg.append(".has(" + propName + ")");
+ traversal.has(propName);
+ }
+ else if( filterType.equals(DOES_NOT_EXIST)){
+ queryStringForMsg.append(".hasNot(" + propName + ")");
+ traversal.hasNot(propName);
+ }
+ else {
+ throw new AAIException("AAI_6120", "bad filterType passed: [" + filterType + "]");
+ }
+ }
+ }
+
+ if (!edgeFilterParams.isEmpty()) {
+ // edge-filter=pserver:EXISTS: OR pserver:EXISTS:hostname:XXX
+ // edge-filter=pserver:DOES-NOT-EXIST: OR pserver:DOES-NOT-EXIST:hostname:XXX
+ String filter = edgeFilterParams.get(0); // we process and allow only one edge filter for now
+ String [] pieces = filter.split(":");
+ if( pieces.length < 2 || pieces.length == 3 || pieces.length > 4){
+ throw new AAIException("AAI_6120", "bad edge-filter passed: [" + filter + "]");
+ } else {
+ String nodeType = pieces[0].toLowerCase();
+ String filterType = pieces[1].toUpperCase();
+ Introspector otherNode;
+ if (!filterType.equals(EXISTS) && !filterType.equals(DOES_NOT_EXIST)) {
+ throw new AAIException("AAI_6120", "bad filterType passed: [" + filterType + "]");
+ }
+ try {
+ otherNode = loader.introspectorFromName(nodeType);
+ } catch (AAIUnknownObjectException e) {
+ throw new AAIException("AAI_6115", "Unrecognized nodeType [" + nodeType + "] passed to node query.");
+ }
+ String propName = null;
+ String propValue = null;
+ if ( pieces.length >= 3) {
+ propName = this.findDbPropName(otherNode, pieces[2].toLowerCase());
+ propValue = pieces[3];
+ }
+ String[] edgeLabels = getEdgeLabel(targetNodeType, nodeType);
+
+ GraphTraversal<Vertex, Vertex> edgeSearch = __.start();
+
+ edgeSearch.both(edgeLabels).has(AAIProperties.NODE_TYPE, nodeType);
+ if (propName != null) {
+ // check for matching property
+ if (propValue != null) {
+ edgeSearch.has(propName, propValue);
+ } else {
+ edgeSearch.has(propName);
+ }
+ }
+
+ if( filterType.equals(DOES_NOT_EXIST)){
+ traversal.where(__.not(edgeSearch));
+ } else if (filterType.equals(EXISTS)) {
+ traversal.where(edgeSearch);
+ }
+ }
+ }
+
+ List<Vertex> results = traversal.toList();
+ Introspector searchResults = createSearchResults(loader, urlBuilder, results);
+
+ String outputMediaType = getMediaType(headers.getAcceptableMediaTypes());
+ org.onap.aai.introspection.MarshallerProperties properties = new org.onap.aai.introspection.MarshallerProperties.Builder(
+ org.onap.aai.restcore.MediaType.getEnum(outputMediaType)).build();
+
+ result = searchResults.marshal(properties);
+ response = Response.ok().entity(result).build();
+
+ success = true;
+ } catch (AAIException e) {
+ success = false;
+ throw e;
+ } catch (Exception e) {
+ success = false;
+ throw new AAIException("AAI_5105", e);
+ } finally {
+ if (dbEngine != null) {
+ if (success) {
+ dbEngine.commit();
+ } else {
+ dbEngine.rollback();
+ }
+ }
+ }
+
+ return response;
+ }
+
+ protected Introspector createSearchResults(Loader loader, UrlBuilder urlBuilder, List<Vertex> results)
+ throws AAIUnknownObjectException {
+ Introspector searchResults = loader.introspectorFromName("search-results");
+ List<Object> resultDataList = searchResults.getValue("result-data");
+ Stream<Vertex> stream;
+ if (results.size() >= 50) {
+ stream = results.parallelStream();
+ } else {
+ stream = results.stream();
+ }
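+ // Larger result lists are marshalled in parallel (the 50-element cutoff is just a heuristic);
+ // the shared result-data list is guarded with synchronized below since it is not thread-safe.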
+ boolean isParallel = stream.isParallel();
+ stream.forEach(v -> {
+ String nodeType = v.<String>property(AAIProperties.NODE_TYPE).orElse(null);
+
+ String thisNodeURL;
+ try {
+ thisNodeURL = urlBuilder.pathed(v);
+ Introspector resultData = loader.introspectorFromName("result-data");
+
+ resultData.setValue("resource-type", nodeType);
+ resultData.setValue("resource-link", thisNodeURL);
+ if (isParallel) {
+ synchronized (resultDataList) {
+ resultDataList.add(resultData.getUnderlyingObject());
+ }
+ } else {
+ resultDataList.add(resultData.getUnderlyingObject());
+ }
+ } catch (AAIException | AAIFormatVertexException e) {
+ throw new RuntimeException(e);
+ }
+
+ });
+ return searchResults;
+ }
+
+ private String findDbPropName(Introspector obj, String propName) {
+
+ Optional<String> result = obj.getPropertyMetadata(propName, PropertyMetadata.DB_ALIAS);
+ if (result.isPresent()) {
+ return result.get();
+ } else {
+ return propName;
+ }
+ }
+
+
+ /**
+ * Gets the edge label.
+ *
+ * @param targetNodeType the target node type
+ * @param nodeType the node type
+ * @return the edge label
+ * @throws AAIException the AAI exception
+ */
+ public static String[] getEdgeLabel(String targetNodeType, String nodeType) throws AAIException{
+ Map<String, EdgeRule> rules = EdgeRules.getInstance().getEdgeRules(targetNodeType, nodeType);
+ String[] results = rules.keySet().toArray(new String[0]);
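+ // e.g. for ("pserver", "complex") this would yield the label(s) of every edge rule defined
+ // between those two node types (illustrative pair).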
+ return results;
+ }
+
+
+ /**
+ * Run named query.
+ *
+ * @param fromAppId the from app id
+ * @param transId the trans id
+ * @param queryParameters the query parameters
+ * @param connectionType the db connection type
+ * @param aaiExtMap the aai ext map
+ * @return the response
+ * @throws JAXBException the JAXB exception
+ * @throws AAIException the AAI exception
+ */
+ public Response runNamedQuery(String fromAppId, String transId, String queryParameters,
+ DBConnectionType connectionType,
+ AAIExtensionMap aaiExtMap) throws JAXBException, AAIException {
+
+ Introspector inventoryItems;
+ boolean success = true;
+ TransactionalGraphEngine dbEngine = null;
+ try {
+
+ MoxyLoader loader = (MoxyLoader)LoaderFactory.createLoaderForVersion(ModelType.MOXY, AAIProperties.LATEST);
+ DynamicJAXBContext jaxbContext = loader.getJAXBContext();
+ dbEngine = new TitanDBEngine(
+ QueryStyle.TRAVERSAL,
+ connectionType,
+ loader);
+ DBSerializer serializer = new DBSerializer(AAIProperties.LATEST, dbEngine, ModelType.MOXY, fromAppId);
+ ModelBasedProcessing processor = new ModelBasedProcessing(loader, dbEngine, serializer);
+
+ dbEngine.startTransaction();
+ org.onap.aai.restcore.MediaType mediaType = org.onap.aai.restcore.MediaType.APPLICATION_JSON_TYPE;
+ String contentType = aaiExtMap.getHttpServletRequest().getContentType();
+ if (contentType != null && contentType.contains("application/xml")) {
+ mediaType = org.onap.aai.restcore.MediaType.APPLICATION_XML_TYPE;
+ }
+
+ if (queryParameters.length() == 0) {
+ queryParameters = "{}";
+ }
+
+ DynamicEntity modelAndNamedQuerySearch = (DynamicEntity)loader.unmarshal("ModelAndNamedQuerySearch", queryParameters, mediaType).getUnderlyingObject();
+ if (modelAndNamedQuerySearch == null) {
+ throw new AAIException("AAI_5105");
+ }
+ HashMap<String,Object> namedQueryLookupHash = new HashMap<String,Object>();
+
+ DynamicEntity qp = modelAndNamedQuerySearch.get("queryParameters");
+ String namedQueryUuid = null;
+ if ((qp != null) && qp.isSet("namedQuery")) {
+ DynamicEntity namedQuery = (DynamicEntity) qp.get("namedQuery");
+
+ if (namedQuery.isSet("namedQueryUuid")) {
+ namedQueryUuid = namedQuery.get("namedQueryUuid");
+ }
+ if (namedQuery.isSet("namedQueryName")) {
+ namedQueryLookupHash.put("named-query-name", namedQuery.get("namedQueryName"));
+ }
+ if (namedQuery.isSet("namedQueryVersion")) {
+ namedQueryLookupHash.put("named-query-version", namedQuery.get("namedQueryVersion"));
+ }
+ }
+
+ if (namedQueryUuid == null) {
+
+ DbMethHelper dbMethHelper = new DbMethHelper(loader, dbEngine);
+ List<Vertex> namedQueryVertices = dbMethHelper.locateUniqueVertices("named-query", namedQueryLookupHash);
+ for (Vertex vert : namedQueryVertices) {
+ namedQueryUuid = vert.<String>property("named-query-uuid").orElse(null);
+ // there should only be one, we'll pick the first if not
+ break;
+ }
+ }
+
+ String secondaryFilterCutPoint = null;
+
+ if (modelAndNamedQuerySearch.isSet("secondaryFilterCutPoint")) {
+ secondaryFilterCutPoint = modelAndNamedQuerySearch.get("secondaryFilterCutPoint");
+ }
+
+ List<Map<String,Object>> startNodeFilterHash = new ArrayList<>();
+
+ mapInstanceFilters((DynamicEntity)modelAndNamedQuerySearch.get("instanceFilters"),
+ startNodeFilterHash, jaxbContext);
+
+ Map<String,Object> secondaryFilterHash = new HashMap<>();
+
+ mapSecondaryFilters((DynamicEntity)modelAndNamedQuerySearch.get("secondaryFilts"),
+ secondaryFilterHash, jaxbContext);
+
+ List<ResultSet> resultSet = processor.queryByNamedQuery(transId, fromAppId,
+ namedQueryUuid, startNodeFilterHash, aaiExtMap.getApiVersion(), secondaryFilterCutPoint, secondaryFilterHash);
+
+ inventoryItems = loader.introspectorFromName("inventory-response-items");
+
+ List<Object> invItemList = unpackResultSet(resultSet, dbEngine, loader, serializer);
+
+ inventoryItems.setValue("inventory-response-item", invItemList);
+ success = true;
+ } catch (AAIException e) {
+ success = false;
+ throw e;
+ } catch (Exception e) {
+ success = false;
+ throw new AAIException("AAI_5105", e);
+ } finally {
+ if (dbEngine != null) {
+ if (success) {
+ dbEngine.commit();
+ } else {
+ dbEngine.rollback();
+ }
+ }
+ }
+
+ return getResponseFromIntrospector(inventoryItems, aaiExtMap.getHttpHeaders());
+ }
+
+ /**
+ * Execute model operation.
+ *
+ * @param fromAppId the from app id
+ * @param transId the trans id
+ * @param queryParameters the query parameters
+ * @param connectionType the db connection type
+ * @param isDelete the is delete
+ * @param aaiExtMap the aai ext map
+ * @return the response
+ * @throws JAXBException the JAXB exception
+ * @throws AAIException the AAI exception
+ * @throws DynamicException the dynamic exception
+ * @throws UnsupportedEncodingException the unsupported encoding exception
+ */
+ public Response executeModelOperation(String fromAppId, String transId, String queryParameters,
+ DBConnectionType connectionType,
+ boolean isDelete,
+ AAIExtensionMap aaiExtMap) throws JAXBException, AAIException, DynamicException, UnsupportedEncodingException {
+ Response response;
+ boolean success = true;
+ TransactionalGraphEngine dbEngine = null;
+ try {
+
+ MoxyLoader loader = (MoxyLoader) LoaderFactory.createLoaderForVersion(ModelType.MOXY, AAIProperties.LATEST);
+ DynamicJAXBContext jaxbContext = loader.getJAXBContext();
+ dbEngine = new TitanDBEngine(
+ QueryStyle.TRAVERSAL,
+ connectionType,
+ loader);
+ DBSerializer serializer = new DBSerializer(AAIProperties.LATEST, dbEngine, ModelType.MOXY, fromAppId);
+ ModelBasedProcessing processor = new ModelBasedProcessing(loader, dbEngine, serializer);
+ dbEngine.startTransaction();
+
+
+ org.onap.aai.restcore.MediaType mediaType = org.onap.aai.restcore.MediaType.APPLICATION_JSON_TYPE;
+ String contentType = aaiExtMap.getHttpServletRequest().getContentType();
+ if (contentType != null && contentType.contains("application/xml")) {
+ mediaType = org.onap.aai.restcore.MediaType.APPLICATION_XML_TYPE;
+ }
+
+ if (queryParameters.length() == 0) {
+ queryParameters = "{}";
+ }
+
+ DynamicEntity modelAndNamedQuerySearch = (DynamicEntity)loader.unmarshal("ModelAndNamedQuerySearch", queryParameters, mediaType).getUnderlyingObject();
+ if (modelAndNamedQuerySearch == null) {
+ throw new AAIException("AAI_5105");
+ }
+
+ Map<String,Object> modelQueryLookupHash = new HashMap<>();
+
+ String modelVersionId = null;
+ String modelName = null;
+ String modelInvariantId = null;
+ String modelVersion = null;
+ String topNodeType = null;
+
+ if (modelAndNamedQuerySearch.isSet("topNodeType")) {
+ topNodeType = modelAndNamedQuerySearch.get("topNodeType");
+ }
+
+ // the ways to get a model:
+
+ // 1. model-version-id (previously model-name-version-id)
+ // 2. model-invariant-id (previously model-id) + model-version
+ // 3. model-name + model-version
+
+ // we will support both using the OverloadedModel object in the v9 oxm. This allows us to unmarshal
+ // either an old-style model or new-style model + model-ver object
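+ // For illustration: option 1 maps to modelVersionId, option 2 to modelInvariantId + modelVersion,
+ // and option 3 to modelName + modelVersion in the lookups below.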
+ if (modelAndNamedQuerySearch.isSet("queryParameters")) {
+ DynamicEntity qp = modelAndNamedQuerySearch.get("queryParameters");
+
+ if (qp.isSet("model")) {
+ DynamicEntity model = (DynamicEntity) qp.get("model");
+
+ // on an old-style model object, the following 4 attrs were all present
+ if (model.isSet("modelNameVersionId")) {
+ modelVersionId = model.get("modelNameVersionId");
+ }
+ if (model.isSet("modelId")) {
+ modelInvariantId = model.get("modelId");
+ }
+ if (model.isSet("modelName")) {
+ modelName = model.get("modelName");
+ }
+ if (model.isSet("modelVersion")) {
+ modelVersion = model.get("modelVersion");
+ }
+
+ // new style splits model-invariant-id from the other 3 attrs. This is
+ // the only way to directly look up the model object
+ if (model.isSet("modelInvariantId")) {
+ modelInvariantId = model.get("modelInvariantId");
+ }
+
+ if (model.isSet("modelVers")) {
+ // we know that this is new style, because modelVers was not an option
+ // before v9
+ DynamicEntity modelVers = (DynamicEntity) model.get("modelVers");
+ if (modelVers.isSet("modelVer")) {
+ List<DynamicEntity> modelVerList = modelVers.get("modelVer");
+ // if they send more than one, too bad, they get the first one
+ DynamicEntity modelVer = modelVerList.get(0);
+ if (modelVer.isSet("modelName")) {
+ modelName = modelVer.get("modelName");
+ }
+ if (modelVer.isSet("modelVersionId")) {
+ modelVersionId = modelVer.get("modelVersionId");
+ }
+ if (modelVer.isSet("modelVersion")) {
+ modelVersion = modelVer.get("modelVersion");
+ }
+ }
+ }
+ }
+ }
+
+ List<Map<String,Object>> startNodeFilterHash = new ArrayList<>();
+
+ String resourceVersion = mapInstanceFilters((DynamicEntity)modelAndNamedQuerySearch.get("instanceFilters"),
+ startNodeFilterHash, jaxbContext);
+
+ if (isDelete) {
+
+ List<ResultSet> resultSet = processor.queryByModel(transId, fromAppId,
+ modelVersionId, modelInvariantId, modelName, topNodeType, startNodeFilterHash, aaiExtMap.getApiVersion() );
+
+ Map<Object,String> objectToVertMap = new HashMap<>();
+ List<Object> invItemList = unpackResultSet(resultSet, dbEngine, loader, serializer);
+
+ ResultSet rs = resultSet.get(0);
+
+ Vertex firstVert = rs.getVert();
+ String restURI = serializer.getURIForVertex(firstVert).toString();
+ String notificationVersion = AAIProperties.LATEST.toString();
+ if (restURI.startsWith("/")) {
+ restURI = "/aai/" + notificationVersion + restURI;
+ } else {
+ restURI = "/aai/" + notificationVersion + "/" + restURI;
+ }
+
+ Map<String,String> delResult = processor.runDeleteByModel( transId, fromAppId,
+ modelVersionId, topNodeType, startNodeFilterHash.get(0), aaiExtMap.getApiVersion(), resourceVersion );
+
+ String resultStr = "";
+ for (Map.Entry<String,String> ent : delResult.entrySet()) {
+ resultStr += "v[" + ent.getKey() + "] " + ent.getValue() + ",\n";
+ }
+ resultStr = resultStr.trim();
+
+ // Note - notifications are now done down in the individual "remove" calls done in runDeleteByModel() above.
+
+ response = Response.ok(resultStr).build();
+
+ } else {
+ List<ResultSet> resultSet = processor.queryByModel( transId, fromAppId,
+ modelVersionId, modelInvariantId, modelName, topNodeType, startNodeFilterHash, aaiExtMap.getApiVersion() );
+
+ Introspector inventoryItems = loader.introspectorFromName("inventory-response-items");
+
+ List<Object> invItemList = unpackResultSet(resultSet, dbEngine, loader, serializer);
+
+ inventoryItems.setValue("inventory-response-item", invItemList);
+
+ response = getResponseFromIntrospector(inventoryItems, aaiExtMap.getHttpHeaders());
+ }
+ success = true;
+ } catch (AAIException e) {
+ success = false;
+ throw e;
+ } catch (Exception e) {
+ success = false;
+ throw new AAIException("AAI_5105", e);
+ } finally {
+ if (dbEngine != null) {
+ if (success) {
+ dbEngine.commit();
+ } else {
+ dbEngine.rollback();
+ }
+ }
+ }
+
+ return response;
+ }
+
+ private Response getResponseFromIntrospector(Introspector obj, HttpHeaders headers) {
+ boolean isJson = false;
+ for (MediaType mt : headers.getAcceptableMediaTypes()) {
+ if (MediaType.APPLICATION_JSON_TYPE.isCompatible(mt)) {
+ isJson = true;
+ break;
+ }
+ }
+ org.onap.aai.introspection.MarshallerProperties properties;
+ if (isJson) {
+ properties =
+ new org.onap.aai.introspection.MarshallerProperties.Builder(org.onap.aai.restcore.MediaType.APPLICATION_JSON_TYPE).build();
+ } else {
+ properties =
+ new org.onap.aai.introspection.MarshallerProperties.Builder(org.onap.aai.restcore.MediaType.APPLICATION_XML_TYPE).build();
+ }
+
+ String marshalledObj = obj.marshal(properties);
+ return Response.ok().entity(marshalledObj).build();
+ }
+
+ /**
+ * Map instance filters.
+ *
+ * @param instanceFilters the instance filters
+ * @param startNodeFilterHash the start node filter hash
+ * @param jaxbContext the jaxb context
+ * @return the resource-version found in the instance filters, or null if none was supplied
+ */
+ private String mapInstanceFilters(DynamicEntity instanceFilters, List<Map<String,Object>> startNodeFilterHash, DynamicJAXBContext jaxbContext) {
+
+ if (instanceFilters == null || !instanceFilters.isSet("instanceFilter")) {
+ return null;
+ }
+ @SuppressWarnings("unchecked")
+ List<DynamicEntity> instanceFilter = (ArrayList<DynamicEntity>)instanceFilters.get("instanceFilter");
+ String resourceVersion = null;
+
+ for (DynamicEntity instFilt : instanceFilter) {
+ List<DynamicEntity> any = instFilt.get("any");
+ HashMap<String,Object> thisNodeFilterHash = new HashMap<String,Object>();
+ for (DynamicEntity anyEnt : any) {
+ String clazz = anyEnt.getClass().getCanonicalName();
+ String simpleClazz = anyEnt.getClass().getSimpleName();
+
+ String nodeType = CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_HYPHEN, simpleClazz);
+
+ DynamicType anyEntType = jaxbContext.getDynamicType(clazz);
+
+ for (String propName : anyEntType.getPropertiesNames()) {
+ // hyphencase the prop and throw it on the hash
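+ // e.g. a generic-vnf instance filter setting "vnfId" becomes the key "generic-vnf.vnf-id" (illustrative names).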
+ if (anyEnt.isSet(propName)) {
+ thisNodeFilterHash.put(nodeType + "." + CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_HYPHEN, propName), anyEnt.get(propName));
+ if (propName.equals("resourceVersion") && resourceVersion == null) {
+ resourceVersion = (String)anyEnt.get(propName);
+ }
+ }
+ }
+ }
+ startNodeFilterHash.add(thisNodeFilterHash);
+ }
+ return resourceVersion;
+ }
+
+ /**
+ * Map secondary filters.
+ *
+ * @param secondaryFilts the secondary filters
+ * @param secondaryFilterHash the secondary filter hash
+ * @param jaxbContext the jaxb context
+ */
+ private void mapSecondaryFilters(DynamicEntity secondaryFilts, Map<String,Object> secondaryFilterHash, DynamicJAXBContext jaxbContext) {
+
+ if (secondaryFilts == null || !secondaryFilts.isSet("secondaryFilt")) {
+ return;
+ }
+ @SuppressWarnings("unchecked")
+ List<DynamicEntity> secondaryFilter = (ArrayList<DynamicEntity>)secondaryFilts.get("secondaryFilt");
+
+ for (DynamicEntity secondaryFilt : secondaryFilter) {
+ List<DynamicEntity> any = secondaryFilt.get("any");
+
+ for (DynamicEntity anyEnt : any) {
+ String clazz = anyEnt.getClass().getCanonicalName();
+ String simpleClazz = anyEnt.getClass().getSimpleName();
+
+ String nodeType = CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_HYPHEN, simpleClazz);
+
+ DynamicType anyEntType = jaxbContext.getDynamicType(clazz);
+
+ for (String propName : anyEntType.getPropertiesNames()) {
+ // hyphencase the prop and throw it on the hash
+ if (anyEnt.isSet(propName)) {
+ secondaryFilterHash.put(nodeType + "." + CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_HYPHEN, propName), anyEnt.get(propName));
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Remap inventory items.
+ *
+ * @param invResultItem the inv result item
+ * @param jaxbContext the jaxb context
+ * @param includeTheseVertices the include these vertices
+ * @param objectToVertMap the object to vert map
+ * @param aaiExtMap the aai ext map
+ * @return the dynamic entity
+ */
+ private DynamicEntity remapInventoryItems(DynamicEntity invResultItem, DynamicJAXBContext jaxbContext,
+ Map<String,String> includeTheseVertices, Map<Object,String> objectToVertMap, AAIExtensionMap aaiExtMap) {
+
+
+ DynamicEntity inventoryItem = jaxbContext.newDynamicEntity("inventory.aai.onap.org." + aaiExtMap.getApiVersion() + ".InventoryResponseItem");
+ Object item = invResultItem.get("item");
+ inventoryItem.set("modelName", invResultItem.get("modelName"));
+ inventoryItem.set("item", item);
+ inventoryItem.set("extraProperties", invResultItem.get("extraProperties"));
+
+ String vertexId = "";
+
+ if (objectToVertMap.containsKey(item)) {
+ vertexId = objectToVertMap.get(item);
+ }
+
+ if (includeTheseVertices.containsKey(vertexId)) {
+ if (invResultItem.isSet("inventoryResponseItems")) {
+ List<DynamicEntity> invItemList = new ArrayList<DynamicEntity>();
+ DynamicEntity inventoryItems = jaxbContext.newDynamicEntity("inventory.aai.onap.org." + aaiExtMap.getApiVersion() + ".InventoryResponseItems");
+ DynamicEntity subInventoryResponseItems = invResultItem.get("inventoryResponseItems");
+ List<DynamicEntity> subInventoryResponseItemList = subInventoryResponseItems.get("inventoryResponseItem");
+ for (DynamicEntity ent : subInventoryResponseItemList) {
+ DynamicEntity invItem = remapInventoryItems(ent, jaxbContext, includeTheseVertices, objectToVertMap, aaiExtMap);
+ if (invItem != null) {
+ invItemList.add(invItem);
+ }
+ }
+ inventoryItems.set("inventoryResponseItem", invItemList);
+ inventoryItem.set("inventoryResponseItems", inventoryItems);
+ }
+ }
+ return inventoryItem;
+ }
+
+ /**
+ * Unpack result set.
+ *
+ * @param resultSetList the result set list
+ * @param engine the transactional graph engine
+ * @param loader the loader
+ * @param serializer the db serializer
+ * @return the list of inventory response items
+ * @throws AAIException the AAI exception
+ */
+ // this should return an inventoryItem
+ private List<Object> unpackResultSet(List<ResultSet> resultSetList,
+ TransactionalGraphEngine engine,
+ Loader loader,
+ DBSerializer serializer) throws AAIException {
+
+ List<Object> resultList = new ArrayList<>();
+
+ for (ResultSet resultSet : resultSetList) {
+
+ if( resultSet.getVert() == null ){
+ continue;
+ }
+
+ Introspector inventoryItem = loader.introspectorFromName("inventory-response-item");
+ Introspector inventoryItems = loader.introspectorFromName("inventory-response-items");
+ // add this inventoryItem to the resultList for this level
+ resultList.add(inventoryItem.getUnderlyingObject());
+
+ Vertex vert = resultSet.getVert();
+
+ String aaiNodeType = vert.<String>property("aai-node-type").orElse(null);
+
+ if (aaiNodeType != null) {
+ Introspector thisObj = loader.introspectorFromName(aaiNodeType);
+
+ if (resultSet.getExtraPropertyHash() != null) {
+ Map<String,Object> extraProperties = resultSet.getExtraPropertyHash();
+
+ Introspector extraPropertiesEntity = loader.introspectorFromName("extra-properties");
+
+ List<Object> extraPropsList = extraPropertiesEntity.getValue("extra-property");
+
+ for (Map.Entry<String,Object> ent : extraProperties.entrySet()) {
+ String propName = ent.getKey();
+ Object propVal = ent.getValue();
+
+ Introspector extraPropEntity = loader.introspectorFromName("extra-property");
+
+ extraPropEntity.setValue("property-name", propName);
+ extraPropEntity.setValue("property-value", propVal);
+
+ extraPropsList.add(extraPropEntity.getUnderlyingObject());
+
+ }
+ inventoryItem.setValue("extra-properties", extraPropertiesEntity.getUnderlyingObject());
+ }
+
+ try {
+ serializer.dbToObject(Collections.singletonList(vert), thisObj, 0, true, "false");
+ } catch (UnsupportedEncodingException e1) {
+ throw new AAIException("AAI_5105");
+ }
+ PropertyLimitDesc propertyLimitDesc = resultSet.getPropertyLimitDesc();
+
+ if (propertyLimitDesc != null) {
+
+ if (PropertyLimitDesc.SHOW_NONE.equals(propertyLimitDesc)) {
+ HashMap<String,Object> emptyPropertyOverRideHash = new HashMap<String,Object>();
+ for (String key : thisObj.getAllKeys()) {
+ emptyPropertyOverRideHash.put(key, null);
+ }
+ filterProperties(thisObj, emptyPropertyOverRideHash);
+ } else if (PropertyLimitDesc.SHOW_ALL.equals(propertyLimitDesc)) {
+ //keep everything
+ } else if (PropertyLimitDesc.SHOW_NAME_AND_KEYS_ONLY.equals(propertyLimitDesc)) {
+ HashMap<String,Object> keysAndNamesPropHash = new HashMap<String,Object>();
+
+ for (String key : thisObj.getAllKeys()) {
+ keysAndNamesPropHash.put(key, null);
+ }
+ String namePropMetaData = thisObj.getMetadata(ObjectMetadata.NAME_PROPS);
+ if (namePropMetaData != null) {
+ String[] nameProps = namePropMetaData.split(",");
+ for (String names : nameProps) {
+ keysAndNamesPropHash.put(names, null);
+ }
+ }
+ filterProperties(thisObj, keysAndNamesPropHash);
+ }
+ } else {
+ if (resultSet.getPropertyOverRideHash() != null && resultSet.getPropertyOverRideHash().size() > 0) {
+ Map<String,Object> propertyOverRideHash = resultSet.getPropertyOverRideHash();
+ if (propertyOverRideHash.containsKey("persona-model-id")) {
+ propertyOverRideHash.remove("persona-model-id");
+ propertyOverRideHash.put("model-invariant-id", null);
+ }
+ for (String key : thisObj.getAllKeys()) {
+ propertyOverRideHash.put(key, null);
+ }
+ filterProperties(thisObj, propertyOverRideHash);
+ } else {
+ //keep everything
+ }
+ }
+
+ if (thisObj != null) {
+ inventoryItem.setValue("item", thisObj.getUnderlyingObject());
+
+ String modelName = null;
+ try {
+ String modelInvariantIdLocal = (String)vert.<String>property("model-invariant-id-local").orElse(null); // this one points at a model
+ String modelVersionIdLocal = (String)vert.<String>property("model-version-id-local").orElse(null); // this one points at a model-ver
+
+ if ( (modelInvariantIdLocal != null && modelVersionIdLocal != null)
+ && (modelInvariantIdLocal.length() > 0 && modelVersionIdLocal.length() > 0) ) {
+ HashMap<String,Object> modelLookupHash = new HashMap<String,Object>();
+
+ Introspector modelVer = loader.introspectorFromName("model-ver");
+ modelVer.setValue("model-version-id", modelVersionIdLocal);
+ QueryBuilder builder = engine.getQueryBuilder().createDBQuery(modelVer);
+
+ List<Vertex> modelVerVerts = builder.toList();
+ if (modelVerVerts.size() != 1) {
+ throw new AAIException("AAI_6112");
+ }
+ Vertex modelVerVert = modelVerVerts.get(0);
+
+ modelName = modelVerVert.<String>property("model-name").orElse(null);
+
+ if (modelName != null && modelName.length() > 0) {
+ inventoryItem.setValue("model-name", modelName);
+ }
+ }
+ } catch (DynamicException e) {
+ // it's ok, the dynamic object might not have these fields
+ } catch (AAIException e) {
+ if (e.getErrorObject().getErrorCode().equals("6114")) {
+ // it's ok, couldn't find a matching model
+ } else {
+ throw e;
+ }
+ }
+
+ if (resultSet.getSubResultSet() != null) {
+ List<ResultSet> subResultSet = resultSet.getSubResultSet();
+ if (subResultSet != null && subResultSet.size() > 0 ) {
+ List<Object> res = unpackResultSet(subResultSet, engine, loader, serializer);
+ if (res.size() > 0) {
+ inventoryItems.setValue("inventory-response-item", res);
+ inventoryItem.setValue("inventory-response-items", inventoryItems.getUnderlyingObject());
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return resultList;
+ }
+
+ private void filterProperties(Introspector thisObj, Map<String, Object> override) {
+
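+ // Any simple property whose name is not a key in the override map is nulled out, so only the
+ // overridden/whitelisted properties survive marshalling.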
+ thisObj.getProperties().stream().filter(x -> {
+ return !override.containsKey(x);
+ }).forEach(prop -> {
+ if (thisObj.isSimpleType(prop)) {
+ thisObj.setValue(prop, null);
+ }
+ });
+
+ }
+
+ /**
+ * Gets the media type.
+ *
+ * @param mediaTypeList the media type list
+ * @return the media type
+ */
+ protected String getMediaType(List <MediaType> mediaTypeList) {
+ String mediaType = MediaType.APPLICATION_JSON; // json is the default
+ for (MediaType mt : mediaTypeList) {
+ if (MediaType.APPLICATION_XML_TYPE.isCompatible(mt)) {
+ mediaType = MediaType.APPLICATION_XML;
+ }
+ }
+ return mediaType;
+ }
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java
new file mode 100644
index 0000000..733383a
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java
@@ -0,0 +1,27 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors;
+
+public class AAIHeaderProperties {
+
+ public static final String REQUEST_CONTEXT = "aai-request-context";
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/AAILogJAXRSInInterceptor.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/AAILogJAXRSInInterceptor.java
new file mode 100644
index 0000000..155d387
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/AAILogJAXRSInInterceptor.java
@@ -0,0 +1,286 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors;
+
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.ws.rs.core.MediaType;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.cxf.helpers.CastUtils;
+import org.apache.cxf.interceptor.LoggingMessage;
+import org.apache.cxf.jaxrs.interceptor.JAXRSInInterceptor;
+import org.apache.cxf.message.Message;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.rest.util.EchoResponse;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.FormatDate;
+import org.onap.aai.util.HbaseSaltPrefixer;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.slf4j.MDC;
+
+public class AAILogJAXRSInInterceptor extends JAXRSInInterceptor {
+
+ protected final String COMPONENT = "aairest";
+ protected final String CAMEL_REQUEST = "CamelHttpUrl";
+ private static final Pattern uuidPattern = Pattern.compile("^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$");
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(AAILogJAXRSInInterceptor.class);
+
+ /**
+ * {@inheritDoc}
+ */
+ public void handleMessage(Message message) {
+
+ boolean go = false;
+ String uri = null;
+ String query = null;
+ try {
+
+ uri = (String)message.get(CAMEL_REQUEST);
+ if (uri != null) {
+ query = (String)message.get(Message.QUERY_STRING);
+ }
+
+ if (AAIConfig.get(AAIConstants.AAI_LOGGING_HBASE_INTERCEPTOR).equalsIgnoreCase("true") &&
+ AAIConfig.get(AAIConstants.AAI_LOGGING_HBASE_ENABLED).equalsIgnoreCase("true")) {
+ go = true;
+ message.getExchange().put("AAI_LOGGING_HBASE_ENABLED", 1);
+ if (AAIConfig.get(AAIConstants.AAI_LOGGING_HBASE_LOGREQUEST).equalsIgnoreCase("true") ) {
+ message.getExchange().put("AAI_LOGGING_HBASE_LOGREQUEST", 1);
+ }
+ if (AAIConfig.get(AAIConstants.AAI_LOGGING_HBASE_LOGRESPONSE).equalsIgnoreCase("true") ) {
+ message.getExchange().put("AAI_LOGGING_HBASE_LOGRESPONSE", 1);
+ }
+ }
+ if (AAIConfig.get(AAIConstants.AAI_LOGGING_TRACE_ENABLED).equalsIgnoreCase("true") ) {
+ go = true;
+ message.getExchange().put("AAI_LOGGING_TRACE_ENABLED", 1);
+ if (AAIConfig.get(AAIConstants.AAI_LOGGING_TRACE_LOGREQUEST).equalsIgnoreCase("true") ) {
+ message.getExchange().put("AAI_LOGGING_TRACE_LOGREQUEST", 1);
+ }
+ if (AAIConfig.get(AAIConstants.AAI_LOGGING_TRACE_LOGRESPONSE).equalsIgnoreCase("true") ) {
+ message.getExchange().put("AAI_LOGGING_TRACE_LOGRESPONSE", 1);
+ }
+ }
+ } catch (AAIException e1) {
+ ErrorLogHelper.logException(e1);
+ }
+
+ if ((uri != null) && (uri.contains(EchoResponse.echoPath))) {
+ // if it's a health check, we don't want to log ANYTHING if it's a lightweight one
+ if (query == null) {
+ if (message.getExchange().containsKey("AAI_LOGGING_HBASE_ENABLED")) {
+ message.getExchange().remove("AAI_LOGGING_HBASE_ENABLED");
+ }
+ if (message.getExchange().containsKey("AAI_LOGGING_TRACE_ENABLED")) {
+ message.getExchange().remove("AAI_LOGGING_TRACE_ENABLED");
+ }
+ go = false;
+ }
+ }
+ else if ((uri != null) && (uri.contains("/translog/"))) {
+ // if it's a translog query, we don't want to log the responses
+ if (message.getExchange().containsKey("AAI_LOGGING_HBASE_LOGRESPONSE")) {
+ message.getExchange().remove("AAI_LOGGING_HBASE_LOGRESPONSE");
+ }
+ if (message.getExchange().containsKey("AAI_LOGGING_TRACE_LOGRESPONSE")) {
+ message.getExchange().remove("AAI_LOGGING_TRACE_LOGRESPONSE");
+ }
+ }
+
+ if (!go) { // there's nothing to do
+ return;
+ }
+
+ // DONE: build a TXID from the hostname, the time (YYYYMMDDHHMMSSMILLIS) and LoggingMessage.nextId(), e.g. 20150326145301-1
+ String now = genDate();
+
+ message.getExchange().put("AAI_RQST_TM", now);
+
+ String id = (String)message.getExchange().get(LoggingMessage.ID_KEY);
+
+ String fullId = null;
+ try {
+ if (id == null) {
+ id = LoggingMessage.nextId();
+ }
+ fullId = AAIConfig.get(AAIConstants.AAI_NODENAME) + "-" + now + "-" + id;
+ fullId = HbaseSaltPrefixer.getInstance().prependSalt(fullId);
+ message.getExchange().put(LoggingMessage.ID_KEY, fullId);
+ } catch (AAIException e1) {
+ LOGGER.debug("config problem", e1);
+ }
+
+ if (fullId == null) {
+ fullId = now + "-" + id;
+ fullId = HbaseSaltPrefixer.getInstance().prependSalt(fullId);
+ }
+ message.put(LoggingMessage.ID_KEY, fullId);
+ final LoggingMessage buffer = new LoggingMessage("Message", fullId);
+
+ Integer responseCode = (Integer)message.get(Message.RESPONSE_CODE);
+ if (responseCode != null) {
+ buffer.getResponseCode().append(responseCode);
+ }
+
+ String encoding = (String)message.get(Message.ENCODING);
+
+ if (encoding != null) {
+ buffer.getEncoding().append(encoding);
+ }
+ String httpMethod = (String)message.get(Message.HTTP_REQUEST_METHOD);
+ if (httpMethod != null) {
+ buffer.getHttpMethod().append(httpMethod);
+ }
+
+ String ct = (String)message.get(Message.CONTENT_TYPE);
+ if (ct != null) {
+ if ("*/*".equals(ct)) {
+ message.put(Message.CONTENT_TYPE, MediaType.APPLICATION_JSON);
+ ct = MediaType.APPLICATION_JSON;
+ }
+ buffer.getContentType().append(ct);
+
+ }
+ Object headers = message.get(Message.PROTOCOL_HEADERS);
+ if (headers != null) {
+ buffer.getHeader().append(headers);
+
+ Map<String, List<String>> headersList = CastUtils.cast((Map<?, ?>)message.get(Message.PROTOCOL_HEADERS));
+ String transId = "";
+ List<String> xt = headersList.get("X-TransactionId");
+ String newTransId = transId;
+ boolean missingTransId = false;
+ boolean replacedTransId = false;
+ String logMsg = null;
+ if (xt != null) {
+ for (String transIdValue : xt) {
+ transId = transIdValue;
+ }
+ Matcher matcher = uuidPattern.matcher(transId);
+ if (!matcher.find()) {
+ replacedTransId = true;
+					// if there's a colon, check whether the segment before it is a valid UUID
+ if (transId.contains(":")) {
+ String[] uuidParts = transId.split(":");
+ Matcher matcher2 = uuidPattern.matcher(uuidParts[0]);
+ if (matcher2.find()) {
+ newTransId = uuidParts[0];
+ } else {
+ // punt, we tried to find it, it has a colon but no UUID-1
+ newTransId = UUID.randomUUID().toString();
+ }
+ } else {
+ newTransId = UUID.randomUUID().toString();
+ }
+ }
+ } else {
+ newTransId = UUID.randomUUID().toString();
+ missingTransId = true;
+ }
+
+ if (missingTransId || replacedTransId) {
+ List<String> txList = new ArrayList<String>();
+ txList.add(newTransId);
+ headersList.put("X-TransactionId", txList);
+ if (missingTransId) {
+ logMsg = "Missing requestID. Assigned " + newTransId;
+ } else if (replacedTransId) {
+ logMsg = "Replaced invalid requestID of " + transId + " Assigned " + newTransId;
+ }
+ MDC.put("RequestId",newTransId);
+ }
+ else {
+ MDC.put("RequestId",transId);
+ }
+
+
+ List<String> fromAppIdList = headersList.get("X-FromAppId");
+ if (fromAppIdList != null) {
+ String fromAppId = null;
+ for (String fromAppIdValue : fromAppIdList) {
+ fromAppId = fromAppIdValue;
+ }
+ MDC.put("PartnerName",fromAppId);
+ }
+
+ List<String> contentType = headersList.get("Content-Type");
+ if (contentType == null) {
+ ct = (String)message.get(Message.CONTENT_TYPE);
+ headersList.put(Message.CONTENT_TYPE, Collections.singletonList(ct));
+ }
+
+ LOGGER.auditEvent("REST " + httpMethod + " " + ((query != null)? uri+"?"+query : uri) + " HbaseTxId=" + fullId);
+			if (logMsg != null) {
+				LOGGER.info(logMsg);
+			}
+ }
+
+
+ if (uri != null) {
+ buffer.getAddress().append(uri);
+ if (query != null) {
+ buffer.getAddress().append("?").append(query);
+ }
+ }
+
+ InputStream is = message.getContent(InputStream.class);
+ if (is != null) {
+ try {
+				String currentPayload = IOUtils.toString(is, "UTF-8");
+				IOUtils.closeQuietly(is);
+				buffer.getPayload().append(currentPayload);
+				// replace the consumed stream so later handlers can still read the request payload
+				is = IOUtils.toInputStream(currentPayload, "UTF-8");
+				message.setContent(InputStream.class, is);
+ } catch (Exception e) {
+ // It's ok to not have request input content
+ // throw new Fault(e);
+ }
+ }
+
+ // this will be saved in the message exchange, and can be pulled out later...
+ message.getExchange().put(fullId + "_REQUEST", buffer.toString());
+ }
+
+	/**
+	 * Generates the timestamp used for request/transaction logging.
+	 *
+	 * @return the formatted date string
+	 */
+ protected String genDate() {
+ FormatDate fd = new FormatDate("YYMMdd-HH:mm:ss:SSS");
+ return fd.getDateTime();
+ }
+
+}
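The interceptor above accepts an incoming X-TransactionId only when it is a well-formed UUID; otherwise it salvages the UUID before a colon if one is present, or assigns a fresh UUID. A minimal standalone sketch of that normalization, with illustrative class and method names:

import java.util.UUID;
import java.util.regex.Pattern;

public class TransIdNormalizationSketch {

    // same pattern as uuidPattern in AAILogJAXRSInInterceptor
    private static final Pattern UUID_PATTERN = Pattern.compile(
            "^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$");

    static String normalize(String transId) {
        if (transId == null) {
            return UUID.randomUUID().toString();      // missing id: assign a new one
        }
        if (UUID_PATTERN.matcher(transId).find()) {
            return transId;                           // already a valid UUID: keep it
        }
        if (transId.contains(":")) {
            String first = transId.split(":")[0];
            if (UUID_PATTERN.matcher(first).find()) {
                return first;                         // salvage the UUID before the colon
            }
        }
        return UUID.randomUUID().toString();          // otherwise replace the invalid id
    }

    public static void main(String[] args) {
        System.out.println(normalize("123e4567-e89b-12d3-a456-426614174000:retry-1"));
        System.out.println(normalize("not-a-uuid"));
    }
}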
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/AAILogJAXRSOutInterceptor.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/AAILogJAXRSOutInterceptor.java
new file mode 100644
index 0000000..a280023
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/AAILogJAXRSOutInterceptor.java
@@ -0,0 +1,303 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors;
+
+import java.io.OutputStream;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.cxf.helpers.CastUtils;
+import org.apache.cxf.interceptor.LoggingMessage;
+import org.apache.cxf.io.CacheAndWriteOutputStream;
+import org.apache.cxf.io.CachedOutputStream;
+import org.apache.cxf.io.CachedOutputStreamCallback;
+import org.apache.cxf.jaxrs.interceptor.JAXRSOutInterceptor;
+import org.apache.cxf.message.Message;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.FormatDate;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+// right after the request is complete, there may be content
+public class AAILogJAXRSOutInterceptor extends JAXRSOutInterceptor {
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(AAILogJAXRSOutInterceptor.class);
+
+ protected final String COMPONENT = "aairest";
+ protected final String CAMEL_REQUEST = "CamelHttpUrl";
+
+ /**
+ * {@inheritDoc}
+ */
+ public void handleMessage(Message message) {
+
+ String fullId = (String) message.getExchange().get(LoggingMessage.ID_KEY);
+
+ Map<String, List<String>> headers = CastUtils.cast((Map<?, ?>) message.get(Message.PROTOCOL_HEADERS));
+ if (headers == null) {
+ headers = new HashMap<String, List<String>>();
+ }
+
+ headers.put("X-AAI-TXID", Collections.singletonList(fullId));
+ message.put(Message.PROTOCOL_HEADERS, headers);
+
+ Message outMessage = message.getExchange().getOutMessage();
+ final OutputStream os = outMessage.getContent(OutputStream.class);
+ if (os == null) {
+ return;
+ }
+
+ // we only want to register the callback if there is good reason for it.
+ if (message.getExchange().containsKey("AAI_LOGGING_HBASE_ENABLED") || message.getExchange().containsKey("AAI_LOGGING_TRACE_ENABLED")) {
+
+ final CacheAndWriteOutputStream newOut = new CacheAndWriteOutputStream(os);
+ message.setContent(OutputStream.class, newOut);
+ newOut.registerCallback(new LoggingCallback(message, os));
+ }
+
+ }
+
+ class LoggingCallback implements CachedOutputStreamCallback {
+
+ private final Message message;
+ private final OutputStream origStream;
+
+ public LoggingCallback(final Message msg, final OutputStream os) {
+ this.message = msg;
+ this.origStream = os;
+ }
+
+ public void onFlush(CachedOutputStream cos) {
+
+ }
+
+ public void onClose(CachedOutputStream cos) {
+
+ String getValue = "";
+ String postValue = "";
+ String logValue = "";
+
+ try {
+ logValue = AAIConfig.get("aai.transaction.logging");
+ getValue = AAIConfig.get("aai.transaction.logging.get");
+ postValue = AAIConfig.get("aai.transaction.logging.post");
+ } catch (AAIException e) {
+				// unable to read the transaction logging configuration; log and continue
+				ErrorLogHelper.logException(e);
+ }
+
+ if (!message.getExchange().containsKey("AAI_LOGGING_HBASE_ENABLED") && !message.getExchange().containsKey("AAI_LOGGING_TRACE_ENABLED")) {
+ return;
+ }
+
+ String fullId = (String) message.getExchange().get(LoggingMessage.ID_KEY);
+
+ Message inMessage = message.getExchange().getInMessage();
+ String transId = null;
+ String fromAppId = null;
+
+ Map<String, List<String>> headersList = CastUtils.cast((Map<?, ?>) inMessage.get(Message.PROTOCOL_HEADERS));
+ if (headersList != null) {
+ List<String> xt = headersList.get("X-TransactionId");
+ if (xt != null) {
+ for (String transIdValue : xt) {
+ transId = transIdValue;
+ }
+ }
+ List<String> fa = headersList.get("X-FromAppId");
+ if (fa != null) {
+ for (String fromAppIdValue : fa) {
+
+ fromAppId = fromAppIdValue;
+ }
+ }
+ }
+
+ String httpMethod = (String) inMessage.get(Message.HTTP_REQUEST_METHOD);
+
+ String uri = (String) inMessage.get(CAMEL_REQUEST);
+ String fullUri = uri;
+ if (uri != null) {
+ String query = (String) message.get(Message.QUERY_STRING);
+ if (query != null) {
+ fullUri = uri + "?" + query;
+ }
+ }
+
+ String request = (String) message.getExchange().get(fullId + "_REQUEST");
+
+ Message outMessage = message.getExchange().getOutMessage();
+
+ final LoggingMessage buffer = new LoggingMessage("OUTMessage", fullId);
+
+ // should we check this, and make sure it's not an error?
+ Integer responseCode = (Integer) outMessage.get(Message.RESPONSE_CODE);
+ if (responseCode == null) {
+				// this should never happen, but default to 200 just in case we don't get one
+				responseCode = 200;
+ }
+ buffer.getResponseCode().append(responseCode);
+
+ String encoding = (String) outMessage.get(Message.ENCODING);
+
+ if (encoding != null) {
+ buffer.getEncoding().append(encoding);
+ }
+
+ String ct = (String) outMessage.get(Message.CONTENT_TYPE);
+ if (ct != null) {
+ buffer.getContentType().append(ct);
+ }
+
+ Object headers = outMessage.get(Message.PROTOCOL_HEADERS);
+ if (headers != null) {
+ buffer.getHeader().append(headers);
+ }
+
+ Boolean ss = false;
+ if (responseCode >= 200 && responseCode <= 299) {
+ ss = true;
+ }
+ String response = buffer.toString();
+
+ // this should have been set by the in interceptor
+ String rqstTm = (String) message.getExchange().get("AAI_RQST_TM");
+
+ // just in case it wasn't, we'll put this here. not great, but it'll
+ // have a val.
+ if (rqstTm == null) {
+ rqstTm = genDate();
+ }
+
+
+ String respTm = genDate();
+
+ try {
+ String actualRequest = request;
+ StringBuilder builder = new StringBuilder();
+				cos.writeCacheTo(builder, 100000);
+				// copy the cached response payload (capped at 100000 bytes)
+ String payload = builder.toString();
+
+ String actualResponse = response;
+				if (payload != null) {
+					actualResponse = response + payload;
+				}
+
+				// we only log to the AAI log if it's enabled in the config props file
+ if (message.getExchange().containsKey("AAI_LOGGING_TRACE_ENABLED")) {
+
+ if (message.getExchange().containsKey("AAI_LOGGING_TRACE_LOGREQUEST")) {
+
+ // strip newlines from request
+ String traceRequest = actualRequest;
+ traceRequest = traceRequest.replace("\n", " ");
+ traceRequest = traceRequest.replace("\r", "");
+ traceRequest = traceRequest.replace("\t", "");
+ LOGGER.debug(traceRequest);
+ }
+ if (message.getExchange().containsKey("AAI_LOGGING_TRACE_LOGRESPONSE")) {
+ // strip newlines from response
+ String traceResponse = actualResponse;
+ traceResponse = traceResponse.replace("\n", " ");
+ traceResponse = traceResponse.replace("\r", "");
+ traceResponse = traceResponse.replace("\t", "");
+
+ LOGGER.debug(traceResponse);
+ }
+ }
+
+ // we only log to HBASE if it's enabled in the config props file
+				// TODO: pretty print XML/JSON; we might need to get the payload
+				// and envelope separately
+ if (message.getExchange().containsKey("AAI_LOGGING_HBASE_ENABLED")) {
+ if (!message.getExchange().containsKey("AAI_LOGGING_HBASE_LOGREQUEST")) {
+ actualRequest = "loggingDisabled";
+ }
+ if (!message.getExchange().containsKey("AAI_LOGGING_HBASE_LOGRESPONSE")) {
+ actualResponse = "loggingDisabled";
+ }
+
+ LOGGER.debug("action={}, urlin={}, HbTransId={}", httpMethod, fullUri, fullId);
+
+					boolean skipTransactionLogging = logValue.equals("false")
+							|| (getValue.equals("false") && httpMethod.equals("GET"))
+							|| (postValue.equals("false") && httpMethod.equals("POST"));
+					if (!skipTransactionLogging) {
+						putTransaction(transId, responseCode.toString(), rqstTm, respTm, fromAppId + ":" + transId, fullUri, httpMethod, request, response, actualResponse);
+					}
+ }
+			} catch (Exception ex) {
+				// unable to capture the response payload for logging; continue without it
+				LOGGER.debug("Problem capturing response payload for transaction logging", ex);
+			}
+
+ message.setContent(OutputStream.class, origStream);
+
+ LOGGER.auditEvent("HTTP Response Code: {}", responseCode.toString());
+ }
+
+ }
+
+ protected String genDate() {
+ FormatDate fd = new FormatDate("YYMMdd-HH:mm:ss:SSS");
+ return fd.getDateTime();
+ }
+
+ public String putTransaction(String tid, String status, String rqstTm, String respTm, String srcId, String rsrcId, String rsrcType, String rqstBuf, String respBuf, String actualResponse) {
+ String tm = null;
+
+ if (tid == null || "".equals(tid)) {
+ tm = this.genDate();
+ tid = tm + "-";
+ }
+
+ String htid = tid;
+
+ if (rqstTm == null || "".equals(rqstTm)) {
+ rqstTm = tm;
+ }
+
+ if (respTm == null || "".equals(respTm)) {
+ respTm = tm;
+ }
+
+ try {
+ LOGGER.debug(" transactionId:" + tid + " status: " + status + " rqstDate: " + rqstTm + " respDate: " + respTm + " sourceId: " + srcId + " resourceId: "
+ + rsrcId + " resourceType: " + rsrcType + " payload rqstBuf: " + rqstBuf + " payload respBuf: " + respBuf + " Payload Error Messages: " + actualResponse);
+ return htid;
+ } catch (Exception e) {
+ ErrorLogHelper.logError("AAI_4000", "Exception updating HBase:");
+ return htid;
+ }
+
+ }
+}
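Whether putTransaction(...) above is invoked is gated by the aai.transaction.logging, aai.transaction.logging.get and aai.transaction.logging.post properties read in onClose(). A small standalone sketch of that gating, with an illustrative class name:

public class TransactionLogGateSketch {

    // mirrors the checks made before putTransaction(...) in the out interceptor
    static boolean shouldLogTransaction(String logValue, String getValue,
                                        String postValue, String httpMethod) {
        if ("false".equals(logValue)) {
            return false;                                   // transaction logging disabled globally
        }
        if ("false".equals(getValue) && "GET".equals(httpMethod)) {
            return false;                                   // GET payload logging disabled
        }
        if ("false".equals(postValue) && "POST".equals(httpMethod)) {
            return false;                                   // POST payload logging disabled
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(shouldLogTransaction("true", "false", "true", "GET"));  // false
        System.out.println(shouldLogTransaction("true", "false", "true", "PUT"));  // true
    }
}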
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/PostAaiAjscInterceptor.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/PostAaiAjscInterceptor.java
new file mode 100644
index 0000000..30382e4
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/PostAaiAjscInterceptor.java
@@ -0,0 +1,64 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors;
+
+import java.util.Map;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import ajsc.beans.interceptors.AjscInterceptor;
+
+public class PostAaiAjscInterceptor implements AjscInterceptor {
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(PostAaiAjscInterceptor.class);
+
+ private static class LazyAaiAjscInterceptor {
+ public static final PostAaiAjscInterceptor INSTANCE = new PostAaiAjscInterceptor();
+ }
+
+ public static PostAaiAjscInterceptor getInstance() {
+ return LazyAaiAjscInterceptor.INSTANCE;
+ }
+
+ @Override
+ public boolean allowOrReject(HttpServletRequest req, HttpServletResponse resp, Map<?, ?> paramMap)
+ throws Exception {
+ final String responseCode = LoggingContext.responseCode();
+
+ if (responseCode != null && responseCode.startsWith("ERR.")) {
+ LoggingContext.statusCode(StatusCode.ERROR);
+ LOGGER.error(req.getRequestURL() + " call failed with responseCode=" + responseCode);
+ } else {
+ LoggingContext.statusCode(StatusCode.COMPLETE);
+ LOGGER.info(req.getRequestURL() + " call succeeded");
+ }
+
+ LoggingContext.clear();
+ return true;
+ }
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/interceptors/PreAaiAjscInterceptor.java b/aai-traversal/src/main/java/org/onap/aai/interceptors/PreAaiAjscInterceptor.java
new file mode 100644
index 0000000..7d1ae73
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/interceptors/PreAaiAjscInterceptor.java
@@ -0,0 +1,55 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.interceptors;
+
+import java.util.Map;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.onap.aai.logging.LoggingContext;
+
+import ajsc.beans.interceptors.AjscInterceptor;
+
+public class PreAaiAjscInterceptor implements AjscInterceptor {
+
+ private static class LazyAaiAjscInterceptor {
+ public static final PreAaiAjscInterceptor INSTANCE = new PreAaiAjscInterceptor();
+ }
+
+ public static PreAaiAjscInterceptor getInstance() {
+ return LazyAaiAjscInterceptor.INSTANCE;
+ }
+
+ @Override
+ public boolean allowOrReject(HttpServletRequest req, HttpServletResponse resp, Map<?, ?> paramMap)
+ throws Exception {
+
+ LoggingContext.init();
+
+ LoggingContext.requestId(req.getHeader("X-TransactionId"));
+ LoggingContext.partnerName(req.getHeader("X-FromAppId"));
+ LoggingContext.serviceName(req.getMethod() + " " + req.getRequestURI().toString());
+
+ return true;
+ }
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/ExceptionHandler.java b/aai-traversal/src/main/java/org/onap/aai/rest/ExceptionHandler.java
new file mode 100644
index 0000000..c20a370
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/ExceptionHandler.java
@@ -0,0 +1,130 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.sun.istack.SAXParseException2;
+
+/**
+ * The Class ExceptionHandler.
+ */
+@Provider
+public class ExceptionHandler implements ExceptionMapper<Exception> {
+
+ @Context
+ private HttpServletRequest request;
+
+ @Context
+ private HttpHeaders headers;
+
+ /**
+	 * {@inheritDoc}
+ */
+ @Override
+ public Response toResponse(Exception exception) {
+
+ Response response = null;
+ ArrayList<String> templateVars = new ArrayList<String>();
+
+ //the general case is that cxf will give us a WebApplicationException
+ //with a linked exception
+ if (exception instanceof WebApplicationException) {
+ WebApplicationException e = (WebApplicationException) exception;
+ if (e.getCause() != null) {
+ if (e.getCause() instanceof SAXParseException2) {
+ templateVars.add("UnmarshalException");
+ AAIException ex = new AAIException("AAI_4007", exception);
+ response = Response
+ .status(400)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+ .build();
+ }
+ }
+ } else if (exception instanceof JsonParseException) {
+ //jackson does it differently so we get the direct JsonParseException
+ templateVars.add("JsonParseException");
+ AAIException ex = new AAIException("AAI_4007", exception);
+ response = Response
+ .status(400)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+ .build();
+ } else if (exception instanceof JsonMappingException) {
+			//jackson does it differently so we get the direct JsonMappingException
+ templateVars.add("JsonMappingException");
+ AAIException ex = new AAIException("AAI_4007", exception);
+ response = Response
+ .status(400)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+ .build();
+ }
+
+ // it didn't get set above, we wrap a general fault here
+ if (response == null) {
+
+ Exception actual_e = exception;
+ if (exception instanceof WebApplicationException) {
+ WebApplicationException e = (WebApplicationException) exception;
+ response = e.getResponse();
+ } else {
+ templateVars.add(request.getMethod());
+ templateVars.add("unknown");
+ AAIException ex = new AAIException("AAI_4000", actual_e);
+ List<MediaType> mediaTypes = headers.getAcceptableMediaTypes();
+ int setError = 0;
+
+ for (MediaType mediaType : mediaTypes) {
+ if (MediaType.APPLICATION_XML_TYPE.isCompatible(mediaType)) {
+ response = Response
+ .status(400)
+ .type(MediaType.APPLICATION_XML_TYPE)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+ .build();
+ setError = 1;
+ }
+ }
+ if (setError == 0) {
+ response = Response
+ .status(400)
+ .type(MediaType.APPLICATION_JSON_TYPE)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+ .build();
+ }
+ }
+ }
+ return response;
+ }
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/QueryConsumer.java b/aai-traversal/src/main/java/org/onap/aai/rest/QueryConsumer.java
new file mode 100644
index 0000000..8109f7e
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/QueryConsumer.java
@@ -0,0 +1,197 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest;
+
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.Encoded;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.introspection.Version;
+import org.onap.aai.parsers.query.QueryParser;
+import org.onap.aai.rest.db.HttpEntry;
+import org.onap.aai.rest.search.GenericQueryProcessor;
+import org.onap.aai.rest.search.QueryProcessorType;
+import org.onap.aai.restcore.HttpMethod;
+import org.onap.aai.restcore.RESTAPI;
+import org.onap.aai.restcore.util.URITools;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.queryformats.Format;
+import org.onap.aai.serialization.queryformats.FormatFactory;
+import org.onap.aai.serialization.queryformats.Formatter;
+import org.onap.aai.serialization.queryformats.SubGraphStyle;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+
+@Path("{version: v9|v1[01]}/query")
+public class QueryConsumer extends RESTAPI {
+
+ /** The introspector factory type. */
+ private ModelType introspectorFactoryType = ModelType.MOXY;
+
+ private QueryProcessorType processorType = QueryProcessorType.LOCAL_GROOVY;
+ /** The query style. */
+ private QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+ @PUT
+ @Consumes({ MediaType.APPLICATION_JSON})
+ @Produces({ MediaType.APPLICATION_JSON})
+ public Response executeQuery(String content, @PathParam("version")String versionParam, @PathParam("uri") @Encoded String uri, @DefaultValue("graphson") @QueryParam("format") String queryFormat,@DefaultValue("no_op") @QueryParam("subgraph") String subgraph, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
+
+ String sourceOfTruth = headers.getRequestHeaders().getFirst("X-FromAppId");
+ String realTime = headers.getRequestHeaders().getFirst("Real-Time");
+ String queryProcessor = headers.getRequestHeaders().getFirst("QueryProcessor");
+ QueryProcessorType processorType = this.processorType;
+ Response response = null;
+ TransactionalGraphEngine dbEngine = null;
+ try {
+ this.checkQueryParams(info.getQueryParameters());
+ Format format = Format.valueOf(queryFormat);
+ if (queryProcessor != null) {
+ processorType = QueryProcessorType.valueOf(queryProcessor);
+ }
+ SubGraphStyle subGraphStyle = SubGraphStyle.valueOf(subgraph);
+ JsonParser parser = new JsonParser();
+
+ JsonObject input = parser.parse(content).getAsJsonObject();
+
+ JsonElement startElement = input.get("start");
+ JsonElement queryElement = input.get("query");
+ JsonElement gremlinElement = input.get("gremlin");
+ List<URI> startURIs = new ArrayList<>();
+ String queryURI = "";
+ String gremlin = "";
+
+ Version version = Version.valueOf(versionParam);
+ DBConnectionType type = this.determineConnectionType(sourceOfTruth, realTime);
+ HttpEntry httpEntry = new HttpEntry(version, introspectorFactoryType, queryStyle, type);
+ dbEngine = httpEntry.getDbEngine();
+
+ if (startElement != null) {
+
+ if (startElement.isJsonArray()) {
+ for (JsonElement element : startElement.getAsJsonArray()) {
+ startURIs.add(new URI(element.getAsString()));
+ }
+ } else {
+ startURIs.add(new URI(startElement.getAsString()));
+ }
+ }
+
+ if (queryElement != null) {
+ queryURI = queryElement.getAsString();
+ }
+ if (gremlinElement != null) {
+ gremlin = gremlinElement.getAsString();
+ }
+ URI queryURIObj = new URI(queryURI);
+ GenericQueryProcessor processor = null;
+
+ if (!startURIs.isEmpty()) {
+ Set<Vertex> vertexSet = new LinkedHashSet<>();
+ QueryParser uriQuery;
+ List<Vertex> vertices;
+ for (URI startUri : startURIs) {
+ uriQuery = dbEngine.getQueryBuilder().createQueryFromURI(startUri, URITools.getQueryMap(startUri));
+ vertices = uriQuery.getQueryBuilder().toList();
+ vertexSet.addAll(vertices);
+ }
+
+ processor = new GenericQueryProcessor.Builder(dbEngine)
+ .startFrom(vertexSet).queryFrom(queryURIObj)
+ .processWith(processorType).create();
+ } else if (!queryURI.equals("")){
+ processor = new GenericQueryProcessor.Builder(dbEngine)
+ .queryFrom(queryURIObj)
+ .processWith(processorType).create();
+ } else {
+ processor = new GenericQueryProcessor.Builder(dbEngine)
+ .queryFrom(gremlin)
+ .processWith(processorType).create();
+ }
+ String result = "";
+ List<Object> vertices = processor.execute(subGraphStyle);
+ DBSerializer serializer = new DBSerializer(version, dbEngine, introspectorFactoryType, sourceOfTruth);
+ FormatFactory ff = new FormatFactory(httpEntry.getLoader(), serializer);
+
+			Formatter formatter = ff.get(format, info.getQueryParameters());
+
+			result = formatter.output(vertices).toString();
+
+ response = Response.status(Status.OK)
+ .type(MediaType.APPLICATION_JSON)
+ .entity(result).build();
+
+ } catch (AAIException e) {
+ response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, e);
+ } catch (Exception e ) {
+ AAIException ex = new AAIException("AAI_4000", e);
+
+ response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, ex);
+ } finally {
+ if (dbEngine != null) {
+ dbEngine.rollback();
+ }
+ }
+
+ return response;
+ }
+
+ public void checkQueryParams(MultivaluedMap<String, String> params) throws AAIException {
+
+ if (params.containsKey("depth") && params.getFirst("depth").matches("\\d+")) {
+ String depth = params.getFirst("depth");
+ Integer i = Integer.parseInt(depth);
+ if (i > 1) {
+ throw new AAIException("AAI_3303");
+ }
+ }
+
+
+ }
+
+}
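The executeQuery(...) method above accepts a JSON body with optional "start" (one A&AI URI or an array of URIs to seed the traversal), "query" (a stored-query URI) and "gremlin" (a raw traversal) elements. A sketch of building such a body with the same Gson types the consumer parses; the seed URI and stored-query name below are hypothetical:

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;

public class QueryBodySketch {
    public static void main(String[] args) {
        JsonArray start = new JsonArray();
        // hypothetical seed vertex URI
        start.add(new JsonPrimitive("/cloud-infrastructure/pservers/pserver/example-hostname"));

        JsonObject body = new JsonObject();
        body.add("start", start);
        // hypothetical stored query name, resolved via GremlinServerSingleton.getStoredQuery
        body.addProperty("query", "query/example-stored-query?hostname=example-hostname");

        // PUT this JSON to {version}/query?format=graphson with the usual
        // X-FromAppId / X-TransactionId headers.
        System.out.println(body);
    }
}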
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java b/aai-traversal/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java
new file mode 100644
index 0000000..0188142
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/retired/RetiredConsumer.java
@@ -0,0 +1,144 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.retired;
+
+import java.util.ArrayList;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.cxf.jaxrs.ext.PATCH;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.restcore.RESTAPI;
+import org.onap.aai.util.AAIConfig;
+
+/**
+ * The Class RetiredConsumer.
+ */
+public abstract class RetiredConsumer extends RESTAPI {
+
+ /**
+ * Creates the message get.
+ *
+ * @param versionParam the version param
+ * @param headers the headers
+ * @param info the info
+ * @param req the req
+ * @return the response
+ */
+ @GET
+ @Path("/{uri:.*}")
+ public Response createMessageGet(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
+ return createMessage(versionParam, headers, info, req);
+ }
+
+ /**
+ * Creates the message delete.
+ *
+ * @param versionParam the version param
+ * @param headers the headers
+ * @param info the info
+ * @param req the req
+ * @return the response
+ */
+ @DELETE
+ @Path("/{uri:.*}")
+ public Response createMessageDelete(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
+ return createMessage(versionParam, headers, info, req);
+ }
+
+ /**
+ * Creates the message post.
+ *
+ * @param versionParam the version param
+ * @param headers the headers
+ * @param info the info
+ * @param req the req
+ * @return the response
+ */
+ @POST
+ @Path("/{uri:.*}")
+ public Response createMessagePost(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
+ return createMessage(versionParam, headers, info, req);
+ }
+
+ @PATCH
+ @Path("/{uri:.*}")
+ public Response createMessagePatch(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
+ return createMessage(versionParam, headers, info, req);
+ }
+ /**
+ * Creates the message put.
+ *
+ * @param versionParam the version param
+ * @param headers the headers
+ * @param info the info
+ * @param req the req
+ * @return the response
+ */
+ @PUT
+ @Path("/{uri:.*}")
+ public Response createMessagePut(@PathParam("version")String versionParam, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
+ return createMessage(versionParam, headers, info, req);
+ }
+
+
+ /**
+ * Creates the message.
+ *
+ * @param versionParam the version param
+ * @param headers the headers
+ * @param info the info
+ * @param req the req
+ * @return the response
+ */
+ private Response createMessage(String versionParam, HttpHeaders headers, UriInfo info, HttpServletRequest req) {
+ AAIException e = new AAIException("AAI_3007");
+
+ ArrayList<String> templateVars = new ArrayList<String>();
+
+ if (templateVars.size() == 0) {
+ templateVars.add("PUT");
+ templateVars.add(info.getPath().toString());
+ templateVars.add(versionParam);
+ templateVars.add(AAIConfig.get("aai.default.api.version", ""));
+ }
+
+ Response response = Response
+ .status(e.getErrorObject().getHTTPResponseCode())
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e,
+ templateVars)).build();
+
+ return response;
+ }
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/retired/V3ThroughV7Consumer.java b/aai-traversal/src/main/java/org/onap/aai/rest/retired/V3ThroughV7Consumer.java
new file mode 100644
index 0000000..8e80a7b
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/retired/V3ThroughV7Consumer.java
@@ -0,0 +1,29 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.retired;
+
+import javax.ws.rs.Path;
+
+@Path("{version: v[3-7]}")
+public class V3ThroughV7Consumer extends RetiredConsumer {
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/retired/V7V8NamedQueries.java b/aai-traversal/src/main/java/org/onap/aai/rest/retired/V7V8NamedQueries.java
new file mode 100644
index 0000000..98be455
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/retired/V7V8NamedQueries.java
@@ -0,0 +1,29 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.retired;
+
+import javax.ws.rs.Path;
+
+@Path("{version: v[78]}/service-design-and-creation/named-queries")
+public class V7V8NamedQueries extends RetiredConsumer {
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java
new file mode 100644
index 0000000..9f6f3aa
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java
@@ -0,0 +1,231 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.search;
+
+import java.io.FileNotFoundException;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Vector;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;
+
+import org.apache.tinkerpop.gremlin.process.traversal.P;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.restcore.util.URITools;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.queryformats.SubGraphStyle;
+
+import jersey.repackaged.com.google.common.base.Joiner;
+
+public abstract class GenericQueryProcessor {
+
+ protected final Optional<URI> uri;
+ protected final MultivaluedMap<String, String> queryParams;
+ protected final Optional<Collection<Vertex>> vertices;
+ protected static Pattern p = Pattern.compile("query/(.*+)");
+ protected Optional<String> gremlin;
+ protected final TransactionalGraphEngine dbEngine;
+ protected static GremlinServerSingleton gremlinServerSingleton = GremlinServerSingleton.getInstance();
+ protected static GroovyQueryBuilderSingleton queryBuilderSingleton = GroovyQueryBuilderSingleton.getInstance();
+ protected final boolean isGremlin;
+
+ protected GenericQueryProcessor(Builder builder) {
+ this.uri = builder.getUri();
+ this.dbEngine = builder.getDbEngine();
+ this.vertices = builder.getVertices();
+ this.gremlin = builder.getGremlin();
+ this.isGremlin = builder.isGremlin();
+ if (uri.isPresent()) {
+ queryParams = URITools.getQueryMap(uri.get());
+ } else {
+ queryParams = new MultivaluedHashMap<>();
+ }
+ }
+
+ protected abstract GraphTraversal<?,?> runQuery(String query, Map<String, Object> params);
+
+ protected List<Object> processSubGraph(SubGraphStyle style, GraphTraversal<?,?> g) {
+ final List<Object> resultVertices = new Vector<>();
+ g.store("x");
+
+ if (SubGraphStyle.prune.equals(style) || SubGraphStyle.star.equals(style)) {
+ g.barrier().bothE();
+ if (SubGraphStyle.prune.equals(style)) {
+ g.where(__.otherV().where(P.within("x")));
+ }
+ g.dedup().subgraph("subGraph").cap("subGraph").map(x -> (Graph)x.get()).next().traversal().V().forEachRemaining(x -> {
+ resultVertices.add(x);
+ });
+ } else {
+ resultVertices.addAll(g.toList());
+ }
+ return resultVertices;
+ }
+
+ public List<Object> execute(SubGraphStyle style) throws FileNotFoundException {
+ final List<Object> resultVertices;
+
+ Pair<String, Map<String, Object>> tuple = this.createQuery();
+ String query = tuple.getValue0();
+ Map<String, Object> params = tuple.getValue1();
+
+ if (query.equals("") && (vertices.isPresent() && vertices.get().isEmpty())) {
+ //nothing to do, just exit
+ return new ArrayList<>();
+ }
+ GraphTraversal<?,?> g = this.runQuery(query, params);
+
+ resultVertices = this.processSubGraph(style, g);
+
+ return resultVertices;
+ }
+
+ protected Pair<String, Map<String, Object>> createQuery() {
+ Map<String, Object> params = new HashMap<>();
+ String query = "";
+ if (!this.isGremlin) {
+ Matcher m = p.matcher(uri.get().getPath());
+ String queryName = "";
+ if (m.find()) {
+ queryName = m.group(1);
+ }
+
+ for (String key : queryParams.keySet()) {
+ params.put(key, queryParams.getFirst(key));
+ }
+
+ query = gremlinServerSingleton.getStoredQuery(queryName);
+ if (query == null) {
+ query = "";
+ } else {
+ query = queryBuilderSingleton.executeTraversal(dbEngine, query, params);
+ }
+
+
+ List<Object> ids = new ArrayList<>();
+
+ if (vertices.isPresent() && !vertices.get().isEmpty()) {
+ for (Vertex v : vertices.get()) {
+ ids.add(v.id());
+ }
+ StringBuilder sb = new StringBuilder();
+ sb.append("[");
+ sb.append(Joiner.on(",").join(ids));
+ sb.append("]");
+ String startPrefix = "aaiStartQuery = " + sb.toString() + " as Object[];g.V(aaiStartQuery)";
+ if (!"".equals(query)) {
+ query = startPrefix + query;
+ } else {
+ query = startPrefix;
+ }
+ }
+
+ } else {
+ query = gremlin.get();
+ }
+
+ return new Pair<>(query, params);
+ }
+
+ public static class Builder {
+
+ private final TransactionalGraphEngine dbEngine;
+ private Optional<URI> uri = Optional.empty();
+ private Optional<String> gremlin = Optional.empty();
+ private boolean isGremlin = false;
+ private Optional<Collection<Vertex>> vertices = Optional.empty();
+ private QueryProcessorType processorType = QueryProcessorType.GREMLIN_SERVER;
+
+ public Builder(TransactionalGraphEngine dbEngine) {
+ this.dbEngine = dbEngine;
+ }
+
+ public Builder queryFrom(URI uri) {
+ this.uri = Optional.of(uri);
+ this.isGremlin = false;
+ return this;
+ }
+
+ public Builder startFrom(Collection<Vertex> vertices) {
+ this.vertices = Optional.of(vertices);
+ return this;
+ }
+
+ public Builder queryFrom(String gremlin) {
+ this.gremlin = Optional.of(gremlin);
+ this.isGremlin = true;
+ return this;
+ }
+
+ public Builder processWith(QueryProcessorType type) {
+ this.processorType = type;
+ return this;
+ }
+ public TransactionalGraphEngine getDbEngine() {
+ return dbEngine;
+ }
+
+ public Optional<URI> getUri() {
+ return uri;
+ }
+
+ public Optional<String> getGremlin() {
+ return gremlin;
+ }
+
+ public boolean isGremlin() {
+ return isGremlin;
+ }
+
+ public Optional<Collection<Vertex>> getVertices() {
+ return vertices;
+ }
+
+ public QueryProcessorType getProcessorType() {
+ return processorType;
+ }
+
+ public GenericQueryProcessor create() {
+
+ if (this.getProcessorType().equals(QueryProcessorType.GREMLIN_SERVER)) {
+ return new GremlinServerImpl(this);
+ } else if (this.getProcessorType().equals(QueryProcessorType.LOCAL_GROOVY)) {
+ return new GroovyShellImpl(this);
+ } else {
+ return new GremlinServerImpl(this);
+ }
+ }
+
+ }
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinGroovyShellSingleton.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinGroovyShellSingleton.java
new file mode 100644
index 0000000..69e08ff
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinGroovyShellSingleton.java
@@ -0,0 +1,89 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.search;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.codehaus.groovy.ast.ClassHelper;
+import org.codehaus.groovy.ast.expr.ClassExpression;
+import org.codehaus.groovy.ast.expr.PropertyExpression;
+import org.codehaus.groovy.control.CompilerConfiguration;
+import org.codehaus.groovy.control.customizers.ASTTransformationCustomizer;
+import org.codehaus.groovy.control.customizers.ImportCustomizer;
+
+import groovy.lang.Binding;
+import groovy.lang.GroovyShell;
+import groovy.lang.Script;
+import groovy.transform.TimedInterrupt;
+
+/**
+ * Creates and returns a groovy shell with the
+ * configuration to statically import graph classes
+ *
+ */
+public class GremlinGroovyShellSingleton {
+
+ private final GroovyShell shell;
+ private GremlinGroovyShellSingleton() {
+ Map<String, Object> parameters = new HashMap<>();
+ parameters.put("value", 30000);
+ parameters.put("unit", new PropertyExpression(new ClassExpression(ClassHelper.make(TimeUnit.class)),"MILLISECONDS"));
+
+ ASTTransformationCustomizer custom = new ASTTransformationCustomizer(parameters, TimedInterrupt.class);
+ ImportCustomizer imports = new ImportCustomizer();
+ imports.addStaticStars(
+ "org.apache.tinkerpop.gremlin.process.traversal.P"
+ );
+ imports.addImports(
+ "org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__",
+ "org.apache.tinkerpop.gremlin.structure.T",
+ "org.apache.tinkerpop.gremlin.process.traversal.P");
+ CompilerConfiguration config = new CompilerConfiguration();
+ config.addCompilationCustomizers(custom, imports);
+
+ this.shell = new GroovyShell(config);
+ }
+
+ private static class Helper {
+ private static final GremlinGroovyShellSingleton INSTANCE = new GremlinGroovyShellSingleton();
+ }
+
+ public static GremlinGroovyShellSingleton getInstance() {
+
+ return Helper.INSTANCE;
+ }
+
+ /**
+ * @param traversal
+ * @param params
+ * @return result of graph traversal
+ */
+ public GraphTraversal<?, ?> executeTraversal (String traversal, Map<String, Object> params) {
+ Binding binding = new Binding(params);
+ Script script = shell.parse(traversal);
+ script.setBinding(binding);
+ return (GraphTraversal<?, ?>) script.run();
+ }
+}
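A sketch of driving the shell above with a parameterized traversal: the traversal source here is an empty in-memory graph and the traversal string is hypothetical, but it shows how executeTraversal binds params into the script (which also runs under the 30-second TimedInterrupt configured in the constructor):

import java.util.HashMap;
import java.util.Map;

import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph;
import org.onap.aai.rest.search.GremlinGroovyShellSingleton;

public class GroovyShellSketch {
    public static void main(String[] args) {
        // any traversal source will do for illustration; A&AI supplies its own
        GraphTraversalSource g = EmptyGraph.instance().traversal();

        Map<String, Object> params = new HashMap<>();
        params.put("g", g);
        params.put("hostname", "example-hostname");

        GraphTraversal<?, ?> traversal = GremlinGroovyShellSingleton.getInstance()
                .executeTraversal("g.V().has('hostname', hostname)", params);
        System.out.println(traversal);
    }
}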
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerImpl.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerImpl.java
new file mode 100644
index 0000000..a05b3ae
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerImpl.java
@@ -0,0 +1,77 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.search;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Vector;
+
+import org.apache.tinkerpop.gremlin.driver.Client;
+import org.apache.tinkerpop.gremlin.driver.Cluster;
+import org.apache.tinkerpop.gremlin.driver.ResultSet;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+
+import org.onap.aai.util.AAIConfig;
+
+public class GremlinServerImpl extends GenericQueryProcessor {
+
+
+ protected GremlinServerImpl(Builder builder) {
+ super(builder);
+ }
+
+
+ @Override
+ protected GraphTraversal<?,?> runQuery(String query, Map<String, Object> params) {
+
+ //must force them into ids because of serialization issue with
+ //tinkerpop-3.0.1-incubating
+ query += ".id()";
+ String rebindGraph = AAIConfig.get("aai.server.rebind", "g");
+
+ if(!"g".equals(rebindGraph)){
+ query = query.replaceFirst("g\\.V\\(", rebindGraph + ".V(");
+ }
+
+ Cluster cluster = gremlinServerSingleton.getCluster();
+ Client client = cluster.connect();
+
+ ResultSet results = client.submit(query, params);
+
+
+ List<Object> vIds = new Vector<>();
+ results.stream().forEach(x -> {
+ Object obj = x.getObject();
+ vIds.add(obj);
+ });
+
+ client.close();
+
+ if (vIds.isEmpty()) {
+ return __.start();
+ } else {
+ return this.dbEngine.asAdmin().getTraversalSource().V(vIds.toArray());
+ }
+ }
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerSingleton.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerSingleton.java
new file mode 100644
index 0000000..e4ac815
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/GremlinServerSingleton.java
@@ -0,0 +1,131 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.search;
+
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.FileWatcher;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.tinkerpop.gremlin.driver.Cluster;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Date;
+import java.util.Properties;
+import java.util.Timer;
+import java.util.TimerTask;
+
+public class GremlinServerSingleton {
+
+ private static EELFLogger logger = EELFManager.getInstance().getLogger(GremlinServerSingleton.class);
+
+ private Cluster cluster;
+ private boolean timerSet;
+ private Timer timer;
+ private Properties properties;
+
+ private static class Helper {
+ private static final GremlinServerSingleton INSTANCE = new GremlinServerSingleton();
+ }
+
+ public static GremlinServerSingleton getInstance() {
+ return Helper.INSTANCE;
+ }
+
+ private GremlinServerSingleton(){
+ init();
+ }
+
+ /**
+ * Initializes the gremlin server singleton.
+ * Loads the gremlin server configuration and creates a cluster,
+ * then loads the gremlin query file into the properties object.
+ * Finally, creates a file watcher that checks the query file every
+ * ten seconds and reloads it into the properties object on change.
+ *
+ */
+ private void init() {
+
+ properties = new Properties();
+
+ try {
+ cluster = Cluster.build(new File(AAIConstants.AAI_HOME_ETC_APP_PROPERTIES + "gremlin-server-config.yaml"))
+ .maxContentLength(6537920)
+ .create();
+ } catch (FileNotFoundException e) {
+ logger.error("Unable to find the file: " + e);
+ }
+
+ File queryFile = new File(AAIConstants.AAI_HOME_ETC_QUERY);
+
+ try (FileInputStream fis = new FileInputStream(queryFile)){
+ properties.load(fis);
+ } catch (IOException e) {
+ logger.error("Error occurred during the processing of query file: " + e);
+ }
+
+
+ TimerTask task = new FileWatcher(new File(AAIConstants.AAI_HOME_ETC_QUERY)) {
+ @Override
+ protected void onChange(File file) {
+ File queryFile = new File(AAIConstants.AAI_HOME_ETC_QUERY);
+ try (FileInputStream fis = new FileInputStream(queryFile)){
+ properties.load(fis);
+ logger.debug("File: " + file + " was changed so the cluster is rebuild for gremlin server");
+ } catch (FileNotFoundException e) {
+ logger.error("Unable to find the file: " + e);
+ } catch (IOException e) {
+ logger.error("Error occurred during the processing of query file: " + e);
+ }
+ }
+ };
+
+ if (!timerSet) {
+ timerSet = true;
+ timer = new Timer();
+ timer.schedule( task , new Date(), 10000 );
+ }
+
+ }
+
+ public Cluster getCluster(){
+ return cluster;
+ }
+
+ /**
+ * Gets the stored query associated with the given key.
+ *
+ * The existence of the key is purposely not checked first: a separate
+ * contains-then-get sequence would force this method to be synchronized
+ * to avoid a race between the check and the read.
+ *
+ * @param key the name of the stored query to look up
+ * @return the query string if the key exists, or null if it doesn't
+ */
+ public String getStoredQuery(String key){
+ return (String) properties.get(key);
+ }
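+
+ // Illustrative usage (hypothetical key name; real keys come from the query file):
+ //   String q = GremlinServerSingleton.getInstance().getStoredQuery("pserver-from-hostname");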
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyQueryBuilderSingleton.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyQueryBuilderSingleton.java
new file mode 100644
index 0000000..e4ddbfe
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyQueryBuilderSingleton.java
@@ -0,0 +1,97 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.search;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.codehaus.groovy.ast.ClassHelper;
+import org.codehaus.groovy.ast.expr.ClassExpression;
+import org.codehaus.groovy.ast.expr.PropertyExpression;
+import org.codehaus.groovy.control.CompilerConfiguration;
+import org.codehaus.groovy.control.customizers.ASTTransformationCustomizer;
+import org.codehaus.groovy.control.customizers.ImportCustomizer;
+import org.onap.aai.query.builder.QueryBuilder;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import groovy.lang.Binding;
+import groovy.lang.GroovyShell;
+import groovy.lang.Script;
+import groovy.transform.TimedInterrupt;
+
+/**
+ * Creates and returns a groovy shell with the
+ * configuration to statically import graph classes
+ *
+ */
+public class GroovyQueryBuilderSingleton {
+
+ private final GroovyShell shell;
+ private GroovyQueryBuilderSingleton() {
+ Map<String, Object> parameters = new HashMap<>();
+ parameters.put("value", 30000);
+ parameters.put("unit", new PropertyExpression(new ClassExpression(ClassHelper.make(TimeUnit.class)),"MILLISECONDS"));
+
+ ASTTransformationCustomizer custom = new ASTTransformationCustomizer(parameters, TimedInterrupt.class);
+ ImportCustomizer imports = new ImportCustomizer();
+ imports.addStaticStars(
+ "org.apache.tinkerpop.gremlin.process.traversal.P"
+ );
+ imports.addImports(
+ "org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__",
+ "org.apache.tinkerpop.gremlin.structure.T",
+ "org.apache.tinkerpop.gremlin.process.traversal.P",
+ "org.onap.aai.serialization.db.EdgeType");
+ CompilerConfiguration config = new CompilerConfiguration();
+ config.addCompilationCustomizers(custom, imports);
+
+ this.shell = new GroovyShell(config);
+ }
+
+ private static class Helper {
+ private static final GroovyQueryBuilderSingleton INSTANCE = new GroovyQueryBuilderSingleton();
+ }
+
+ public static GroovyQueryBuilderSingleton getInstance() {
+
+ return Helper.INSTANCE;
+ }
+
+ /**
+ * Runs the given groovy traversal text with the query builder bound as "builder"
+ * and returns the gremlin query produced by that builder.
+ *
+ * @param engine the transactional graph engine used to create the query builder
+ * @param traversal the groovy traversal text to execute
+ * @param params the variables bound into the groovy script
+ * @return the gremlin query built by the traversal
+ */
+ public String executeTraversal (TransactionalGraphEngine engine, String traversal, Map<String, Object> params) {
+ QueryBuilder<Vertex> builder = engine.getQueryBuilder(QueryStyle.GREMLIN_TRAVERSAL);
+ Binding binding = new Binding(params);
+ binding.setVariable("builder", builder);
+ Script script = shell.parse(traversal);
+ script.setBinding(binding);
+ script.run();
+
+ return builder.getQuery();
+ }
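+
+ // Illustrative usage (hypothetical traversal text and parameter; stored queries supply real ones):
+ //   Map<String, Object> params = new HashMap<>();
+ //   params.put("hostname", "example-host");
+ //   String gremlin = GroovyQueryBuilderSingleton.getInstance()
+ //       .executeTraversal(engine, "builder.getVerticesByProperty('hostname', hostname)", params);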
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java
new file mode 100644
index 0000000..272ccd0
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java
@@ -0,0 +1,48 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.search;
+
+import java.util.Map;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+
+import org.onap.aai.restcore.search.GremlinGroovyShellSingleton;
+
+public class GroovyShellImpl extends GenericQueryProcessor {
+
+ protected GroovyShellImpl(Builder builder) {
+ super(builder);
+ }
+
+ @Override
+ protected GraphTraversal<?,?> runQuery(String query, Map<String, Object> params) {
+
+ params.put("g", this.dbEngine.asAdmin().getTraversalSource());
+
+ GremlinGroovyShellSingleton shell = GremlinGroovyShellSingleton.getInstance();
+
+ return shell.executeTraversal(query, params);
+ }
+
+}
+
+
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/ModelAndNamedQueryRestProvider.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/ModelAndNamedQueryRestProvider.java
new file mode 100644
index 0000000..49d9dc5
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/ModelAndNamedQueryRestProvider.java
@@ -0,0 +1,200 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.search;
+
+import java.util.ArrayList;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+
+import org.onap.aai.dbgraphmap.SearchGraph;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.extensions.AAIExtensionMap;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.restcore.RESTAPI;
+import org.onap.aai.util.AAIApiVersion;
+
+/**
+ * Implements the search subdomain in the REST API. All API calls must include
+ * X-FromAppId and X-TransactionId in the header.
+ */
+
+@Path("/search")
+public class ModelAndNamedQueryRestProvider extends RESTAPI {
+
+ protected static String authPolicyFunctionName = "search";
+
+ public static final String NAMED_QUERY = "/named-query";
+
+ public static final String MODEL_QUERY = "/model";
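+
+ // Illustrative calls (hypothetical host and payloads): POST <base>/search/named-query with a
+ // named-query request payload, and POST <base>/search/model?action=DELETE with a model payload.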
+
+ /**
+ * Gets the named query response.
+ *
+ * @param headers the headers
+ * @param req the req
+ * @param queryParameters the query parameters
+ * @return the named query response
+ */
+ /* ---------------- Start Named Query --------------------- */
+ @POST
+ @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+ @Path(NAMED_QUERY)
+ public Response getNamedQueryResponse(@Context HttpHeaders headers,
+ @Context HttpServletRequest req,
+ String queryParameters) {
+ AAIException ex = null;
+ Response response = null;
+ String fromAppId = null;
+ String transId = null;
+ String rqstTm = genDate();
+ ArrayList<String> templateVars = new ArrayList<String>();
+ try {
+ fromAppId = getFromAppId(headers);
+ transId = getTransId(headers);
+
+ AAIExtensionMap aaiExtMap = new AAIExtensionMap();
+ aaiExtMap.setHttpHeaders(headers);
+ aaiExtMap.setServletRequest(req);
+ aaiExtMap.setApiVersion(AAIApiVersion.get());
+ String realTime = headers.getRequestHeaders().getFirst("Real-Time");
+ //only consider header value for search
+ DBConnectionType type = this.determineConnectionType("force-cache", realTime);
+
+ SearchGraph searchGraph = new SearchGraph();
+ response = searchGraph.runNamedQuery(fromAppId, transId, queryParameters, type, aaiExtMap);
+
+ String respTm = genDate();
+
+ } catch (AAIException e) {
+ // send error response
+ ex = e;
+ templateVars.add("POST Search");
+ templateVars.add("getNamedQueryResponse");
+ response = Response
+ .status(e.getErrorObject().getHTTPResponseCode())
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e, templateVars))
+ .build();
+ } catch (Exception e) {
+ // send error response
+ ex = new AAIException("AAI_4000", e);
+ templateVars.add("POST Search");
+ templateVars.add("getNamedQueryResponse");
+ response = Response
+ .status(Status.INTERNAL_SERVER_ERROR)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+ .build();
+ } finally {
+ // log success or failure
+ if (ex != null) {
+ ErrorLogHelper.logException(ex);
+ }
+ }
+ return response;
+ }
+
+ /**
+ * Gets the model query response.
+ *
+ * @param headers the headers
+ * @param req the req
+ * @param inboundPayload the inbound payload
+ * @param action the action
+ * @return the model query response
+ */
+ /* ---------------- Start Model Query --------------------- */
+ @POST
+ @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+ @Path(MODEL_QUERY)
+ public Response getModelQueryResponse(@Context HttpHeaders headers,
+ @Context HttpServletRequest req,
+ String inboundPayload,
+ @QueryParam("action") String action) {
+ AAIException ex = null;
+ Response response = null;
+ String fromAppId = null;
+ String transId = null;
+ String rqstTm = genDate();
+ ArrayList<String> templateVars = new ArrayList<String>();
+ try {
+ fromAppId = getFromAppId(headers);
+ transId = getTransId(headers);
+
+ AAIExtensionMap aaiExtMap = new AAIExtensionMap();
+ aaiExtMap.setHttpHeaders(headers);
+ aaiExtMap.setServletRequest(req);
+ aaiExtMap.setApiVersion(AAIApiVersion.get());
+ aaiExtMap.setFromAppId(fromAppId);
+ aaiExtMap.setTransId(transId);
+
+ String realTime = headers.getRequestHeaders().getFirst("Real-Time");
+ //only consider header value for search
+ DBConnectionType type = this.determineConnectionType("force-cache", realTime);
+
+ SearchGraph searchGraph = new SearchGraph();
+ if (action != null && action.equalsIgnoreCase("DELETE")) {
+ response = searchGraph.executeModelOperation(fromAppId, transId, inboundPayload, type, true, aaiExtMap);
+ } else {
+ response = searchGraph.executeModelOperation(fromAppId, transId, inboundPayload, type, false, aaiExtMap);
+ }
+ String respTm = genDate();
+
+ } catch (AAIException e) {
+ // send error response
+ ex = e;
+ templateVars.add("POST Search");
+ templateVars.add("getModelQueryResponse");
+ response = Response
+ .status(e.getErrorObject().getHTTPResponseCode())
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e, templateVars))
+ .build();
+ } catch (Exception e) {
+ // send error response
+ ex = new AAIException("AAI_4000", e);
+ templateVars.add("POST Search");
+ templateVars.add("getModelQueryResponse");
+ response = Response
+ .status(Status.INTERNAL_SERVER_ERROR)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+ .build();
+ } finally {
+ // log success or failure
+ if (ex != null) {
+ ErrorLogHelper.logException(ex);
+ }
+ }
+ return response;
+ }
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/QueryProcessorType.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/QueryProcessorType.java
new file mode 100644
index 0000000..832936a
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/QueryProcessorType.java
@@ -0,0 +1,28 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.search;
+
+public enum QueryProcessorType {
+
+ GREMLIN_SERVER,
+ LOCAL_GROOVY
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/search/SearchProvider.java b/aai-traversal/src/main/java/org/onap/aai/rest/search/SearchProvider.java
new file mode 100644
index 0000000..23bdaf0
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/search/SearchProvider.java
@@ -0,0 +1,254 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.search;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbgraphmap.SearchGraph;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.introspection.Version;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.restcore.RESTAPI;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TitanDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.queryformats.utils.UrlBuilder;
+
+/**
+ * Implements the search subdomain in the REST API. All API calls must include
+ * X-FromAppId and X-TransactionId in the header.
+ */
+
+@Path("/{version: v2|v[789]|v1[01]|latest}/search")
+public class SearchProvider extends RESTAPI {
+
+ protected static String authPolicyFunctionName = "search";
+
+ public static final String GENERIC_QUERY = "/generic-query";
+
+ public static final String NODES_QUERY = "/nodes-query";
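+
+ // Illustrative requests (hypothetical values):
+ //   GET /v11/search/generic-query?start-node-type=pserver&key=pserver.hostname:example-host&include=pserver&depth=0
+ //   GET /v11/search/nodes-query?search-node-type=pserver&filter=hostname:EQUALS:example-host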
+
+ /**
+ * Gets the generic query response.
+ *
+ * @param headers the headers
+ * @param req the req
+ * @param startNodeType the start node type
+ * @param startNodeKeyParams the start node key params
+ * @param includeNodeTypes the include node types
+ * @param depth the depth
+ * @return the generic query response
+ */
+ /* ---------------- Start Generic Query --------------------- */
+ @GET
+ @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+ @Path(GENERIC_QUERY)
+ public Response getGenericQueryResponse(@Context HttpHeaders headers,
+ @Context HttpServletRequest req,
+ @QueryParam("start-node-type") final String startNodeType,
+ @QueryParam("key") final List<String> startNodeKeyParams,
+ @QueryParam("include") final List<String> includeNodeTypes,
+ @QueryParam("depth") final int depth,
+ @PathParam("version")String versionParam
+ ) {
+
+ AAIException ex = null;
+ Response searchResult = null;
+ String fromAppId = null;
+ String transId = null;
+ String rqstTm = genDate();
+ ArrayList<String> templateVars = new ArrayList<String>();
+ try {
+ fromAppId = getFromAppId(headers);
+ transId = getTransId(headers);
+ String realTime = headers.getRequestHeaders().getFirst("Real-Time");
+ //only consider header value for search
+ DBConnectionType type = this.determineConnectionType("force-cache", realTime);
+ final Version version;
+ if ("latest".equals(versionParam)) {
+ version = AAIProperties.LATEST;
+ } else {
+ version = Version.valueOf(versionParam);
+ }
+ final ModelType factoryType = ModelType.MOXY;
+ Loader loader = LoaderFactory.createLoaderForVersion(factoryType, version);
+ TransactionalGraphEngine dbEngine = new TitanDBEngine(
+ QueryStyle.TRAVERSAL,
+ type,
+ loader);
+ DBSerializer dbSerializer = new DBSerializer(version, dbEngine, factoryType, fromAppId);
+ UrlBuilder urlBuilder = new UrlBuilder(version, dbSerializer);
+ SearchGraph searchGraph = new SearchGraph();
+ searchResult = searchGraph.runGenericQuery(
+ headers,
+ startNodeType,
+ startNodeKeyParams,
+ includeNodeTypes,
+ depth,
+ dbEngine,
+ loader,
+ urlBuilder
+
+ );
+
+ String respTm = genDate();
+
+ } catch (AAIException e) {
+ // send error response
+ ex = e;
+ templateVars.add("GET Search");
+ templateVars.add("getGenericQueryResponse");
+ searchResult = Response
+ .status(e.getErrorObject().getHTTPResponseCode())
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e, templateVars))
+ .build();
+ } catch (Exception e) {
+ // send error response
+ ex = new AAIException("AAI_4000", e);
+ templateVars.add("GET Search");
+ templateVars.add("getGenericQueryResponse");
+ searchResult = Response
+ .status(Status.INTERNAL_SERVER_ERROR)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+ .build();
+ } finally {
+ // log success or failure
+ if (ex != null){
+ ErrorLogHelper.logException(ex);
+ }
+ }
+
+ return searchResult;
+ }
+
+ /* ---------------- End Generic Query --------------------- */
+
+ /**
+ * Gets the nodes query response.
+ *
+ * @param headers the headers
+ * @param req the req
+ * @param searchNodeType the search node type
+ * @param edgeFilterList the edge filter list
+ * @param filterList the filter list
+ * @return the nodes query response
+ */
+ /* ---------------- Start Nodes Query --------------------- */
+ @GET
+ @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+ @Path(NODES_QUERY)
+ public Response getNodesQueryResponse(@Context HttpHeaders headers,
+ @Context HttpServletRequest req,
+ @QueryParam("search-node-type") final String searchNodeType,
+ @QueryParam("edge-filter") final List<String> edgeFilterList,
+ @QueryParam("filter") final List<String> filterList,
+ @PathParam("version")String versionParam) {
+ AAIException ex = null;
+ Response searchResult = null;
+ String fromAppId = null;
+ String transId = null;
+ String rqstTm = genDate();
+ ArrayList<String> templateVars = new ArrayList<String>();
+ try {
+ fromAppId = getFromAppId(headers);
+ transId = getTransId(headers);
+ String realTime = headers.getRequestHeaders().getFirst("Real-Time");
+ //only consider header value for search
+ DBConnectionType type = this.determineConnectionType("force-cache", realTime);
+
+ final Version version;
+ if ("latest".equals(versionParam)) {
+ version = AAIProperties.LATEST;
+ } else {
+ version = Version.valueOf(versionParam);
+ }
+ final ModelType factoryType = ModelType.MOXY;
+ Loader loader = LoaderFactory.createLoaderForVersion(factoryType, version);
+ TransactionalGraphEngine dbEngine = new TitanDBEngine(
+ QueryStyle.TRAVERSAL,
+ type,
+ loader);
+ DBSerializer dbSerializer = new DBSerializer(version, dbEngine, factoryType, fromAppId);
+ UrlBuilder urlBuilder = new UrlBuilder(version, dbSerializer);
+ SearchGraph searchGraph = new SearchGraph();
+
+ searchResult = searchGraph.runNodesQuery(headers,
+ searchNodeType,
+ edgeFilterList,
+ filterList,
+ dbEngine,
+ loader,
+ urlBuilder);
+
+ String respTm = genDate();
+ } catch (AAIException e) {
+ // send error response
+ ex = e;
+ templateVars.add("GET Search");
+ templateVars.add("getNodesQueryResponse");
+ searchResult = Response
+ .status(e.getErrorObject().getHTTPResponseCode())
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e, templateVars))
+ .build();
+ } catch (Exception e) {
+ // send error response
+ ex = new AAIException("AAI_4000", e);
+ templateVars.add("GET Search");
+ templateVars.add("getNodesQueryResponse");
+ searchResult = Response
+ .status(Status.INTERNAL_SERVER_ERROR)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+ .build();
+ } finally {
+ // log success or failure
+ if (ex != null){
+ ErrorLogHelper.logException(ex);
+ }
+ }
+ return searchResult;
+ }
+
+
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/util/EchoResponse.java b/aai-traversal/src/main/java/org/onap/aai/rest/util/EchoResponse.java
new file mode 100644
index 0000000..55a07e4
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/util/EchoResponse.java
@@ -0,0 +1,122 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.util;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.restcore.RESTAPI;
+
+/**
+ * The Class EchoResponse.
+ */
+public class EchoResponse extends RESTAPI {
+
+ protected static String authPolicyFunctionName = "util";
+
+ public static final String echoPath = "/util/echo";
+
+ /**
+ * Simple health-check API that echoes back the X-FromAppId and X-TransactionId to clients.
+ * If there is a query string, a transaction gets logged into hbase, proving the application is connected to the data store.
+ * If there is no query string, no transaction logging is done to hbase.
+ *
+ * @param headers the headers
+ * @param req the req
+ * @param myAction if present, causes the transaction to be logged to hbase
+ * @return the response
+ */
+ @GET
+ @Produces( { MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
+ @Path(echoPath)
+ public Response echoResult(@Context HttpHeaders headers, @Context HttpServletRequest req,
+ @QueryParam("action") String myAction) {
+ Response response = null;
+
+ AAIException ex = null;
+ String fromAppId = null;
+ String transId = null;
+
+ try {
+ fromAppId = getFromAppId(headers );
+ transId = getTransId(headers);
+ } catch (AAIException e) {
+ ArrayList<String> templateVars = new ArrayList<String>();
+ templateVars.add("PUT uebProvider");
+ templateVars.add("addTopic");
+ return Response
+ .status(e.getErrorObject().getHTTPResponseCode())
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e, templateVars))
+ .build();
+ }
+
+ try {
+
+ HashMap<AAIException, ArrayList<String>> exceptionList = new HashMap<AAIException, ArrayList<String>>();
+
+ ArrayList<String> templateVars = new ArrayList<String>();
+ templateVars.add(fromAppId);
+ templateVars.add(transId);
+
+ exceptionList.put(new AAIException("AAI_0002", "OK"), templateVars);
+
+ response = Response.status(Status.OK)
+ .entity(ErrorLogHelper.getRESTAPIInfoResponse(
+ headers.getAcceptableMediaTypes(), exceptionList))
+ .build();
+
+ } catch (Exception e) {
+ ex = new AAIException("AAI_4000", e);
+ ArrayList<String> templateVars = new ArrayList<String>();
+ templateVars.add(Action.GET.name());
+ templateVars.add(fromAppId +" "+transId);
+
+ response = Response
+ .status(Status.INTERNAL_SERVER_ERROR)
+ .entity(ErrorLogHelper.getRESTAPIErrorResponse(
+ headers.getAcceptableMediaTypes(), ex,
+ templateVars)).build();
+
+ } finally {
+ if (ex != null) {
+ ErrorLogHelper.logException(ex);
+ }
+
+ }
+
+ return response;
+ }
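+
+ // Illustrative call (hypothetical base URL): GET <base>/util/echo with the X-FromAppId and
+ // X-TransactionId headers set returns an AAI_0002 "OK" info response.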
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/util/LogFormatTools.java b/aai-traversal/src/main/java/org/onap/aai/rest/util/LogFormatTools.java
new file mode 100644
index 0000000..cfda0c3
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/util/LogFormatTools.java
@@ -0,0 +1,37 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.util;
+
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+
+public class LogFormatTools {
+
+ private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
+ private static final DateTimeFormatter DTF = DateTimeFormatter.ofPattern(DATE_FORMAT)
+ .withZone(ZoneOffset.UTC);
+
+ public static String getCurrentDateTime() {
+ return DTF.format(ZonedDateTime.now());
+ }
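+
+ // e.g. returns a UTC timestamp such as "2017-09-28T21:44:52.123+0000" (illustrative value)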
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/rest/util/ValidateEncoding.java b/aai-traversal/src/main/java/org/onap/aai/rest/util/ValidateEncoding.java
new file mode 100644
index 0000000..7d4b314
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/rest/util/ValidateEncoding.java
@@ -0,0 +1,161 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.rest.util;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URI;
+
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.UriInfo;
+
+import org.springframework.web.util.UriUtils;
+
+/**
+ * The Class ValidateEncoding.
+ */
+public class ValidateEncoding {
+
+ private final String encoding = "UTF-8";
+
+ /**
+ * Instantiates a new validate encoding.
+ */
+ private ValidateEncoding() {
+
+ }
+
+ /**
+ * The Class Helper.
+ */
+ private static class Helper {
+
+ /** The Constant INSTANCE. */
+ private static final ValidateEncoding INSTANCE = new ValidateEncoding();
+ }
+
+ /**
+ * Gets the single instance of ValidateEncoding.
+ *
+ * @return single instance of ValidateEncoding
+ */
+ public static ValidateEncoding getInstance() {
+ return Helper.INSTANCE;
+ }
+
+ /**
+ * Validate.
+ *
+ * @param uri the uri
+ * @return true, if successful
+ * @throws UnsupportedEncodingException the unsupported encoding exception
+ */
+ public boolean validate(URI uri) throws UnsupportedEncodingException {
+ boolean result = true;
+ if (!validatePath(uri.getRawPath())) {
+ result = false;
+ }
+ /*if (!validateQueryParams(uri.getRawQuery())) {
+ result = false;
+ } //TODO
+ */
+
+ return result;
+ }
+
+ /**
+ * Validate.
+ *
+ * @param info the info
+ * @return true, if successful
+ * @throws UnsupportedEncodingException the unsupported encoding exception
+ */
+ public boolean validate(UriInfo info) throws UnsupportedEncodingException {
+ boolean result = true;
+ if (!validatePath(info.getPath(false))) {
+ result = false;
+ }
+ if (!validateQueryParams(info.getQueryParameters(false))) {
+ result = false;
+ }
+
+ return result;
+ }
+
+ /**
+ * Validate path.
+ *
+ * @param path the path
+ * @return true, if successful
+ * @throws UnsupportedEncodingException the unsupported encoding exception
+ */
+ private boolean validatePath(String path) throws UnsupportedEncodingException {
+ String[] segments = path.split("/");
+ boolean valid = true;
+ for (String segment : segments) {
+ if (!this.checkEncoding(segment)) {
+ valid = false;
+ }
+ }
+
+ return valid;
+
+ }
+
+ /**
+ * Validate query params.
+ *
+ * @param params the params
+ * @return true, if successful
+ * @throws UnsupportedEncodingException the unsupported encoding exception
+ */
+ private boolean validateQueryParams(MultivaluedMap<String, String> params) throws UnsupportedEncodingException {
+ boolean valid = true;
+
+ for (String key : params.keySet()) {
+ if (!this.checkEncoding(key)) {
+ valid = false;
+ }
+ for (String item : params.get(key)) {
+ if (!this.checkEncoding(item)) {
+ valid = false;
+ }
+ }
+ }
+ return valid;
+ }
+
+ /**
+ * Check encoding.
+ *
+ * @param segment the segment
+ * @return true, if successful
+ * @throws UnsupportedEncodingException the unsupported encoding exception
+ */
+ private boolean checkEncoding(String segment) throws UnsupportedEncodingException {
+ boolean result = false;
+ String decode = UriUtils.decode(segment, encoding);
+ String encode = UriUtils.encode(decode, encoding);
+ result = segment.equals(encode);
+
+ return result;
+ }
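+
+ // e.g. "foo%20bar" decodes and re-encodes to itself and passes, while a raw "foo bar"
+ // re-encodes to "foo%20bar" and fails (illustrative values)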
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/transforms/Converter.java b/aai-traversal/src/main/java/org/onap/aai/transforms/Converter.java
new file mode 100644
index 0000000..cb7525f
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/transforms/Converter.java
@@ -0,0 +1,26 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.transforms;
+
+public interface Converter {
+ String convert(String input);
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/transforms/LowerCamelToLowerHyphenConverter.java b/aai-traversal/src/main/java/org/onap/aai/transforms/LowerCamelToLowerHyphenConverter.java
new file mode 100644
index 0000000..df9ccc5
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/transforms/LowerCamelToLowerHyphenConverter.java
@@ -0,0 +1,35 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.transforms;
+
+import com.google.common.base.CaseFormat;
+
+public class LowerCamelToLowerHyphenConverter implements Converter {
+
+ @Override
+ public String convert(String input) {
+ if(input == null){
+ return null;
+ }
+ return CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_HYPHEN, input);
+ }
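+
+ // e.g. "vnfId" becomes "vnf-id" (illustrative value)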
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/transforms/LowerHyphenToLowerCamelConverter.java b/aai-traversal/src/main/java/org/onap/aai/transforms/LowerHyphenToLowerCamelConverter.java
new file mode 100644
index 0000000..e983dfb
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/transforms/LowerHyphenToLowerCamelConverter.java
@@ -0,0 +1,82 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.transforms;
+
+/**
+ * <b>LowerHyphenToLowerCamelConverter</b> converts strings
+ * from lower hyphen format to lower camel case
+ * <p>
+ * Examples:
+ * lower-test => lowerTest
+ * lower-Test => lowerTest
+ * lowerTest => lowerTest
+ * lower-test-val => lowerTestVal
+ * <p>
+ *
+ */
+public class LowerHyphenToLowerCamelConverter implements Converter {
+
+ /**
+ * Converts the dash-formatted string into a camel case string,
+ * ensuring that capitalization is not lost during the conversion.
+ * <p>
+ * Loops through each character in the string: when the current character
+ * is '-', it sets isPreviousCharDash to true and continues to the next
+ * iteration. Otherwise, if the previous character was a dash, the current
+ * character is upper cased and appended to the builder; if not, the
+ * current character is appended unmodified.
+ *
+ * @param input the input string to convert to camel case
+ * @return the string converted to camel case, or null if the input is null
+ */
+ @Override
+ public String convert(String input) {
+ if(input == null){
+ return null;
+ }
+
+ int size = input.length();
+ StringBuilder builder = new StringBuilder(size);
+
+ boolean isPreviousCharDash = false;
+
+ for(int index = 0; index < size; ++index){
+ char ch = input.charAt(index);
+
+ if(ch == '-'){
+ isPreviousCharDash = true;
+ continue;
+ }
+ if(isPreviousCharDash){
+ builder.append(Character.toUpperCase(ch));
+ isPreviousCharDash = false;
+ } else{
+ builder.append(ch);
+ }
+ }
+
+ return builder.toString();
+ }
+
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/transforms/MapTraverser.java b/aai-traversal/src/main/java/org/onap/aai/transforms/MapTraverser.java
new file mode 100644
index 0000000..983602e
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/transforms/MapTraverser.java
@@ -0,0 +1,88 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.transforms;
+
+
+import joptsimple.internal.Objects;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class MapTraverser {
+
+ private Converter converter;
+
+ public MapTraverser(Converter converter){
+ this.converter = converter;
+ }
+
+ public Map<String, Object> convertKeys(Map<String, Object> map){
+
+ Objects.ensureNotNull(map);
+
+ Map<String, Object> modifiedMap = new HashMap<String, Object>();
+ convertKeys(map, modifiedMap);
+
+ return modifiedMap;
+ }
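+
+ // Illustrative conversion (hypothetical input) with a LowerHyphenToLowerCamelConverter:
+ //   {"vnf-id": "abc", "relationship-list": [{"related-to": "vserver"}]}
+ //   becomes {"vnfId": "abc", "relationshipList": [{"relatedTo": "vserver"}]}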
+
+ private Map<String, Object> convertKeys(Map<String, Object> original, Map<String, Object> modified){
+
+ for(Map.Entry<String, Object> entry : original.entrySet()){
+ String key = entry.getKey();
+ key = converter.convert(key);
+ Object value = entry.getValue();
+ if(value instanceof Map){
+ modified.put(key, convertKeys((Map<String, Object>)value, new HashMap<String, Object>()));
+ } else if(value instanceof List){
+ modified.put(key, convertKeys((List<Object>) value));
+ } else {
+ modified.put(key, value);
+ }
+ }
+
+ return modified;
+ }
+
+ public List<Object> convertKeys(List<Object> list){
+
+ List<Object> modifiedList = new ArrayList<Object>();
+ if(list != null && list.size() > 0){
+
+ for(Object o : list){
+ if(o instanceof Map){
+ Map<String, Object> map = (Map<String, Object>) o;
+ modifiedList.add(convertKeys(map));
+ } else if(o instanceof List){
+ List<Object> l = (List<Object>) o;
+ modifiedList.add(convertKeys(l));
+ } else {
+ modifiedList.add(o);
+ }
+ }
+ }
+
+ return modifiedList;
+ }
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java b/aai-traversal/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java
new file mode 100644
index 0000000..49a77d8
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/util/AAIAppServletContextListener.java
@@ -0,0 +1,110 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.util;
+
+import java.io.IOException;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
+//import org.apache.activemq.broker.BrokerService;
+
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.ModelInjestor;
+import org.onap.aai.logging.ErrorLogHelper;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+public class AAIAppServletContextListener implements ServletContextListener {
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(AAIAppServletContextListener.class.getName());
+
+ //private BrokerService broker = new BrokerService();
+
+ /**
+ * Destroys Context
+ *
+ * @param arg0 the ServletContextEvent
+ */
+ public void contextDestroyed(ServletContextEvent arg0) {
+ LOGGER.info("AAIGraph shutting down");
+ AAIGraph.getInstance().graphShutdown();
+ LOGGER.info("AAIGraph shutdown");
+
+ //try {
+ ////broker.stop();
+ //} catch (Exception e) {
+ //// TODO Auto-generated catch block
+ //e.printStackTrace();
+ //}
+ }
+
+ /**
+ * Initializes Context
+ *
+ * @param arg0 the ServletContextEvent
+ */
+ public void contextInitialized(ServletContextEvent arg0) {
+ System.setProperty("org.onap.aai.serverStarted", "false");
+ LOGGER.info("***AAI Server initialization started...");
+
+ try {
+ LOGGER.info("Loading aaiconfig.properties");
+ AAIConfig.init();
+
+ LOGGER.info("Loading error.properties");
+ ErrorLogHelper.loadProperties();
+
+ LOGGER.info("Loading graph database");
+
+ AAIGraph.getInstance();
+ ModelInjestor.getInstance();
+
+ // Jms internal broker for aai events
+ //broker = new BrokerService();
+ //broker.addConnector("tcp://localhost:61446");
+ //broker.setPersistent(false);
+ //broker.setUseJmx(false);
+ //broker.setSchedulerSupport(false);
+ //broker.start();
+
+ LOGGER.info("AAI Server initialization succcessful.");
+ System.setProperty("org.onap.aai.serverStarted", "true");
+
+ } catch (AAIException e) {
+ ErrorLogHelper.logException(e);
+ throw new RuntimeException("AAIException caught while initializing AAI server", e);
+ } catch (IOException e) {
+ ErrorLogHelper.logError("AAI_4000", e.getMessage());
+ throw new RuntimeException("IOException caught while initializing AAI server", e);
+ } catch (Exception e) {
+ LOGGER.error("Unknown failure while initializing AAI Server", e);
+ throw new RuntimeException("Unknown failure while initializing AAI server", e);
+ }
+
+ LOGGER.info("Graph-Query MicroService Started");
+ LOGGER.error("Graph-Query MicroService Started");
+ LOGGER.debug("Graph-Query MicroService Started");
+
+ }
+}
diff --git a/aai-traversal/src/main/java/org/onap/aai/util/MakeNamedQuery.java b/aai-traversal/src/main/java/org/onap/aai/util/MakeNamedQuery.java
new file mode 100644
index 0000000..fde25a7
--- /dev/null
+++ b/aai-traversal/src/main/java/org/onap/aai/util/MakeNamedQuery.java
@@ -0,0 +1,254 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ */
+package org.onap.aai.util;
+import java.io.File;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.UUID;
+
+import org.apache.commons.io.FileUtils;
+
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.introspection.Version;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+
+public class MakeNamedQuery {
+
+ public static void main(String[] args) throws Exception {
+ String _apiVersion = AAIConfig.get(AAIConstants.AAI_DEFAULT_API_VERSION_PROP);
+ String widgetJsonDir = null;
+ String modelVersion = null;
+ String namedQueryUuid = UUID.randomUUID().toString();
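+
+ // Illustrative invocation (hypothetical paths and values):
+ //   java ... MakeNamedQuery v11 /opt/app/widget-model-json 1.0 <named-query-uuid>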
+ if (args.length > 0 && args[0] != null) {
+ _apiVersion = args[0];
+ }
+ if (args.length > 1 && args[1] != null) {
+ widgetJsonDir = args[1];
+ }
+ if (args.length > 2 && args[2] != null) {
+ modelVersion = args[2];
+ }
+ if (args.length > 3 && args[3] != null) {
+ namedQueryUuid = args[3];
+ }
+
+ if (widgetJsonDir == null) {
+ System.err.println("You must specify a directory for widgetModelJson");
+ System.exit(0);
+ }
+ if (modelVersion == null) {
+ System.err.println("You must specify a modelVersion");
+ System.exit(0);
+ }
+
+
+ Loader loader = LoaderFactory.createLoaderForVersion(ModelType.MOXY, Version.valueOf(_apiVersion));
+
+ // iterate the collection of resources
+
+ ArrayList<String> processedWidgets = new ArrayList<String>();
+
+
+ HashMap<String, List<Introspector>> widgetToRelationship = new HashMap<String, List<Introspector>>();
+ for (Entry<String, Introspector> aaiResEnt : loader.getAllObjects().entrySet()) {
+ Introspector meObject = loader.introspectorFromName("model");
+ // no need for a ModelVers DynamicEntity
+
+ Introspector aaiRes = aaiResEnt.getValue();
+
+ if (!(aaiRes.isContainer() || aaiRes.getName().equals("aai-internal"))) {
+ String resource = aaiRes.getName();
+
+ if (processedWidgets.contains(resource)) {
+ continue;
+ }
+ processedWidgets.add(resource);
+
+ String widgetName = resource;
+ String filePathString = widgetJsonDir + "/" + widgetName + "-" + modelVersion + ".json";
+ File f = new File(filePathString);
+ if (f.exists()) {
+ System.out.println(f.toString());
+ String json = FileUtils.readFileToString(f);
+
+ meObject = loader.unmarshal("Model", json);
+ String modelInvariantId = meObject.getValue("model-invariant-id");
+ if (meObject.hasProperty("model-vers")) {
+ Introspector modelVers = meObject.getWrappedValue("model-vers");
+ List<Introspector> modelVerList = (List<Introspector>) modelVers.getWrappedListValue("model-ver");
+ for (Introspector modelVer : modelVerList) {
+
+ List<Introspector> relList = new ArrayList<Introspector>();
+ Introspector widgetRelationship = makeWidgetRelationship(loader, modelInvariantId,
+ modelVer.getValue("model-version-id").toString());
+ relList.add(widgetRelationship);
+
+ widgetToRelationship.put(widgetName, relList);
+ }
+ }
+ }
+ }
+ }
+
+// esr-system-info-from-vnf=builder.store('x').union(\
+// builder.newInstance().createEdgeTraversal(EdgeType.COUSIN, 'generic-vnf', 'vserver').store('x').union(\
+// builder.newInstance().createEdgeTraversal(EdgeType.TREE, 'vserver', 'tenant').store('x')\
+// .createEdgeTraversal(EdgeType.TREE, 'tenant', 'cloud-region').store('x')\
+// .createEdgeTraversal(EdgeType.TREE, 'cloud-region', 'esr-system-info').store('x')\
+// )).cap('x').unfold.dedup()
+
+ //source vnf-id, related service-instance-id, all related vnfs in this service-instance-id
+
+ //this should be abstracted and moved to a file
+
+ HashMap<String, List<Introspector>> relationshipMap = new HashMap<String, List<Introspector>>();
+
+ List<Introspector> genericVnfRelationship = widgetToRelationship.get("generic-vnf");
+ List<Introspector> vserverRelationship = widgetToRelationship.get("vserver");
+ List<Introspector> tenantRelationship = widgetToRelationship.get("tenant");
+ List<Introspector> cloudRegionRelationship = widgetToRelationship.get("cloud-region");
+ List<Introspector> esrSystemInfoRelationship = widgetToRelationship.get("esr-system-info");
+
+ Introspector namedQueryObj = loader.introspectorFromName("named-query");
+ namedQueryObj.setValue("named-query-uuid", namedQueryUuid);
+ namedQueryObj.setValue("named-query-name", "vnf-to-esr-system-info");
+ namedQueryObj.setValue("named-query-version", "1.0");
+ namedQueryObj.setValue("description", "Named Query - VNF to ESR System Info");
+
+ Introspector genericVnfNQE = setupNQElements(namedQueryObj, genericVnfRelationship);
+
+ Introspector vserverNQE = setupNQElements(genericVnfNQE, vserverRelationship);
+
+ Introspector tenantNQE = setupNQElements(vserverNQE, tenantRelationship);
+
+ Introspector cloudRegionNQE = setupNQElements(tenantNQE, cloudRegionRelationship);
+
+ Introspector esrSystemInfoNQE = setupNQElements(cloudRegionNQE, esrSystemInfoRelationship);
+
+ System.out.println(namedQueryObj.marshal(true));
+
+ System.exit(0);
+
+ }
+ private static List<Introspector> getRels(String widgetName, HashMap<String, Introspector> widgetToRelationship) {
+ List<Introspector> relList = new ArrayList<Introspector>();
+ Introspector genericVnfRelationship = widgetToRelationship.get(widgetName);
+ relList.add(genericVnfRelationship);
+ return relList;
+ }
+
+ private static Introspector setupNQElements (Introspector nqeObj, List<Introspector> listOfRelationships) {
+ Introspector newNQElement = null;
+ try {
+ Introspector newNQElements = null;
+ List<Object> nqElementList = null;
+ if (nqeObj.getWrappedValue("named-query-elements") != null) {
+ newNQElements = nqeObj.getWrappedValue("named-query-elements");
+ nqElementList = newNQElements.getValue("named-query-element");
+ } else {
+ newNQElements = nqeObj.newIntrospectorInstanceOfProperty("named-query-elements");
+ nqeObj.setValue("named-query-elements", newNQElements.getUnderlyingObject());
+ nqElementList = (List<Object>)newNQElements.getValue("named-query-element");
+ }
+ newNQElement = loadNQElement(newNQElements, listOfRelationships);
+ nqElementList.add(newNQElement.getUnderlyingObject());
+
+ } catch (AAIUnknownObjectException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ } catch (IllegalArgumentException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ return newNQElement;
+ }
+
+ private static Introspector loadNQElement (Introspector nqElements, List<Introspector> listOfRelationships) {
+ Introspector newNqElement = null;
+ try {
+ newNqElement = nqElements.getLoader().introspectorFromName("named-query-element");
+
+ //newNqElement.setValue("named-query-element-uuid", UUID.randomUUID().toString());
+
+ Introspector newRelationshipList = newNqElement.getLoader().introspectorFromName("relationship-list");
+ newNqElement.setValue("relationship-list", newRelationshipList.getUnderlyingObject());
+
+ List<Object> newRelationshipListList = (List<Object>)newRelationshipList.getValue("relationship");
+
+ for (Introspector rel : listOfRelationships) {
+ newRelationshipListList.add(rel.getUnderlyingObject());
+ }
+
+ } catch (AAIUnknownObjectException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ } catch (IllegalArgumentException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ return newNqElement;
+
+ }
+ private static Introspector makeWidgetRelationship(Loader loader, String modelInvariantId, String modelVersionId) {
+
+ Introspector newRelationship = null;
+ try {
+ newRelationship = loader.introspectorFromName("relationship");
+
+ List<Object> newRelationshipData = (List<Object>)newRelationship.getValue("relationship-data");
+
+ newRelationship.setValue("related-to", "model");
+
+ Introspector newRelationshipDatum1 = newRelationship.getLoader().introspectorFromName("relationship-data");
+ Introspector newRelationshipDatum2 = newRelationship.getLoader().introspectorFromName("relationship-data");
+
+
+ newRelationshipDatum1.setValue("relationship-key", "model.model-invariant-id");
+ newRelationshipDatum1.setValue("relationship-value", modelInvariantId);
+
+ //newRelationshipDatum2.setValue("relationship-key", "model-ver.model-version-id");
+ //newRelationshipDatum2.setValue("relationship-value", modelVersionId);
+
+ newRelationshipData.add(newRelationshipDatum1.getUnderlyingObject());
+ //newRelationshipData.add(newRelationshipDatum2.getUnderlyingObject());
+ } catch (AAIUnknownObjectException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ } catch (IllegalArgumentException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ return newRelationship;
+ }
+
+} \ No newline at end of file