aboutsummaryrefslogtreecommitdiffstats
path: root/asdctool/src/main/java
diff options
context:
space:
mode:
authorMichael Lando <ml636r@att.com>2017-02-19 10:28:42 +0200
committerMichael Lando <ml636r@att.com>2017-02-19 10:51:01 +0200
commit451a3400b76511393c62a444f588a4ed15f4a549 (patch)
treee4f5873a863d1d3e55618eab48b83262f874719d /asdctool/src/main/java
parent5abfe4e1fb5fae4bbd5fbc340519f52075aff3ff (diff)
Initial OpenECOMP SDC commit
Change-Id: I0924d5a6ae9cdc161ae17c68d3689a30d10f407b Signed-off-by: Michael Lando <ml636r@att.com>
Diffstat (limited to 'asdctool/src/main/java')
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java152
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java830
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java51
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java694
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java364
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/PopulateComponentCache.java388
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java103
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java83
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java180
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AddGroupUuid.java132
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AllowMultipleHeats.java144
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AppConfig.java538
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/DerivedFromAlignment.java232
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/GroupsAlignment.java201
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/MigrationCategory.java48
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/MigrationSubCategory.java36
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/ServiceMigration.java1703
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/VfcNamingAlignment.java185
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1607/CsarMigration.java93
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/TitanFixUtils.java387
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/ToscaArtifactsAlignment.java461
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java97
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java109
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java169
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/main/MigrationMenu.java251
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/main/RemoveUtils.java78
-rw-r--r--asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java122
27 files changed, 7831 insertions, 0 deletions
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
new file mode 100644
index 0000000000..3b7d3ec11d
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
@@ -0,0 +1,152 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.tinkerpop.gremlin.structure.Element;
+import org.apache.tinkerpop.gremlin.structure.Property;
+import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.thinkaurelius.titan.core.TitanFactory;
+import com.thinkaurelius.titan.core.TitanGraph;
+
+//import org.openecomp.sdc.be.auditing.impl.AuditingManager;
+
+//import org.openecomp.sdc.be.info.errors.ResponseFormat;
+
+public class Utils {
+
+ private static Logger log = LoggerFactory.getLogger(Utils.class.getName());
+
+ public static String NEW_LINE = System.getProperty("line.separator");
+
+ public static Response buildOkResponse(
+ /*
+ * ResponseFormat errorResponseWrapper,
+ */int status, Object entity, Map<String, String> additionalHeaders) {
+ // int status = errorResponseWrapper.getStatus();
+ ResponseBuilder responseBuilder = Response.status(status);
+ if (entity != null) {
+ log.trace("returned entity is {}", entity.toString());
+ responseBuilder = responseBuilder.entity(entity);
+ }
+ if (additionalHeaders != null) {
+ for (Entry<String, String> additionalHeader : additionalHeaders.entrySet()) {
+ String headerName = additionalHeader.getKey();
+ String headerValue = additionalHeader.getValue();
+ log.trace("Adding header {} with value {} to the response", headerName, headerValue);
+ responseBuilder.header(headerName, headerValue);
+ }
+ }
+ return responseBuilder.build();
+ }
+
+ public static TitanGraph openGraph(Configuration conf) {
+
+ TitanGraph graph = null;
+ try {
+
+ graph = TitanFactory.open(conf);
+
+ } catch (Exception e) {
+ log.error("Failed to start open graph", e);
+ }
+
+ return graph;
+
+ }
+
+ public static boolean vertexLeftContainsRightProps(Map<String, Object> leftProps, Map<String, Object> rightProps) {
+
+ if (rightProps != null) {
+
+ for (Entry<String, Object> entry : rightProps.entrySet()) {
+ String key = entry.getKey();
+ Object leftValue = leftProps.get(key);
+ Object rightValue = entry.getValue();
+ if (leftValue == null) {
+ if (rightValue == null) {
+ continue;
+ } else {
+ log.debug("The key {} cannot be found in the properties {}", key, leftProps);
+ return false;
+ }
+ }
+
+ // if (false == leftValue instanceof Map && false == leftValue
+ // instanceof List) {
+ if (false == leftValue.equals(rightValue)) {
+ log.trace("The value of key {} is different between properties {} vs {}", key, leftValue, rightValue);
+ return false;
+ }
+ // }
+ }
+
+ }
+
+ return true;
+ }
+
+ public static void setProperties(Element element, Map<String, Object> properties) {
+
+ if (properties != null && false == properties.isEmpty()) {
+
+ Object[] propertyKeyValues = new Object[properties.size() * 2];
+ int i = 0;
+ for (Entry<String, Object> entry : properties.entrySet()) {
+ propertyKeyValues[i++] = entry.getKey();
+ propertyKeyValues[i++] = entry.getValue();
+ }
+
+ ElementHelper.attachProperties(element, propertyKeyValues);
+
+ }
+
+ }
+
+ public static Map<String, Object> getProperties(Element element) {
+
+ Map<String, Object> result = new HashMap<String, Object>();
+ ;
+
+ if (element.keys() != null && element.keys().size() > 0) {
+ Map<String, Property> propertyMap = ElementHelper.propertyMap(element,
+ element.keys().toArray(new String[element.keys().size()]));
+
+ for (Entry<String, Property> entry : propertyMap.entrySet()) {
+ String key = entry.getKey();
+ Object value = entry.getValue().value();
+
+ result.put(key, value);
+ }
+ }
+ return result;
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
new file mode 100644
index 0000000000..a78ea9bc18
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
@@ -0,0 +1,830 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl;
+
+import com.carrotsearch.hppc.cursors.ObjectCursor;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
+import fj.data.Either;
+import org.apache.commons.lang.SystemUtils;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.SearchHit;
+import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
+import org.openecomp.sdc.be.dao.cassandra.schema.Table;
+import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
+import org.openecomp.sdc.be.resources.data.ESArtifactData;
+import org.openecomp.sdc.be.resources.data.auditing.*;
+import org.openecomp.sdc.common.datastructure.AuditingFieldsKeysEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.io.*;
+import java.lang.reflect.Type;
+import java.net.MalformedURLException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.TimeZone;
+
+/**
+ * Created by mlando on 5/16/2016.
+ */
+public class DataMigration {
+
+ private Gson gson = new Gson();
+
+ private ObjectMapper jsonMapper = new ObjectMapper();
+
+ private static Logger log = LoggerFactory.getLogger(DataMigration.class.getName());
+
+ protected ElasticSearchClient elasticSearchClient;
+ @Autowired
+ protected AuditCassandraDao auditCassandraDao;
+ @Autowired
+ protected ArtifactCassandraDao artifactCassandraDao;
+ private static final String dateFormatPattern = "yyyy-MM-dd HH:mm:ss.SSS z";
+ private static SimpleDateFormat simpleDateFormat;
+
    /**
     * Exports the audit and artifact records from Elasticsearch to files and
     * imports them into Cassandra. If the output files are not empty the
     * export is skipped and the existing files are reused. The import only
     * runs when the Cassandra tables are empty; otherwise the process stops.
     * On an import failure the flow exits and clears all the Cassandra tables.
     *
     * @param appConfigDir
     *            the location of the dir in which the output files will be
     *            stored
     * @param exportFromEs
     *            should the es be exported again and overwrite the old export
     * @param importToCassandra
     *            should we import the data into cassandra
     * @return true in case the operation was successful.
     */
    public boolean migrateDataESToCassndra(String appConfigDir, boolean exportFromEs, boolean importToCassandra) {
        initFormater();
        if (!initEsClient())
            return false;
        // one output file per Cassandra table; null signals a creation failure
        Map<Table, File> files = createOutPutFiles(appConfigDir, exportFromEs);
        if (files == null) {
            return false;
        }
        // export only when requested AND no previous export exists on disk
        if (exportFromEs && filesEmpty(files)) {
            Map<Table, PrintWriter> printerWritersMap = createWriters(files);
            if (printerWritersMap == null) {
                return false;
            }
            try {
                // iterate the ES indices: "resources" holds the artifacts, the
                // "auditingevents*" indices hold the audit records
                ImmutableOpenMap<String, IndexMetaData> indexData = getIndexData();
                for (ObjectCursor<String> key : indexData.keys()) {
                    if ("resources".equalsIgnoreCase(key.value)) {
                        if (!exportArtifacts(key.value, printerWritersMap)) {
                            return false;
                        }
                    } else if (key.value.startsWith("auditingevents")) {
                        if (!exportAudit(key.value, printerWritersMap)) {
                            return false;
                        }
                    }
                }
            } finally {
                // always release the ES client and close every writer, even
                // when one of the exports failed mid-way
                if (elasticSearchClient != null) {
                    elasticSearchClient.close();
                }
                for (PrintWriter writer : printerWritersMap.values()) {
                    writer.close();
                }
            }
        }
        if (importToCassandra && !importToCassndra(files)) {
            return false;
        }

        return true;
    }
+
    // Initializes the shared timestamp formatter (pattern "yyyy-MM-dd HH:mm:ss.SSS z", UTC).
    // NOTE(review): SimpleDateFormat is not thread-safe and this is a static
    // field - acceptable for a single-threaded CLI tool, confirm before reuse.
    private void initFormater() {
        simpleDateFormat = new SimpleDateFormat(dateFormatPattern);
        simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    }
+
+ private boolean initEsClient() {
+ String configHome = System.getProperty("config.home");
+ URL url = null;
+ Settings settings = null;
+ try {
+ if (SystemUtils.IS_OS_WINDOWS) {
+ url = new URL("file:///" + configHome + "/elasticsearch.yml");
+ } else {
+ url = new URL("file:" + configHome + "/elasticsearch.yml");
+ }
+ log.debug("URL {}", url);
+ settings = Settings.settingsBuilder().loadFromPath(Paths.get(url.toURI())).build();
+ } catch (MalformedURLException | URISyntaxException e1) {
+ log.error("Failed to create URL in order to load elasticsearch yml", e1);
+ return true;
+ }
+
+ this.elasticSearchClient = new ElasticSearchClient();
+ this.elasticSearchClient.setClusterName(settings.get("cluster.name"));
+ this.elasticSearchClient.setLocal(settings.get("elasticSearch.local"));
+ this.elasticSearchClient.setTransportClient(settings.get("elasticSearch.transportclient"));
+ try {
+ elasticSearchClient.initialize();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ return false;
+ }
+ return true;
+ }
+
    /**
     * Clears all the Cassandra tables (artifacts and audit) so a later run can
     * start from a clean state after a failed import.
     */
    private void truncateCassandraTable() {
        log.info("import failed. truncating Cassandra tables.");
        // remove everything written so far via the two DAOs
        artifactCassandraDao.deleteAllArtifacts();
        auditCassandraDao.deleteAllAudit();
    }
+
+ /**
+ * the method imports the records from the files into cassandra
+ *
+ * @param files
+ * a map of files holding
+ * @return true if the operation was successful
+ */
+ private boolean importToCassndra(Map<Table, File> files) {
+ log.info("starting to import date into Cassandra.");
+ if (!validtaTablsNotEmpty(files))
+ return true;
+ for (Table table : files.keySet()) {
+ log.info("importing recordes into {}", table.getTableDescription().getTableName());
+ if (!handleImport(files, table)) {
+ truncateCassandraTable();
+ return false;
+ }
+ }
+ log.info("finished to import date into Cassandra.");
+ return true;
+ }
+
+ private boolean validtaTablsNotEmpty(Map<Table, File> files) {
+ for (Table table : files.keySet()) {
+ Either<Boolean, CassandraOperationStatus> isTableEmptyRes = checkIfTableIsEmpty(table);
+ if (isTableEmptyRes.isRight() || !isTableEmptyRes.left().value()) {
+ log.error("Cassandra table {} is not empty operation aborted.",
+ table.getTableDescription().getTableName());
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * the method retrieves the fields from the given map and praprs them for
+ * storage as an audit according to the table name
+ *
+ * @param map
+ * the map from which we will retrive the fields enum values
+ * @param table
+ * the table we are going to store the record in.
+ * @return a enummap representing the audit record that is going to be
+ * created.
+ */
+ private EnumMap<AuditingFieldsKeysEnum, Object> createAuditMap(Map<String, String> map, Table table) {
+ EnumMap<AuditingFieldsKeysEnum, Object> auditingFields = new EnumMap<>(AuditingFieldsKeysEnum.class);
+ switch (table) {
+ case USER_ADMIN_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_USER_AFTER, map.get("USER_AFTER"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_USER_BEFORE, map.get("USER_BEFORE"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
+ break;
+ case USER_ACCESS_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_USER_UID, map.get("USER"));
+ break;
+ case RESOURCE_ADMIN_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_INVARIANT_UUID, map.get("INVARIANT_UUID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_VERSION, map.get("CURR_VERSION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_STATE, map.get("CURR_STATE"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ID, map.get("DID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_PREV_VERSION, map.get("PREV_VERSION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_PREV_STATE, map.get("PREV_STATE"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_NAME, map.get("RESOURCE_NAME"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TYPE, map.get("RESOURCE_TYPE"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_DPREV_STATUS, map.get("DPREV_STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_DCURR_STATUS, map.get("DCURR_STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TOSCA_NODE_TYPE, map.get("TOSCA_NODE_TYPE"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_COMMENT, map.get("COMMENT"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ARTIFACT_DATA, map.get("ARTIFACT_DATA"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_PREV_ARTIFACT_UUID, map.get("PREV_ARTIFACT_UUID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_CURR_ARTIFACT_UUID, map.get("CURR_ARTIFACT_UUID"));
+ break;
+ case DISTRIBUTION_DOWNLOAD_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_RESOURCE_URL, map.get("RESOURCE_URL"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_CONSUMER_ID, map.get("CONSUMER_ID"));
+ break;
+ case DISTRIBUTION_ENGINE_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ if (map.get("TOPIC_NAME") != null) {
+ if (map.get("TOPIC_NAME").contains("-STATUS-")) {
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_STATUS_TOPIC_NAME,
+ map.get("TOPIC_NAME"));
+ } else {
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_NOTIFICATION_TOPIC_NAME,
+ map.get("TOPIC_NAME"));
+ }
+ } else {
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_STATUS_TOPIC_NAME,
+ map.get("DSTATUS_TOPIC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_NOTIFICATION_TOPIC_NAME,
+ map.get("DNOTIF_TOPIC"));
+ }
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_TOPIC_NAME, map.get("TOPIC_NAME"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ROLE, map.get("ROLE"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_API_KEY, map.get("API_KEY"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ENVRIONMENT_NAME, map.get("D_ENV"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_CONSUMER_ID, map.get("CONSUMER_ID"));
+ break;
+ case DISTRIBUTION_NOTIFICATION_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_STATE, map.get("CURR_STATE"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_VERSION, map.get("CURR_VERSION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ID, map.get("DID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_NAME, map.get("RESOURCE_NAME"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TYPE, map.get("RESOURCE_TYPE"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_TOPIC_NAME, map.get("TOPIC_NAME"));
+ break;
+ case DISTRIBUTION_STATUS_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_RESOURCE_URL, map.get("RESOURCE_URL"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ID, map.get("DID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_TOPIC_NAME, map.get("TOPIC_NAME"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_CONSUMER_ID, map.get("CONSUMER_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_STATUS_TIME, map.get("STATUS_TIME"));
+ break;
+ case DISTRIBUTION_DEPLOY_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ID, map.get("DID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_NAME, map.get("RESOURCE_NAME"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TYPE, map.get("RESOURCE_TYPE"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_VERSION, map.get("CURR_VERSION"));
+ break;
+ case DISTRIBUTION_GET_UEB_CLUSTER_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ if (map.get("STATUS_DESC") != null) {
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("STATUS_DESC"));
+ } else {
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ }
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_CONSUMER_ID, map.get("CONSUMER_ID"));
+ break;
+ case AUTH_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_AUTH_USER, map.get("USER"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_AUTH_URL, map.get("URL"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_AUTH_STATUS, map.get("AUTH_STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_AUTH_REALM, map.get("REALM"));
+ break;
+ case CONSUMER_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ECOMP_USER, map.get("ECOMP_USER"));
+ break;
+ case CATEGORY_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_CATEGORY_NAME, map.get("CATEGORY_NAME"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SUB_CATEGORY_NAME, map.get("SUB_CATEGORY_NAME"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_GROUPING_NAME, map.get("GROUPING_NAME"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TYPE, map.get("RESOURCE_TYPE"));
+ break;
+ case GET_USERS_LIST_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DETAILS, map.get("DETAILS"));
+ break;
+ case GET_CATEGORY_HIERARCHY_EVENT:
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
+ auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DETAILS, map.get("DETAILS"));
+ break;
+ default:
+ auditingFields = null;
+ break;
+ }
+ return auditingFields;
+ }
+
+	/**
+	 * Reads the export file of the given table line by line and stores each
+	 * record in Cassandra. Artifact lines are deserialized with Jackson into
+	 * ESArtifactData; audit lines are parsed with Gson into a field map and
+	 * converted to the matching audit event type.
+	 *
+	 * @param files
+	 *            a map of export files keyed by the table they belong to
+	 * @param table
+	 *            the table whose file should be imported into Cassandra
+	 * @return true if every record was stored successfully
+	 */
+	private boolean handleImport(Map<Table, File> files, Table table) {
+		// try-with-resources guarantees the reader is closed on every exit path
+		try (BufferedReader br = new BufferedReader(new FileReader(files.get(table)))) {
+			String line = null;
+			while ((line = br.readLine()) != null) {
+				CassandraOperationStatus res = null;
+				if (Table.ARTIFACT.equals(table)) {
+					res = artifactCassandraDao.saveArtifact(jsonMapper.readValue(line, ESArtifactData.class));
+				} else {
+					Type type = new TypeToken<Map<String, String>>() {
+					}.getType();
+					Map<String, String> map = gson.fromJson(line, type);
+					EnumMap<AuditingFieldsKeysEnum, Object> auditingFields = createAuditMap(map, table);
+					AuditingGenericEvent recordForCassandra = null;
+					try {
+						recordForCassandra = createAuditRecord(auditingFields);
+					} catch (ParseException e) {
+						log.error("filed to parse time stemp in recored {}", auditingFields);
+						return false;
+					}
+
+					res = auditCassandraDao.saveRecord(recordForCassandra);
+				}
+				// constant-first equals avoids an NPE should a DAO return null
+				if (!CassandraOperationStatus.OK.equals(res)) {
+					log.error("save recored to cassndra {} failed with status {} aborting.",
+							table.getTableDescription().getTableName(), res);
+					return false;
+				}
+			}
+			return true;
+		} catch (IOException e) {
+			log.error("failed to read file", e);
+			return false;
+		}
+	}
+
+	/**
+	 * Checks whether the Cassandra table behind the given logical table is
+	 * empty, delegating to the artifact DAO for the artifact table and to the
+	 * audit DAO for everything else.
+	 *
+	 * @param table
+	 *            the logical table to check
+	 * @return Either holding true if the table is empty, or the failure status
+	 */
+	private Either<Boolean, CassandraOperationStatus> checkIfTableIsEmpty(Table table) {
+		String tableName = table.getTableDescription().getTableName();
+		return Table.ARTIFACT.equals(table) ? artifactCassandraDao.isTableEmpty(tableName)
+				: auditCassandraDao.isTableEmpty(tableName);
+	}
+
+	/**
+	 * Checks that every export file is empty.
+	 *
+	 * @param files
+	 *            a map of export files keyed by table
+	 * @return true if all files have zero length, false as soon as a non-empty
+	 *         file is found
+	 */
+	private boolean filesEmpty(Map<Table, File> files) {
+		// iterate entries to avoid a second map lookup per key
+		for (Map.Entry<Table, File> entry : files.entrySet()) {
+			if (entry.getValue().length() != 0) {
+				log.info("file:{} is not empty skipping export", entry.getKey().getTableDescription().getTableName());
+				return false;
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * the method reads the records from es index of audit's into a file as
+	 * json's.
+	 *
+	 * @param value
+	 *            the name of the index we want
+	 * @param printerWritersMap
+	 *            a map of the writers we use to write to a file.
+	 * @return true in case the export was successful.
+	 */
+	private boolean exportAudit(String value, Map<Table, PrintWriter> printerWritersMap) {
+		log.info("stratng to export audit data from es index{} to file.", value);
+		QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
+		// scroll over the whole index in batches of 100 hits, keeping the
+		// scroll context alive for 60 seconds between fetches
+		SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(value).setScroll(new TimeValue(60000))
+				.setQuery(queryBuilder).setSize(100).execute().actionGet();
+		while (true) {
+			for (SearchHit hit : scrollResp.getHits().getHits()) {
+				// each audit document goes to the writer of the table matching
+				// its es type.
+				// NOTE(review): getTableByType returns null for unknown types,
+				// which would make 'out' null here - confirm every audit type
+				// is covered by TypeToTableMapping
+				PrintWriter out = printerWritersMap.get(TypeToTableMapping.getTableByType(hit.getType()));
+				out.println(hit.getSourceAsString());
+			}
+			scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
+					.setScroll(new TimeValue(60000)).execute().actionGet();
+			// an empty batch means the scroll is exhausted
+			if (scrollResp.getHits().getHits().length == 0) {
+				break;
+
+			}
+		}
+
+		log.info("export audit data from es to file. finished succsesfully");
+		return true;
+	}
+
+	/**
+	 * Reads the artifact records from the given es index and writes them, one
+	 * json document per line, to the artifact output file.
+	 *
+	 * @param index
+	 *            the name of the index we want to read
+	 * @param printerWritersMap
+	 *            a map of the writers we use to write to a file.
+	 * @return true in case the export was successful.
+	 */
+	private boolean exportArtifacts(String index, Map<Table, PrintWriter> printerWritersMap) {
+		log.info("stratng to export artifact data from es to file.");
+		PrintWriter out = printerWritersMap.get(Table.ARTIFACT);
+		QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
+		// scroll over the index in batches of 100 hits, keeping the scroll
+		// context alive for 60 seconds between fetches
+		SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(index).setScroll(new TimeValue(60000))
+				.setQuery(queryBuilder).setSize(100).execute().actionGet();
+		while (true) {
+			for (SearchHit hit : scrollResp.getHits().getHits()) {
+				// (a stray empty statement was removed here)
+				out.println(hit.getSourceAsString());
+			}
+			scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
+					.setScroll(new TimeValue(60000)).execute().actionGet();
+			// an empty batch means the scroll is exhausted
+			if (scrollResp.getHits().getHits().length == 0) {
+				break;
+			}
+		}
+
+		log.info("export artifact data from es to file. finished succsesfully");
+		return true;
+	}
+
+	/**
+	 * Retrieves the metadata of all indices from the elasticsearch cluster
+	 * state.
+	 *
+	 * @return a map of index names to their metadata
+	 */
+	private ImmutableOpenMap<String, IndexMetaData> getIndexData() {
+		return elasticSearchClient.getClient().admin().cluster().prepareState().get().getState().getMetaData()
+				.getIndices();
+	}
+
+	/**
+	 * Creates the output directory and one result file per table, named after
+	 * the table it will be imported into. Existing files are kept, unless we
+	 * are exporting, in which case they are deleted and recreated empty.
+	 *
+	 * @param appConfigDir
+	 *            the base path under which the output dir and files are created
+	 * @param exportToEs
+	 *            if true all the export files will be recreated
+	 * @return a map of tables to the files representing them, or null on failure
+	 */
+	private Map<Table, File> createOutPutFiles(String appConfigDir, boolean exportToEs) {
+		Map<Table, File> result = new EnumMap<>(Table.class);
+		File outputDir = new File(appConfigDir + "/output/");
+		if (!createOutPutFolder(outputDir)) {
+			return null;
+		}
+		for (Table table : Table.values()) {
+			// File(parent, child) builds the path portably; the redundant
+			// re-creation of the same File object was removed
+			File file = new File(outputDir, table.getTableDescription().getTableName());
+			if (exportToEs && file.exists()) {
+				try {
+					// an export run starts from empty files
+					Files.delete(file.toPath());
+				} catch (IOException e) {
+					log.error("failed to delete output file " + file.getAbsolutePath(), e);
+					return null;
+				}
+			}
+			if (!file.exists()) {
+				try {
+					file.createNewFile();
+				} catch (IOException e) {
+					log.error("failed to create output file " + file.getAbsolutePath(), e);
+					return null;
+				}
+			}
+			result.put(table, file);
+
+		}
+		return result;
+	}
+
+	/**
+	 * Creates a PrintWriter per export file.
+	 *
+	 * @param files
+	 *            a map of the files according to table
+	 * @return a map of writers according to table, or null on failure (any
+	 *         writers opened before the failure are closed)
+	 */
+	private Map<Table, PrintWriter> createWriters(Map<Table, File> files) {
+		Map<Table, PrintWriter> printerWritersMap = new EnumMap<>(Table.class);
+		try {
+			for (Map.Entry<Table, File> entry : files.entrySet()) {
+				Table table = entry.getKey();
+				log.info("creating writer for {}", table);
+				// append mode so an existing export file is extended, not truncated
+				FileWriter fw = new FileWriter(entry.getValue(), true);
+				PrintWriter out = new PrintWriter(new BufferedWriter(fw));
+				printerWritersMap.put(table, out);
+				log.info("creating writer for {} was successful", table);
+			}
+		} catch (IOException e) {
+			log.error("create writer to file failed", e);
+			// do not leak the writers that were opened before the failure
+			for (PrintWriter opened : printerWritersMap.values()) {
+				opened.close();
+			}
+			return null;
+		}
+		return printerWritersMap;
+	}
+
+	/**
+	 * Creates the output dir (including missing parents) if it does not exist.
+	 *
+	 * @param outputDir
+	 *            the path under which the directory will be created.
+	 * @return true if the create succeeded or the dir already exists
+	 */
+	private boolean createOutPutFolder(File outputDir) {
+		if (!outputDir.exists()) {
+			// parameterized logging avoids eager string concatenation
+			log.info("creating output dir {}", outputDir.getAbsolutePath());
+			try {
+				Files.createDirectories(outputDir.toPath());
+			} catch (IOException e) {
+				log.error("failed to create output dir {}", outputDir.getAbsolutePath(), e);
+				return false;
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * Maps an elasticsearch audit document type to the Cassandra table the
+	 * record is imported into. Constant order is unchanged from the original
+	 * declaration.
+	 */
+	public enum TypeToTableMapping {
+		USER_ADMIN_EVENT_TYPE(AuditingTypesConstants.USER_ADMIN_EVENT_TYPE, Table.USER_ADMIN_EVENT),
+		USER_ACCESS_EVENT_TYPE(AuditingTypesConstants.USER_ACCESS_EVENT_TYPE, Table.USER_ACCESS_EVENT),
+		RESOURCE_ADMIN_EVENT_TYPE(AuditingTypesConstants.RESOURCE_ADMIN_EVENT_TYPE, Table.RESOURCE_ADMIN_EVENT),
+		DISTRIBUTION_DOWNLOAD_EVENT_TYPE(AuditingTypesConstants.DISTRIBUTION_DOWNLOAD_EVENT_TYPE, Table.DISTRIBUTION_DOWNLOAD_EVENT),
+		DISTRIBUTION_ENGINE_EVENT_TYPE(AuditingTypesConstants.DISTRIBUTION_ENGINE_EVENT_TYPE, Table.DISTRIBUTION_ENGINE_EVENT),
+		DISTRIBUTION_NOTIFICATION_EVENT_TYPE(AuditingTypesConstants.DISTRIBUTION_NOTIFICATION_EVENT_TYPE, Table.DISTRIBUTION_NOTIFICATION_EVENT),
+		DISTRIBUTION_STATUS_EVENT_TYPE(AuditingTypesConstants.DISTRIBUTION_STATUS_EVENT_TYPE, Table.DISTRIBUTION_STATUS_EVENT),
+		DISTRIBUTION_DEPLOY_EVENT_TYPE(AuditingTypesConstants.DISTRIBUTION_DEPLOY_EVENT_TYPE, Table.DISTRIBUTION_DEPLOY_EVENT),
+		DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE(AuditingTypesConstants.DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE, Table.DISTRIBUTION_GET_UEB_CLUSTER_EVENT),
+		AUTH_EVENT_TYPE(AuditingTypesConstants.AUTH_EVENT_TYPE, Table.AUTH_EVENT),
+		CONSUMER_EVENT_TYPE(AuditingTypesConstants.CONSUMER_EVENT_TYPE, Table.CONSUMER_EVENT),
+		CATEGORY_EVENT_TYPE(AuditingTypesConstants.CATEGORY_EVENT_TYPE, Table.CATEGORY_EVENT),
+		GET_USERS_LIST_EVENT_TYPE(AuditingTypesConstants.GET_USERS_LIST_EVENT_TYPE, Table.GET_USERS_LIST_EVENT),
+		GET_CATEGORY_HIERARCHY_EVENT_TYPE(AuditingTypesConstants.GET_CATEGORY_HIERARCHY_EVENT_TYPE, Table.GET_CATEGORY_HIERARCHY_EVENT);
+
+		// immutable per-constant mapping data, exposed only via the getters
+		private final String typeName;
+		private final Table table;
+
+		TypeToTableMapping(String typeName, Table table) {
+			this.typeName = typeName;
+			this.table = table;
+		}
+
+		public String getTypeName() {
+			return typeName;
+		}
+
+		public Table getTable() {
+			return table;
+		}
+
+		/**
+		 * Resolves the Cassandra table for an elasticsearch document type.
+		 *
+		 * @param type
+		 *            the elasticsearch type name (matched case-insensitively)
+		 * @return the matching table, or null when the type is unknown
+		 */
+		public static Table getTableByType(String type) {
+			for (TypeToTableMapping mapping : TypeToTableMapping.values()) {
+				if (mapping.getTypeName().equalsIgnoreCase(type)) {
+					return mapping.getTable();
+				}
+			}
+			return null;
+		}
+	}
+
+	/**
+	 * Converts a parsed audit field map into the concrete audit event type
+	 * matching the recorded action, stamping it with the parsed timestamp.
+	 * The duplicate UserAccessEvent instantiation in the original (a dead
+	 * store) was removed.
+	 *
+	 * NOTE(review): simpleDateFormat appears to be a shared SimpleDateFormat
+	 * instance, which is not thread-safe - confirm this method is only called
+	 * single-threaded.
+	 *
+	 * @param auditingFields
+	 *            the audit record fields keyed by AuditingFieldsKeysEnum
+	 * @return the populated event, or null when the action maps to no known type
+	 * @throws ParseException
+	 *             when the record's timestamp cannot be parsed
+	 */
+	public static AuditingGenericEvent createAuditRecord(EnumMap<AuditingFieldsKeysEnum, Object> auditingFields)
+			throws ParseException {
+		AuditingActionEnum actionEnum = AuditingActionEnum
+				.getActionByName((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_ACTION));
+		String tableName = actionEnum.getAuditingEsType();
+		AuditingGenericEvent event = null;
+		Date date = null;
+		switch (tableName) {
+		case AuditingTypesConstants.USER_ADMIN_EVENT_TYPE:
+			UserAdminEvent userAdminEvent = new UserAdminEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			userAdminEvent.setTimestamp1(date);
+			event = userAdminEvent;
+			break;
+		case AuditingTypesConstants.AUTH_EVENT_TYPE:
+			AuthEvent authEvent = new AuthEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			authEvent.setTimestamp1(date);
+			event = authEvent;
+			break;
+		case AuditingTypesConstants.CATEGORY_EVENT_TYPE:
+			CategoryEvent categoryEvent = new CategoryEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			categoryEvent.setTimestamp1(date);
+			event = categoryEvent;
+			break;
+		case AuditingTypesConstants.RESOURCE_ADMIN_EVENT_TYPE:
+			ResourceAdminEvent resourceAdminEvent = new ResourceAdminEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			resourceAdminEvent.setTimestamp1(date);
+			event = resourceAdminEvent;
+			break;
+		case AuditingTypesConstants.USER_ACCESS_EVENT_TYPE:
+			UserAccessEvent userAccessEvent = new UserAccessEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			userAccessEvent.setTimestamp1(date);
+			event = userAccessEvent;
+			break;
+		case AuditingTypesConstants.DISTRIBUTION_STATUS_EVENT_TYPE:
+			DistributionStatusEvent distributionStatusEvent = new DistributionStatusEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			distributionStatusEvent.setTimestamp1(date);
+			event = distributionStatusEvent;
+			break;
+		case AuditingTypesConstants.DISTRIBUTION_DOWNLOAD_EVENT_TYPE:
+			DistributionDownloadEvent distributionDownloadEvent = new DistributionDownloadEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			distributionDownloadEvent.setTimestamp1(date);
+			event = distributionDownloadEvent;
+			break;
+		case AuditingTypesConstants.DISTRIBUTION_ENGINE_EVENT_TYPE:
+			DistributionEngineEvent distributionEngineEvent = new DistributionEngineEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			distributionEngineEvent.setTimestamp1(date);
+			event = distributionEngineEvent;
+			break;
+		case AuditingTypesConstants.DISTRIBUTION_NOTIFICATION_EVENT_TYPE:
+			DistributionNotificationEvent distributionNotificationEvent = new DistributionNotificationEvent(
+					auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			distributionNotificationEvent.setTimestamp1(date);
+			event = distributionNotificationEvent;
+			break;
+		case AuditingTypesConstants.DISTRIBUTION_DEPLOY_EVENT_TYPE:
+			DistributionDeployEvent distributionDeployEvent = new DistributionDeployEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			distributionDeployEvent.setTimestamp1(date);
+			event = distributionDeployEvent;
+			break;
+		case AuditingTypesConstants.DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE:
+			AuditingGetUebClusterEvent auditingGetUebClusterEvent = new AuditingGetUebClusterEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			auditingGetUebClusterEvent.setTimestamp1(date);
+			event = auditingGetUebClusterEvent;
+			break;
+		case AuditingTypesConstants.CONSUMER_EVENT_TYPE:
+			ConsumerEvent consumerEvent = new ConsumerEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			consumerEvent.setTimestamp1(date);
+			event = consumerEvent;
+			break;
+		case AuditingTypesConstants.GET_USERS_LIST_EVENT_TYPE:
+			GetUsersListEvent getUsersListEvent = new GetUsersListEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			getUsersListEvent.setTimestamp1(date);
+			event = getUsersListEvent;
+			break;
+		case AuditingTypesConstants.GET_CATEGORY_HIERARCHY_EVENT_TYPE:
+			GetCategoryHierarchyEvent getCategoryHierarchyEvent = new GetCategoryHierarchyEvent(auditingFields);
+			date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
+			getCategoryHierarchyEvent.setTimestamp1(date);
+			event = getCategoryHierarchyEvent;
+			break;
+
+		}
+		return event;
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java
new file mode 100644
index 0000000000..2c0471fb17
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java
@@ -0,0 +1,51 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl;
+
+import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * Spring configuration wiring the beans needed by the ES-to-Cassandra data
+ * migration tool.
+ */
+@Configuration
+public class EsToCassandraDataMigrationConfig {
+	/** Entry-point bean performing the actual migration. */
+	@Bean(name = "DataMigrationBean")
+	public DataMigration dataMigration() {
+		return new DataMigration();
+	}
+
+	/** DAO used to store artifact payloads in Cassandra. */
+	@Bean(name = "artifact-cassandra-dao")
+	public ArtifactCassandraDao artifactCassandraDao() {
+		return new ArtifactCassandraDao();
+	}
+
+	/** DAO used to store audit events in Cassandra. */
+	@Bean(name = "audit-cassandra-dao")
+	public AuditCassandraDao auditCassandraDao() {
+		return new AuditCassandraDao();
+	}
+
+	/** Low-level Cassandra client bean (presumably injected into the DAOs - confirm). */
+	@Bean(name = "cassandra-client")
+	public CassandraClient cassandraClient() {
+		return new CassandraClient();
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
new file mode 100644
index 0000000000..bf62072235
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
@@ -0,0 +1,694 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.configuration.BaseConfiguration;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Element;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Property;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.io.IoCore;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONMapper;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONReader;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONWriter;
+import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
+import org.openecomp.sdc.asdctool.Utils;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.thinkaurelius.titan.core.TitanEdge;
+import com.thinkaurelius.titan.core.TitanFactory;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanGraphQuery;
+import com.thinkaurelius.titan.core.TitanVertex;
+//import com.tinkerpop.blueprints.Direction;
+//import com.tinkerpop.blueprints.Edge;
+//import com.tinkerpop.blueprints.Vertex;
+//import com.tinkerpop.blueprints.util.ElementHelper;
+//import com.tinkerpop.blueprints.util.io.graphson.GraphSONReader;
+//import com.tinkerpop.blueprints.util.io.graphson.GraphSONWriter;
+
+public class GraphMLConverter {
+
+ private static Logger log = LoggerFactory.getLogger(GraphMLConverter.class.getName());
+
+ private Gson gson = new Gson();
+
+	/**
+	 * Imports a previously exported GraphSON file into the Titan graph named
+	 * on the command line. Before the import, vertices matching a fixed set of
+	 * property criteria (health-check marker, user and category nodes) are
+	 * removed so the imported data does not collide with them.
+	 *
+	 * @param args
+	 *            args[1] = titan configuration file, args[2] = input json file
+	 * @return true when the import succeeded
+	 */
+	public boolean importGraph(String[] args) {
+
+		TitanGraph graph = null;
+		try {
+			String titanFileLocation = args[1];
+			String inputFile = args[2];
+			graph = openGraph(titanFileLocation);
+
+			List<ImmutablePair<String, String>> propertiesCriteriaToDelete = new ArrayList<>();
+			propertiesCriteriaToDelete.add(new ImmutablePair<>("healthcheckis", "GOOD"));
+			propertiesCriteriaToDelete.add(new ImmutablePair<>("nodeLabel", "user"));
+			propertiesCriteriaToDelete.add(new ImmutablePair<>("nodeLabel", "resourceCategory"));
+			propertiesCriteriaToDelete.add(new ImmutablePair<>("nodeLabel", "serviceCategory"));
+
+			return importJsonGraph(graph, inputFile, propertiesCriteriaToDelete);
+
+		} catch (Exception e) {
+			// log instead of printStackTrace so the failure reaches the tool's log
+			log.error("failed to import graph", e);
+			return false;
+		} finally {
+			// always release the graph handle
+			if (graph != null) {
+				graph.close();
+			}
+		}
+
+	}
+
+	/**
+	 * Exports the Titan graph named on the command line to a GraphSON file in
+	 * the given output directory.
+	 *
+	 * @param args
+	 *            args[1] = titan configuration file, args[2] = output directory
+	 * @return true when the export file was written
+	 */
+	public boolean exportGraph(String[] args) {
+
+		TitanGraph graph = null;
+		try {
+			String titanFileLocation = args[1];
+			String outputDirectory = args[2];
+			graph = openGraph(titanFileLocation);
+
+			String result = exportJsonGraph(graph, outputDirectory);
+
+			// a null result means the export failed (already reported)
+			if (result == null) {
+				return false;
+			}
+
+			System.out.println("Exported file=" + result);
+		} catch (Exception e) {
+			e.printStackTrace();
+			return false;
+		} finally {
+			// always release the graph handle
+			if (graph != null) {
+				// graph.shutdown();
+				graph.close();
+			}
+		}
+
+		return true;
+	}
+
+	/**
+	 * Exports the Titan graph named on the command line to a GraphML file in
+	 * the given output directory.
+	 *
+	 * @param args
+	 *            args[1] = titan configuration file, args[2] = output directory
+	 * @return the path of the exported file, or null on failure
+	 */
+	public String exportGraphMl(String[] args) {
+
+		TitanGraph graph = null;
+		String result = null;
+		try {
+			String titanFileLocation = args[1];
+			String outputDirectory = args[2];
+			graph = openGraph(titanFileLocation);
+
+			result = exportGraphMl(graph, outputDirectory);
+
+			System.out.println("Exported file=" + result);
+		} catch (Exception e) {
+			e.printStackTrace();
+			return null;
+		} finally {
+			// always release the graph handle
+			if (graph != null) {
+				graph.close();
+			}
+		}
+
+		return result;
+	}
+
+	/**
+	 * Command-line wrapper around the diagnostic
+	 * {@link #findErrorInJsonGraph(TitanGraph, String)}: opens the graph and
+	 * runs the diagnostic export.
+	 *
+	 * @param args
+	 *            args[1] = titan configuration file, args[2] = output directory
+	 * @return true when the diagnostic run produced an output file
+	 */
+	public boolean findErrorInJsonGraph(String[] args) {
+
+		TitanGraph graph = null;
+		try {
+			String titanFileLocation = args[1];
+			String outputDirectory = args[2];
+			graph = openGraph(titanFileLocation);
+
+			String result = findErrorInJsonGraph(graph, outputDirectory);
+
+			if (result == null) {
+				return false;
+			}
+
+			System.out.println("Exported file=" + result);
+		} catch (Exception e) {
+			e.printStackTrace();
+			return false;
+		} finally {
+			// always release the graph handle
+			if (graph != null) {
+				// graph.shutdown();
+				graph.close();
+			}
+		}
+
+		return true;
+	}
+
+	/**
+	 * Opens a Titan graph using the configuration file at the given location.
+	 *
+	 * @param titanFileLocation
+	 *            path to the titan properties file
+	 * @return the opened graph; callers are responsible for closing it
+	 */
+	public TitanGraph openGraph(String titanFileLocation) {
+		return TitanFactory.open(titanFileLocation);
+	}
+
+	/**
+	 * Exports the whole graph to a timestamped GraphSON file in the given
+	 * directory.
+	 *
+	 * @param graph
+	 *            the graph to export
+	 * @param outputDirectory
+	 *            directory the export file is written to
+	 * @return the path of the written file, or null on failure
+	 */
+	public String exportJsonGraph(TitanGraph graph, String outputDirectory) {
+
+		String result = null;
+		String outputFile = outputDirectory + File.separator + "exportGraph." + System.currentTimeMillis() + ".json";
+
+		// try-with-resources closes the stream on every exit path
+		try (OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile))) {
+			final GraphSONWriter.Builder builder = GraphSONWriter.build();
+			final GraphSONMapper mapper = newGraphSONMapper(graph);
+			builder.mapper(mapper);
+			final GraphSONWriter writer = builder.create();
+			writer.writeGraph(out, graph);
+
+			// end the transaction used while streaming the graph
+			graph.tx().commit();
+
+			result = outputFile;
+
+		} catch (Exception e) {
+			// log instead of printStackTrace so the failure reaches the tool's log
+			log.error("export json graph failed", e);
+			graph.tx().rollback();
+		}
+		return result;
+
+	}
+
+	/**
+	 * Exports the graph to a timestamped GraphML file in the given directory.
+	 *
+	 * @param graph
+	 *            the graph to export
+	 * @param outputDirectory
+	 *            directory the .graphml file is written to
+	 * @return the path of the written file, or null on failure
+	 */
+	public String exportGraphMl(TitanGraph graph, String outputDirectory) {
+		String result = null;
+		String outputFile = outputDirectory + File.separator + "exportGraph." + System.currentTimeMillis() + ".graphml";
+		try {
+			try (final OutputStream os = new BufferedOutputStream(new FileOutputStream(outputFile))) {
+				// normalize(true) asks the writer for deterministic output ordering
+				graph.io(IoCore.graphml()).writer().normalize(true).create().writeGraph(os, graph);
+			}
+			result = outputFile;
+			// end the transaction used while streaming the graph
+			graph.tx().commit();
+		} catch (Exception e) {
+			graph.tx().rollback();
+			e.printStackTrace();
+		}
+		return result;
+
+	}
+
+	/**
+	 * Builds a GraphSON mapper bound to the given graph's registered
+	 * serializers. Embedded type info is deliberately not enabled.
+	 */
+	private static GraphSONMapper newGraphSONMapper(final Graph graph) {
+		return graph.io(IoCore.graphson()).mapper().create();
+	}
+
+	/**
+	 * Imports a GraphSON file into the given graph. Vertices matching any of
+	 * the supplied property criteria are removed first.
+	 *
+	 * @param graph
+	 *            the target graph
+	 * @param graphJsonFile
+	 *            path of the GraphSON file to read
+	 * @param propertiesCriteriaToDelete
+	 *            property key/value pairs; every vertex carrying one of these
+	 *            values is deleted prior to the import (may be null)
+	 * @return true when the import succeeded
+	 */
+	public boolean importJsonGraph(TitanGraph graph, String graphJsonFile,
+			List<ImmutablePair<String, String>> propertiesCriteriaToDelete) {
+
+		boolean result = false;
+
+		try {
+
+			if (propertiesCriteriaToDelete != null) {
+				// ImmutablePair implements Map.Entry, hence the Entry view
+				for (Entry<String, String> entry : propertiesCriteriaToDelete) {
+					String key = entry.getKey();
+					String value = entry.getValue();
+					Iterator<TitanVertex> iterator = graph.query().has(key, value).vertices().iterator();
+					while (iterator.hasNext()) {
+						Vertex vertex = iterator.next();
+						vertex.remove();
+						System.out.println("Remove vertex of type " + key + " and value " + value);
+					}
+				}
+			}
+			File file = new File(graphJsonFile);
+			if (!file.isFile()) {
+				System.out.println("File " + graphJsonFile + " cannot be found.");
+				return result;
+			}
+
+			// try-with-resources closes the stream on every exit path
+			try (InputStream is = new BufferedInputStream(new FileInputStream(graphJsonFile))) {
+				System.out.println("Before importing file " + graphJsonFile);
+
+				GraphSONReader create = GraphSONReader.build().create();
+				create.readGraph(is, graph);
+
+				graph.tx().commit();
+
+				result = true;
+			}
+
+		} catch (Exception e) {
+			System.out.println("Failed to import graph " + e.getMessage());
+			// log instead of printStackTrace so the failure reaches the tool's log
+			log.error("failed to import graph", e);
+			graph.tx().rollback();
+		}
+
+		return result;
+
+	}
+
+ public String findErrorInJsonGraph(TitanGraph graph, String outputDirectory) {
+
+ boolean runVertexScan = false;
+ boolean runEdgeScan = false;
+
+ String result = null;
+
+ // GraphMLWriter graphMLWriter = new GraphMLWriter(graph);
+
+ String outputFile = outputDirectory + File.separator + "exportGraph." + System.currentTimeMillis() + ".json";
+
+ OutputStream out = null;
+ try {
+ out = new BufferedOutputStream(new FileOutputStream(outputFile));
+
+ if (runEdgeScan) {
+
+ Vertex vertexFrom = null;
+ Vertex vertexTo = null;
+ Edge edge = null;
+
+ // Iterable<Edge> edges = graph.getEdges();
+ // Iterable<Edge> edges = graph.query().edges();
+ Iterable<TitanEdge> edges = graph.query().edges();
+ // Iterator<Edge> iterator = edges.iterator();
+ Iterator<TitanEdge> iterator = edges.iterator();
+ while (iterator.hasNext()) {
+
+ try {
+
+ edge = iterator.next();
+
+ // vertexFrom = edge.getVertex(Direction.OUT);
+ // vertexTo = edge.getVertex(Direction.IN);
+ vertexFrom = edge.outVertex();
+ vertexTo = edge.inVertex();
+
+ BaseConfiguration conf = new BaseConfiguration();
+ conf.setProperty("storage.backend", "inmemory");
+ TitanGraph openGraph = Utils.openGraph(conf);
+
+ TitanVertex addVertexFrom = openGraph.addVertex();
+ // ElementHelper.setProperties(addVertexFrom,
+ // ElementHelper.getProperties(vertexFrom));
+ Utils.setProperties(addVertexFrom, Utils.getProperties(vertexFrom));
+
+ TitanVertex addVertexTo = openGraph.addVertex();
+ // ElementHelper.setProperties(addVertexTo,
+ // ElementHelper.getProperties(vertexTo));
+ Utils.setProperties(addVertexTo, Utils.getProperties(vertexTo));
+
+ // Edge addEdge = openGraph.addEdge(null, addVertexFrom,
+ // addVertexTo, edge.getLabel());
+
+ // Edge edge = tGraph.addEdge(null,
+ // fromV.left().value(), toV.left().value(), type);
+
+ Edge addEdge = addVertexFrom.addEdge(edge.label(), addVertexTo);
+ // ElementHelper.setProperties(addEdge,
+ // ElementHelper.getProperties(edge));
+ Utils.setProperties(addEdge, Utils.getProperties(edge));
+
+ // log.info("fromVertex=" +
+ // ElementHelper.getProperties(vertexFrom));
+ log.info("fromVertex=" + Utils.getProperties(vertexFrom));
+ // log.info("toVertex=" +
+ // ElementHelper.getProperties(vertexTo));
+ log.info("toVertex=" + Utils.getProperties(vertexTo));
+ // log.info("edge=" + edge.getLabel() + " " +
+ // ElementHelper.getProperties(edge));
+ log.info("edge=" + edge.label() + " " + Utils.getProperties(edge));
+
+ // GraphSONWriter.outputGraph(openGraph, outputFile);
+ GraphSONWriter create = GraphSONWriter.build().create();
+ create.writeGraph(out, openGraph);
+
+ // openGraph.rollback();
+ openGraph.tx().rollback();
+
+ } catch (Exception e) {
+ e.printStackTrace();
+
+ // log.error("fromVertex=" +
+ // ElementHelper.getProperties(vertexFrom));
+ log.error("fromVertex=" + Utils.getProperties(vertexFrom));
+ // log.error("toVertex=" +
+ // ElementHelper.getProperties(vertexTo));
+ log.error("toVertex=" + Utils.getProperties(vertexTo));
+ // log.error("edge=" + edge.getLabel() + " " +
+ // ElementHelper.getProperties(edge));
+ log.error("edge=" + edge.label() + " " + Utils.getProperties(edge));
+
+ break;
+
+ }
+ }
+
+ // graph.rollback();
+ graph.tx().rollback();
+
+ }
+
+ if (runVertexScan) {
+
+ Vertex vertex = null;
+ // Iterable<Vertex> vertices = graph.getVertices();
+
+ // Iterator<Vertex> iteratorVertex = vertices.iterator();
+ Iterator<Vertex> iteratorVertex = graph.vertices();
+ while (iteratorVertex.hasNext()) {
+
+ try {
+
+ vertex = iteratorVertex.next();
+
+ // Iterable<Edge> edges2 =
+ // vertex.getEdges(Direction.BOTH);
+
+ // Iterator<Edge> iterator2 = edges2.iterator();
+ Iterator<Edge> iterator2 = vertex.edges(Direction.BOTH);
+ if (false == iterator2.hasNext()) {
+
+ BaseConfiguration conf = new BaseConfiguration();
+ conf.setProperty("storage.backend", "inmemory");
+ TitanGraph openGraph = Utils.openGraph(conf);
+
+ TitanVertex addVertexFrom = openGraph.addVertex();
+ // ElementHelper.setProperties(addVertexFrom,
+ // ElementHelper.getProperties(vertex));
+ Utils.setProperties(addVertexFrom, Utils.getProperties(vertex));
+
+ // log.info("fromVertex=" +
+ // ElementHelper.getProperties(addVertexFrom));
+ log.info("fromVertex=" + Utils.getProperties(addVertexFrom));
+
+ // GraphSONWriter.outputGraph(openGraph,
+ // outputFile);
+ GraphSONWriter create = GraphSONWriter.build().create();
+ create.writeGraph(out, openGraph);
+
+ // openGraph.rollback();
+ openGraph.tx().rollback();
+
+ }
+
+ } catch (Exception e) {
+ e.printStackTrace();
+
+ // log.error("vertex=" +
+ // ElementHelper.getProperties(vertex));
+
+ GraphPropertiesDictionary[] values = GraphPropertiesDictionary.values();
+
+ // Object property1 =
+ // vertex.getProperty(GraphPropertiesDictionary.HEALTH_CHECK.getProperty());
+ Object property1 = vertex.value(GraphPropertiesDictionary.HEALTH_CHECK.getProperty());
+ System.out.println(property1);
+
+ // Object property2 = vertex.getProperty("healthcheck");
+ Object property2 = vertex.value("healthcheck");
+ System.out.println(property2);
+
+ // for (GraphPropertiesDictionary value : values) {
+ //
+ // System.out.println(property);
+ // }
+
+ break;
+
+ }
+ }
+
+ // graph.rollback();
+ graph.tx().rollback();
+
+ }
+
+ // Iterable<Vertex> vertices2 =
+ // graph.getVertices(GraphPropertiesDictionary.HEALTH_CHECK.getProperty(),
+ // "GOOD");
+ Iterable<TitanVertex> vertices2 = graph.query()
+ .has(GraphPropertiesDictionary.HEALTH_CHECK.getProperty(), "GOOD").vertices();
+ ;
+ Vertex next = vertices2.iterator().next();
+
+ BaseConfiguration conf = new BaseConfiguration();
+ conf.setProperty("storage.backend", "inmemory");
+ TitanGraph openGraph = Utils.openGraph(conf);
+
+ // TitanVertex addVertexFrom = openGraph.addVertex();
+ //
+ // addVertexFrom.setProperty(GraphPropertiesDictionary.HEALTH_CHECK.getProperty(),
+ // "GOOD");
+ // addVertexFrom.setProperty("healthcheck",
+ // next.getProperty("healthcheck"));
+ //
+ // //next.remove();
+ //
+ // next.removeProperty("healthcheck");
+ // next.removeProperty("healthcheckis");
+ //
+ // next.remove();
+
+ // GraphSONWriter.outputGraph(openGraph, outputFile);
+
+ for (NodeTypeEnum nodeTypeEnum : NodeTypeEnum.values()) {
+ removeNodesByLabel(graph, nodeTypeEnum.getName());
+ }
+
+ // GraphSONWriter.outputGraph(graph, outputFile);
+
+ GraphSONWriter create = GraphSONWriter.build().create();
+ create.writeGraph(out, graph);
+
+ // graph.rollback();
+ graph.tx().rollback();
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ // graph.rollback();
+ graph.tx().rollback();
+ } finally {
+ try {
+ if (out != null) {
+ out.close();
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ return result;
+
+ }
+
+ private void removeNodesByLabel(TitanGraph graph, String label) {
+ Iterable<TitanVertex> vertices = graph.query().has(GraphPropertiesDictionary.LABEL.getProperty(), label)
+ .vertices();
+ Iterator<TitanVertex> iterator = vertices.iterator();
+ while (iterator.hasNext()) {
+ Vertex next2 = iterator.next();
+ next2.remove();
+ }
+ }
+
+ public static void clearGraph(TitanGraph graph) {
+
+ Iterable<TitanVertex> vertices = graph.query().vertices();
+
+ long erased = 0;
+
+ if (vertices != null) {
+ Iterator<TitanVertex> iterator = vertices.iterator();
+ while (iterator.hasNext()) {
+ Vertex vertex = iterator.next();
+ // graph.removeVertex(vertex);
+ vertex.remove();
+ erased++;
+ }
+
+ }
+
+ System.out.println("After erasing " + erased + " vertices.");
+ // graph.commit();
+ graph.tx().commit();
+ }
+
+ public String exportUsers(TitanGraph graph, String outputDirectory) {
+
+ List<Map<String, Object>> users = new ArrayList<>();
+ String result = null;
+
+ // GraphMLWriter graphMLWriter = new GraphMLWriter(graph);
+
+ String outputFile = outputDirectory + File.separator + "users." + System.currentTimeMillis() + ".json";
+
+ FileWriter fileWriter = null;
+ try {
+
+ TitanGraphQuery graphQuery = graph.query().has(GraphPropertiesDictionary.LABEL.getProperty(),
+ NodeTypeEnum.User.getName());
+
+ @SuppressWarnings("unchecked")
+ Iterable<TitanVertex> vertices = graphQuery.vertices();
+
+ if (vertices != null) {
+ for (Vertex v : vertices) {
+ Map<String, Object> properties = getProperties(v);
+ properties.remove(GraphPropertiesDictionary.LABEL.getProperty());
+ users.add(properties);
+ }
+ }
+
+ graph.tx().commit();
+
+ String jsonUsers = gson.toJson(users);
+
+ fileWriter = new FileWriter(outputFile);
+ fileWriter.write(jsonUsers);
+
+ result = outputFile;
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ graph.tx().rollback();
+ } finally {
+ try {
+ if (fileWriter != null) {
+ fileWriter.close();
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ return result;
+
+ }
+
+ public Map<String, Object> getProperties(Element element) {
+
+ Map<String, Object> result = new HashMap<String, Object>();
+ ;
+
+ if (element.keys() != null && element.keys().size() > 0) {
+ Map<String, Property> propertyMap = ElementHelper.propertyMap(element,
+ element.keys().toArray(new String[element.keys().size()]));
+
+ for (Entry<String, Property> entry : propertyMap.entrySet()) {
+ String key = entry.getKey();
+ Object value = entry.getValue().value();
+
+ result.put(key, value);
+ }
+ }
+ return result;
+ }
+
+ public boolean exportUsers(String[] args) {
+
+ TitanGraph graph = null;
+ try {
+ String titanFileLocation = args[1];
+ String outputDirectory = args[2];
+ graph = openGraph(titanFileLocation);
+
+ String result = exportUsers(graph, outputDirectory);
+
+ if (result == null) {
+ return false;
+ }
+
+ System.out.println("Exported file=" + result);
+ } catch (Exception e) {
+ e.printStackTrace();
+ return false;
+ } finally {
+ if (graph != null) {
+ // graph.shutdown();
+ graph.close();
+ }
+ }
+
+ return true;
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
new file mode 100644
index 0000000000..77163b4cc5
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
@@ -0,0 +1,364 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.poi.hssf.usermodel.HSSFWorkbook;
+import org.apache.poi.ss.usermodel.Row;
+import org.apache.poi.ss.usermodel.Sheet;
+import org.apache.poi.ss.usermodel.Workbook;
+import org.jdom2.Document;
+import org.jdom2.Element;
+import org.jdom2.filter.ElementFilter;
+import org.jdom2.input.SAXBuilder;
+import org.jdom2.util.IteratorIterable;
+
+public class GraphMLDataAnalyzer {
+
+ private static final String[] COMPONENT_SHEET_HEADER = { "uniqueId", "type", "name", "toscaResourceName",
+ "resourceType", "version", "deleted", "hasNonCalculatedReqCap" };
+ private static final String[] COMPONENT_INSTANCES_SHEET_HEADER = { "uniqueId", "name", "originUid", "originType",
+ "containerUid" };
+
+ public String analyzeGraphMLData(String[] args) {
+ String result = null;
+ try {
+ String mlFileLocation = args[0];
+ result = _analyzeGraphMLData(mlFileLocation);
+ System.out.println("Analyzed ML file=" + mlFileLocation + ", XLS result=" + result);
+ } catch (Exception e) {
+ e.printStackTrace();
+ return null;
+ }
+ return result;
+ }
+
+ private String _analyzeGraphMLData(String mlFileLocation) throws Exception {
+
+ // Parse ML file
+ SAXBuilder builder = new SAXBuilder();
+ File xmlFile = new File(mlFileLocation);
+ Document document = (Document) builder.build(xmlFile);
+
+ // XLS data file name
+ String outputFile = mlFileLocation.replace(".graphml", ".xls");
+ Workbook wb = new HSSFWorkbook();
+ FileOutputStream fileOut = new FileOutputStream(outputFile);
+ writeComponents(wb, document);
+ writeComponentInstances(wb, document);
+ wb.write(fileOut);
+ fileOut.close();
+ return outputFile;
+ }
+
+ private void writeComponents(Workbook wb, Document document) {
+ Sheet componentsSheet = wb.createSheet("Components");
+ Row currentRow = componentsSheet.createRow(0);
+ for (int i = 0; i < COMPONENT_SHEET_HEADER.length; i++) {
+ currentRow.createCell(i).setCellValue(COMPONENT_SHEET_HEADER[i]);
+ }
+
+ List<ComponentRow> components = getComponents(document);
+ int rowNum = 1;
+ for (ComponentRow row : components) {
+ currentRow = componentsSheet.createRow(rowNum++);
+ currentRow.createCell(0).setCellValue(row.getUniqueId());
+ currentRow.createCell(1).setCellValue(row.getType());
+ currentRow.createCell(2).setCellValue(row.getName());
+ currentRow.createCell(3).setCellValue(row.getToscaResourceName());
+ currentRow.createCell(4).setCellValue(row.getResourceType());
+ currentRow.createCell(5).setCellValue(row.getVersion());
+ currentRow.createCell(6).setCellValue(row.getIsDeleted() != null ? row.getIsDeleted().toString() : "false");
+ currentRow.createCell(7).setCellValue(row.getHasNonCalculatedReqCap());
+ }
+ }
+
+ private void writeComponentInstances(Workbook wb, Document document) {
+ Sheet componentsSheet = wb.createSheet("ComponentInstances");
+ Row currentRow = componentsSheet.createRow(0);
+ for (int i = 0; i < COMPONENT_INSTANCES_SHEET_HEADER.length; i++) {
+ currentRow.createCell(i).setCellValue(COMPONENT_INSTANCES_SHEET_HEADER[i]);
+ }
+ List<ComponentInstanceRow> components = getComponentInstances(document);
+ int rowNum = 1;
+ for (ComponentInstanceRow row : components) {
+ currentRow = componentsSheet.createRow(rowNum++);
+ currentRow.createCell(0).setCellValue(row.getUniqueId());
+ currentRow.createCell(1).setCellValue(row.getName());
+ currentRow.createCell(2).setCellValue(row.getOriginUid());
+ currentRow.createCell(3).setCellValue(row.getOriginType());
+ currentRow.createCell(4).setCellValue(row.getContainerUid());
+ }
+ }
+
+ private List<ComponentRow> getComponents(Document document) {
+ List<ComponentRow> res = new ArrayList<>();
+ Element root = document.getRootElement();
+ ElementFilter filter = new ElementFilter("graph");
+ Element graph = root.getDescendants(filter).next();
+ filter = new ElementFilter("edge");
+ IteratorIterable<Element> edges = graph.getDescendants(filter);
+ Set<String> componentsHavingReqOrCap = new HashSet<>();
+ filter = new ElementFilter("data");
+ for (Element edge : edges) {
+ IteratorIterable<Element> dataNodes = edge.getDescendants(filter);
+ for (Element data : dataNodes) {
+ String attributeValue = data.getAttributeValue("key");
+ switch (attributeValue) {
+ case "labelE":
+ String edgeLabel = data.getText();
+ if (edgeLabel.equals("REQUIREMENT") || edgeLabel.equals("CAPABILITY")) {
+ componentsHavingReqOrCap.add(edge.getAttributeValue("source"));
+ }
+ break;
+ }
+ }
+ }
+
+ filter = new ElementFilter("node");
+ IteratorIterable<Element> nodes = graph.getDescendants(filter);
+ filter = new ElementFilter("data");
+ for (Element element : nodes) {
+ IteratorIterable<Element> dataNodes = element.getDescendants(filter);
+ ComponentRow componentRow = new ComponentRow();
+ boolean isComponent = false;
+ for (Element data : dataNodes) {
+ String attributeValue = data.getAttributeValue("key");
+ switch (attributeValue) {
+ case "nodeLabel":
+ String nodeLabel = data.getText();
+ if (nodeLabel.equals("resource") || nodeLabel.equals("service")) {
+ isComponent = true;
+ componentRow.setType(nodeLabel);
+ String componentId = element.getAttributeValue("id");
+ componentRow.setHasNonCalculatedReqCap(componentsHavingReqOrCap.contains(componentId));
+ }
+ break;
+ case "uid":
+ componentRow.setUniqueId(data.getText());
+ break;
+ case "name":
+ componentRow.setName(data.getText());
+ break;
+ case "toscaResourceName":
+ componentRow.setToscaResourceName(data.getText());
+ break;
+ case "resourceType":
+ componentRow.setResourceType(data.getText());
+ break;
+ case "version":
+ componentRow.setVersion(data.getText());
+ break;
+ case "deleted":
+ componentRow.setIsDeleted(Boolean.parseBoolean(data.getText()));
+ break;
+ default:
+ break;
+ }
+ }
+ if (isComponent) {
+ res.add(componentRow);
+ }
+ }
+ return res;
+ }
+
+ private List<ComponentInstanceRow> getComponentInstances(Document document) {
+ List<ComponentInstanceRow> res = new ArrayList<>();
+ Element root = document.getRootElement();
+ ElementFilter filter = new ElementFilter("graph");
+ Element graph = root.getDescendants(filter).next();
+ filter = new ElementFilter("node");
+ IteratorIterable<Element> nodes = graph.getDescendants(filter);
+ filter = new ElementFilter("data");
+ for (Element element : nodes) {
+ IteratorIterable<Element> dataNodes = element.getDescendants(filter);
+ ComponentInstanceRow componentInstRow = new ComponentInstanceRow();
+ boolean isComponentInst = false;
+ for (Element data : dataNodes) {
+ String attributeValue = data.getAttributeValue("key");
+ switch (attributeValue) {
+ case "nodeLabel":
+ String nodeLabel = data.getText();
+ if (nodeLabel.equals("resourceInstance")) {
+ isComponentInst = true;
+ }
+ break;
+ case "uid":
+ componentInstRow.setUniqueId(data.getText());
+ break;
+ case "name":
+ componentInstRow.setName(data.getText());
+ break;
+ case "originType":
+ componentInstRow.setOriginType(data.getText());
+ break;
+ default:
+ break;
+ }
+ }
+ if (isComponentInst) {
+ // Assuming the uid is in standard form of
+ // <container>.<origin>.<name>
+ String uniqueId = componentInstRow.getUniqueId();
+ if (uniqueId != null) {
+ String[] split = uniqueId.split("\\.");
+ if (split.length == 3) {
+ componentInstRow.setContainerUid(split[0]);
+ componentInstRow.setOriginUid(split[1]);
+ }
+ }
+ res.add(componentInstRow);
+ }
+ }
+ return res;
+ }
+
+ private class ComponentRow {
+
+ private String uniqueId;
+ private String type;
+ private String name;
+ private String toscaResourceName;
+ private String resourceType;
+ private String version;
+ private Boolean isDeleted;
+ private Boolean hasNonCalculatedReqCap;
+
+ public Boolean getHasNonCalculatedReqCap() {
+ return hasNonCalculatedReqCap;
+ }
+
+ public void setHasNonCalculatedReqCap(Boolean hasNonCalculatedReqCap) {
+ this.hasNonCalculatedReqCap = hasNonCalculatedReqCap;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getUniqueId() {
+ return uniqueId;
+ }
+
+ public void setUniqueId(String uniqueId) {
+ this.uniqueId = uniqueId;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getToscaResourceName() {
+ return toscaResourceName;
+ }
+
+ public void setToscaResourceName(String toscaResourceName) {
+ this.toscaResourceName = toscaResourceName;
+ }
+
+ public String getResourceType() {
+ return resourceType;
+ }
+
+ public void setResourceType(String resourceType) {
+ this.resourceType = resourceType;
+ }
+
+ public String getVersion() {
+ return version;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+ public Boolean getIsDeleted() {
+ return isDeleted;
+ }
+
+ public void setIsDeleted(Boolean deleted) {
+ this.isDeleted = deleted;
+ }
+ }
+
+ private class ComponentInstanceRow {
+ private String uniqueId;
+ private String name;
+ private String originUid;
+ private String originType;
+ private String containerUid;
+
+ public String getContainerUid() {
+ return containerUid;
+ }
+
+ public void setContainerUid(String containerUid) {
+ this.containerUid = containerUid;
+ }
+
+ public String getUniqueId() {
+ return uniqueId;
+ }
+
+ public void setUniqueId(String uniqueId) {
+ this.uniqueId = uniqueId;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getOriginUid() {
+ return originUid;
+ }
+
+ public void setOriginUid(String componentUid) {
+ this.originUid = componentUid;
+ }
+
+ public String getOriginType() {
+ return originType;
+ }
+
+ public void setOriginType(String originType) {
+ this.originType = originType;
+ }
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/PopulateComponentCache.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/PopulateComponentCache.java
new file mode 100644
index 0000000000..812d534f49
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/PopulateComponentCache.java
@@ -0,0 +1,388 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
+import org.openecomp.sdc.be.dao.cassandra.ComponentCassandraDao;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.Product;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.cache.ComponentCache;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.ProductOperation;
+import org.openecomp.sdc.be.model.operations.impl.ResourceOperation;
+import org.openecomp.sdc.be.model.operations.impl.ServiceOperation;
+import org.openecomp.sdc.be.resources.data.ComponentCacheData;
+import org.openecomp.sdc.be.resources.data.ESArtifactData;
+import org.openecomp.sdc.common.util.SerializationUtils;
+import org.openecomp.sdc.common.util.ZipUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanVertex;
+
+import fj.data.Either;
+
+/**
+ * Created by esofer on 9/1/2016.
+ */
+public class PopulateComponentCache {
+
+ private static Logger log = LoggerFactory.getLogger(PopulateComponentCache.class.getName());
+
+ @Autowired
+ protected ComponentCassandraDao componentCassandraDao;
+
+ @Autowired
+ protected ResourceOperation resourceOperation;
+
+ @Autowired
+ protected ServiceOperation serviceOperation;
+
+ @Autowired
+ protected ProductOperation productOperation;
+
+ @Autowired
+ protected ComponentCache componentCache;
+
+ private void exit(String stage, int i) {
+ log.error("Failed on " + stage);
+ System.exit(i);
+
+ }
+
+ public void populateCache() {
+ populateCache(ComponentTypeEnum.RESOURCE);
+ populateCache(ComponentTypeEnum.SERVICE);
+ populateCache(ComponentTypeEnum.PRODUCT);
+ }
+
+ private void populateCache(ComponentTypeEnum componentTypeEnum) {
+
+ List<String> list = new ArrayList<>();
+ Either<TitanGraph, TitanOperationStatus> graph = resourceOperation.getTitanGenericDao().getGraph();
+ TitanGraph titanGraph = graph.left().value();
+ Iterable vertices = titanGraph.query()
+ .has(GraphPropertiesDictionary.LABEL.getProperty(), componentTypeEnum.name().toLowerCase()).vertices();
+
+ Iterator iterator = vertices.iterator();
+ while (iterator.hasNext()) {
+ TitanVertex vertex = (TitanVertex) iterator.next();
+
+ // VertexProperty<Object> state =
+ // vertex.property(GraphPropertiesDictionary.STATE.getProperty());
+ // String stateValue = (String)state.value();
+
+ // if (false ==
+ // stateValue.equalsIgnoreCase(LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT.name())
+ // ) {
+ VertexProperty<Object> uid = vertex.property(GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+ String uidValue = (String) uid.value();
+
+ list.add(uidValue);
+ // }
+ }
+
+ int counter = 0;
+ for (String componentUid : list) {
+
+ long time1 = System.currentTimeMillis();
+
+ /////////////////////////////////////////////////////////////////////////////////////
+ // Pay attention. The component is fetched from the cache in case it
+ ///////////////////////////////////////////////////////////////////////////////////// is
+ ///////////////////////////////////////////////////////////////////////////////////// already
+ ///////////////////////////////////////////////////////////////////////////////////// there.
+ /////////////////////////////////////////////////////////////////////////////////////
+ Component component = null;
+ switch (componentTypeEnum) {
+ case RESOURCE:
+ Either<Resource, StorageOperationStatus> resourceRes = resourceOperation.getComponent(componentUid,
+ false);
+ if (resourceRes.isRight()) {
+ exit("get resource", 1);
+ }
+ component = resourceRes.left().value();
+ break;
+ case SERVICE:
+ Either<Service, StorageOperationStatus> serviceRes = serviceOperation.getComponent(componentUid, false);
+ if (serviceRes.isRight()) {
+ exit("get service", 1);
+ }
+ component = serviceRes.left().value();
+ break;
+ case PRODUCT:
+ Either<Product, StorageOperationStatus> productRes = productOperation.getComponent(componentUid, false);
+ if (productRes.isRight()) {
+ exit("get product", 1);
+ }
+ component = productRes.left().value();
+ break;
+ default:
+ break;
+ }
+
+ if (component == null) {
+ exit("get component", 1);
+ }
+
+ long time2 = System.currentTimeMillis();
+ // System.out.println("fetch resource " + resource.getName());
+ // System.out.println("fetch resource time is " + (time2 - time1) +
+ // " ms");
+
+ boolean setComponent = componentCache.setComponent(component, componentTypeEnum.getNodeType());
+ if (setComponent) {
+ counter++;
+ }
+
+ /*
+ * Either<byte[], Boolean> valueRes =
+ * SerializationUtils.serializeExt(component);
+ *
+ * if (valueRes.isRight()) { exit("serialize component " +
+ * component.getName(), 2); } byte[] value =
+ * valueRes.left().value(); log.info("byte[] size is " +
+ * value.length); //System.out.println("byte[] size is " +
+ * value.length);
+ *
+ * byte[] zipped = null; try { zipped = ZipUtil.zipBytes(value);
+ * //System.out.println("byte[] size after zip is " +
+ * zipped.length);
+ *
+ * ComponentCacheData componentCacheData = new ComponentCacheData();
+ * componentCacheData.setDataAsArray(zipped);
+ * componentCacheData.setIsZipped(true);
+ * componentCacheData.setId(componentUid);
+ * componentCacheData.setModificationTime(new
+ * Date(component.getLastUpdateDate()));
+ * componentCacheData.setType(component.getComponentType().name().
+ * toLowerCase());
+ *
+ * long averageInsertTimeInMilli =
+ * writeResourceToCassandraComponent(componentCacheData); log.
+ * info("After adding component {} to cassandra. Insert time is {} ms."
+ * , componentUid, averageInsertTimeInMilli);
+ *
+ * } catch (IOException e) { // TODO Auto-generated catch block
+ * e.printStackTrace(); }
+ */
+
+ }
+
+ log.debug("The number of saved components of type {} is {}. Total size is {}", componentTypeEnum, counter,
+ list.size());
+
+ }
+
+ private long writeResourceToCassandraComponent(ComponentCacheData componentCacheData) {
+
+ long startTime = System.currentTimeMillis();
+
+ // call to cassandra read
+ CassandraOperationStatus saveArtifact = componentCassandraDao.saveComponent(componentCacheData);
+ if (saveArtifact != CassandraOperationStatus.OK) {
+ exit("writeResourceToCassandra", 3);
+ }
+
+ long endTime = System.currentTimeMillis();
+
+ return (endTime - startTime);
+ }
+
+ private void deserializeByThreads(List<ESArtifactData> list, ExecutorService executor, int threadNumber) {
+
+ long fullSearchStart = System.currentTimeMillis();
+ // for (int k =0; k < parts; k++) {
+
+ List<List<ESArtifactData>> lists = new ArrayList<>();
+ for (int i = 0; i < threadNumber; i++) {
+ lists.add(new ArrayList<>());
+ }
+
+ List<Future<List<Resource>>> results = new ArrayList<>();
+ for (int i = 0; i < list.size(); i++) {
+ lists.get(i % threadNumber).add(list.get(i));
+ }
+
+ for (int i = 0; i < threadNumber; i++) {
+
+ // Callable<List<Resource>> worker = new
+ // MyDesrializabletCallable(lists.get(i), i);
+ Callable<List<Resource>> worker = new My3rdPartyDesrializabletCallable(lists.get(i), i);
+ Future<List<Resource>> submit = executor.submit(worker);
+ results.add(submit);
+ }
+
+ long fullSearchStart2 = System.currentTimeMillis();
+ for (Future<List<Resource>> future : results) {
+ try {
+ while (false == future.isDone()) {
+ Thread.sleep(1);
+ }
+
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ long fullSearchEnd2 = System.currentTimeMillis();
+ log.info("esofer time wait to threads finish " + ((fullSearchEnd2 - fullSearchStart2)) + " ms");
+ // }
+ long fullSearchEnd = System.currentTimeMillis();
+
+ log.info("esofer full desrialize time " + ((fullSearchEnd - fullSearchStart)) + " ms");
+ System.out.println("esofer full desrialize time " + ((fullSearchEnd - fullSearchStart)) + " ms");
+ }
+
+ public class MyDesrializabletCallable implements Callable<List<Resource>> {
+
+ List<ESArtifactData> list;
+ int i;
+
+ public MyDesrializabletCallable(List<ESArtifactData> list, int i) {
+ super();
+ this.list = list;
+ this.i = i;
+ }
+
+ @Override
+ public List<Resource> call() throws Exception {
+ List<Resource> resources = new ArrayList<>();
+ long startSer = System.currentTimeMillis();
+ long endSer = System.currentTimeMillis();
+ long startUnzip = System.currentTimeMillis();
+ long endUnzip = System.currentTimeMillis();
+
+ long avgUnzip = 0;
+ long avgSer = 0;
+ for (ESArtifactData esArtifactData : list) {
+
+ byte[] dataAsArray = esArtifactData.getDataAsArray();
+ startUnzip = System.nanoTime();
+ dataAsArray = ZipUtil.unzip(dataAsArray);
+ endUnzip = System.nanoTime();
+ avgUnzip += (endUnzip - startUnzip);
+
+ startSer = System.nanoTime();
+ Either<Object, Boolean> deserialize = SerializationUtils.deserialize(dataAsArray);
+ endSer = System.nanoTime();
+ avgSer += (endSer - startSer);
+ // Either<Object, Boolean> deserialize =
+ // SerializationUtils.deserialize(esArtifactData.getDataAsArray());
+ if (deserialize.isRight()) {
+ exit("convertByteArrayToResource " + deserialize.right().value(), 5);
+ }
+
+ Resource resource = (Resource) deserialize.left().value();
+ resources.add(resource);
+ // System.out.println("After desrialize T[" + i + "]resource " +
+ // resource.getUniqueId());
+ }
+
+ System.out.println("After desrialize average desrialize " + list.size() + " T[" + i + "] "
+ + (avgSer / 1000 / list.size()) + " micro");
+ System.out.println(
+ "After desrialize average unzip T[" + i + "] " + (avgUnzip / 1000 / list.size()) + " micro");
+
+ ////////////////////////
+ // maybe register most frequently used classes on conf
+ // write
+ // byte barray[] = conf.asByteArray(mySerializableObject);
+ // read
+ // MyObject object = (MyObject)conf.asObject(barray);
+
+ return resources;
+ }
+ }
+
+ public class My3rdPartyDesrializabletCallable implements Callable<List<Resource>> {
+
+ List<ESArtifactData> list;
+ int i;
+
+ public My3rdPartyDesrializabletCallable(List<ESArtifactData> list, int i) {
+ super();
+ this.list = list;
+ this.i = i;
+ }
+
+ @Override
+ public List<Resource> call() throws Exception {
+ List<Resource> resources = new ArrayList<>();
+ long startSer = System.currentTimeMillis();
+ long endSer = System.currentTimeMillis();
+ long startUnzip = System.currentTimeMillis();
+ long endUnzip = System.currentTimeMillis();
+
+ long avgUnzip = 0;
+ long avgSer = 0;
+ for (ESArtifactData esArtifactData : list) {
+
+ byte[] dataAsArray = esArtifactData.getDataAsArray();
+ startUnzip = System.nanoTime();
+ dataAsArray = ZipUtil.unzip(dataAsArray);
+ endUnzip = System.nanoTime();
+ avgUnzip += (endUnzip - startUnzip);
+
+ startSer = System.nanoTime();
+
+ Either<Resource, Boolean> deserializeExt = SerializationUtils.deserializeExt(dataAsArray,
+ Resource.class, "");
+
+ if (deserializeExt.isLeft()) {
+ Resource resource = deserializeExt.left().value();
+ // System.out.println("=============================================");
+ // System.out.println(resource.getCapabilities().size());
+ // System.out.println(resource.getRequirements().size());
+ endSer = System.nanoTime();
+ avgSer += (endSer - startSer);
+ resources.add(resource);
+ // System.out.println("After desrialize T[" + i + "]resource
+ // " + resource.getUniqueId());
+ }
+ }
+
+ System.out.println("After desrialize average desrialize " + list.size() + " T[" + i + "] "
+ + (avgSer / 1000 / list.size()) + " micro");
+ System.out.println(
+ "After desrialize average unzip T[" + i + "] " + (avgUnzip / 1000 / list.size()) + " micro");
+
+ return resources;
+ }
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
new file mode 100644
index 0000000000..9f15c83dd8
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
@@ -0,0 +1,103 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl;
+
+import com.thinkaurelius.titan.core.TitanFactory;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanVertex;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Created by mlando on 2/23/2016.
+ */
+public class ProductLogic {
+
+ private static Logger log = LoggerFactory.getLogger(ProductLogic.class.getName());
+
+ public boolean deleteAllProducts(String titanFile, String beHost, String bePort, String adminUser) {
+ log.debug("retrieving all products from graph");
+ RestUtils restUtils = null;
+ try {
+ List<String> productList = getAllProducts(titanFile);
+ restUtils = new RestUtils();
+ if (productList != null) {
+ for (String productUid : productList) {
+ Integer status = restUtils.deleteProduct(productUid, beHost, bePort, adminUser);
+ }
+ return true;
+ } else {
+ log.error("failed to get products from graph");
+ return false;
+ }
+ } finally {
+ if (restUtils != null) {
+ restUtils.closeClient();
+ }
+ }
+ }
+
+ private List<String> getAllProducts(String titanFile) {
+ TitanGraph graph = null;
+ try {
+ graph = openGraph(titanFile);
+ List<String> productsToDelete = new ArrayList<String>();
+ Iterable vertices = graph.query()
+ .has(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.Product.getName()).vertices();
+ if (vertices != null) {
+ Iterator<TitanVertex> iter = vertices.iterator();
+ while (iter.hasNext()) {
+ Vertex vertex = iter.next();
+ String id = vertex.value(GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+ productsToDelete.add(id);
+ }
+ }
+
+ graph.tx().commit();
+ return productsToDelete;
+ } catch (Exception e) {
+ e.printStackTrace();
+ graph.tx().rollback();
+ return null;
+
+ } finally {
+ if (graph != null) {
+ graph.close();
+ }
+ }
+ }
+
+ private TitanGraph openGraph(String titanFileLocation) {
+
+ TitanGraph graph = TitanFactory.open(titanFileLocation);
+
+ return graph;
+
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
new file mode 100644
index 0000000000..c256ca09a4
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
@@ -0,0 +1,83 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl;
+
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpUriRequest;
+
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+/**
+ * Thin wrapper around a {@link CloseableHttpClient} used by the asdctool to
+ * issue product delete requests against the SDC backend.
+ *
+ * Created by mlando on 2/23/2016.
+ */
+public class RestUtils {
+
+	// url template: host, port, product uid (names kept for compatibility,
+	// including the historical misspelling of "success")
+	static final String DELETE_PRODUCT = "http://%s:%s/sdc2/rest/v1/catalog/products/%s";
+	static final Integer DELETE_SUCCSES_RESPONSE = 200;
+
+	private static final Logger log = LoggerFactory.getLogger(RestUtils.class.getName());
+	CloseableHttpClient httpClient;
+
+	public RestUtils() {
+		this.httpClient = HttpClients.createDefault();
+	}
+
+	/**
+	 * Closes the underlying http client; failures are logged and swallowed
+	 * since this is a best-effort cleanup.
+	 */
+	public void closeClient() {
+		log.debug("closing http client");
+		try {
+			this.httpClient.close();
+			log.debug("closed http client");
+		} catch (IOException e) {
+			log.debug("close http client failed", e);
+
+		}
+	}
+
+	/**
+	 * Deletes a single product through the backend REST API.
+	 *
+	 * @param productUid unique id of the product to delete
+	 * @param beHost     backend host
+	 * @param bePort     backend port
+	 * @param adminUser  user id sent in the USER_ID header
+	 * @return the http status code of the response, or null if the request
+	 *         failed with an IOException
+	 */
+	public Integer deleteProduct(String productUid, String beHost, String bePort, String adminUser) {
+		String url = String.format(DELETE_PRODUCT, beHost, bePort, productUid);
+		HttpDelete deleteRequest = new HttpDelete(url);
+		deleteRequest.setHeader("USER_ID", adminUser);
+		try (CloseableHttpResponse response = this.httpClient.execute(deleteRequest)) {
+			int status = response.getStatusLine().getStatusCode();
+			if (DELETE_SUCCSES_RESPONSE.equals(status)) {
+				log.debug("Product uid:{} successfully deleted", productUid);
+			} else {
+				log.error("Product uid:{} delete failed status {}", productUid, status);
+			}
+			return status;
+		} catch (IOException e) {
+			log.error("Product uid:" + productUid + " delete failed with exception", e);
+		}
+		return null;
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
new file mode 100644
index 0000000000..b480091723
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
@@ -0,0 +1,180 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.asdctool.Utils;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.thinkaurelius.titan.core.TitanFactory;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanGraphQuery;
+
+/**
+ * Sets the given key/value properties on all service vertices matching any of
+ * the provided criteria, but only for criteria that have at least one
+ * CERTIFIED version in the graph.
+ */
+public class UpdatePropertyOnVertex {
+
+	private static final Logger log = LoggerFactory.getLogger(UpdatePropertyOnVertex.class.getName());
+
+	/**
+	 * For each criteria map in {@code orCriteria}: if at least one CERTIFIED
+	 * vertex matches it, update ALL vertices matching the criteria (regardless
+	 * of lifecycle state - this mirrors the original query, which does not
+	 * filter by state in the update pass) with the entries of
+	 * {@code keyValueToSet}.
+	 *
+	 * @param titanFile     path to the Titan configuration file
+	 * @param keyValueToSet properties to set on each matching vertex
+	 * @param orCriteria    list of alternative criteria maps (OR semantics)
+	 * @return total number of property updates performed, or null on failure
+	 */
+	public Integer updatePropertyOnServiceAtLeastCertified(String titanFile, Map<String, Object> keyValueToSet,
+			List<Map<String, Object>> orCriteria) {
+
+		TitanGraph graph = null;
+
+		Integer numberOfUpdatedVertexes = 0;
+
+		try {
+			graph = openGraph(titanFile);
+
+			if (orCriteria != null && !orCriteria.isEmpty()) {
+
+				for (Map<String, Object> criteria : orCriteria) {
+
+					TitanGraphQuery<? extends TitanGraphQuery> query = graph.query();
+
+					if (criteria != null && !criteria.isEmpty()) {
+						for (Map.Entry<String, Object> entry : criteria.entrySet()) {
+							query = query.has(entry.getKey(), entry.getValue());
+						}
+					}
+
+					Iterator<? extends Vertex> iterator = query
+							.has(GraphPropertiesDictionary.STATE.getProperty(), LifecycleStateEnum.CERTIFIED.name())
+							.vertices().iterator();
+
+					// first pass: is there at least one certified vertex whose
+					// properties really contain all criteria entries?
+					boolean isFoundAtLeastOneCertifiedService = false;
+					while (iterator.hasNext()) {
+						Vertex vertex = iterator.next();
+
+						Map<String, Object> leftProps = Utils.getProperties(vertex);
+						boolean vertexLeftContainsRightProps = Utils.vertexLeftContainsRightProps(leftProps, criteria);
+						if (!vertexLeftContainsRightProps) {
+							log.debug("Ignore vertex since properties it does not contains properties {}. Vertex properties are: {}", criteria, leftProps);
+							continue;
+						}
+
+						isFoundAtLeastOneCertifiedService = true;
+						break;
+					}
+
+					if (isFoundAtLeastOneCertifiedService) {
+
+						Integer currentNumberOfUpdates = updateVertexes(keyValueToSet, graph, criteria);
+
+						if (currentNumberOfUpdates != null) {
+							numberOfUpdatedVertexes += currentNumberOfUpdates;
+						}
+
+					} else {
+						log.debug("No certified service was found for criteria {}", criteria);
+					}
+				}
+
+			}
+
+			graph.tx().commit();
+
+			return numberOfUpdatedVertexes;
+
+		} catch (Exception e) {
+			// log through the logger and guard the rollback: graph is still
+			// null if openGraph itself threw
+			log.error("failed to update properties on vertexes", e);
+			if (graph != null) {
+				graph.tx().rollback();
+			}
+
+			return null;
+
+		} finally {
+			if (graph != null) {
+				graph.close();
+			}
+		}
+
+	}
+
+	/**
+	 * Applies {@code keyValueToSet} to every vertex matching {@code criteria}.
+	 *
+	 * @return the number of individual property writes performed
+	 */
+	private Integer updateVertexes(Map<String, Object> keyValueToSet, TitanGraph graph, Map<String, Object> criteria) {
+		Integer numberOfUpdatedVertexesPerService = 0;
+
+		TitanGraphQuery<? extends TitanGraphQuery> updateQuery = graph.query();
+
+		if (criteria != null && !criteria.isEmpty()) {
+			for (Map.Entry<String, Object> entry : criteria.entrySet()) {
+				updateQuery = updateQuery.has(entry.getKey(), entry.getValue());
+			}
+		}
+		Iterator<? extends Vertex> updateIterator = updateQuery.vertices().iterator();
+
+		while (updateIterator.hasNext()) {
+
+			Vertex vertex = updateIterator.next();
+
+			Map<String, Object> leftProps = Utils.getProperties(vertex);
+
+			boolean vertexLeftContainsRightProps = Utils.vertexLeftContainsRightProps(leftProps, criteria);
+			if (!vertexLeftContainsRightProps) {
+				log.debug("Ignore vertex since properties it does not contains properties {}. Vertex properties are {}", criteria, leftProps);
+				continue;
+			}
+
+			if (keyValueToSet != null) {
+				for (Entry<String, Object> entry : keyValueToSet.entrySet()) {
+					String key = entry.getKey();
+					Object value = entry.getValue();
+
+					vertex.property(key, value);
+					log.debug("After setting vertex: {} {} with key value: {}, {}",
+							vertex.property(GraphPropertiesDictionary.NAME.getProperty()),
+							vertex.property(GraphPropertiesDictionary.VERSION.getProperty()),
+							key, value);
+					numberOfUpdatedVertexesPerService++;
+				}
+			}
+
+		}
+
+		log.info("The number of updated services for criteria {} is {}", criteria, numberOfUpdatedVertexesPerService);
+		return numberOfUpdatedVertexesPerService;
+	}
+
+	/**
+	 * Opens a Titan graph instance from the given configuration file.
+	 */
+	public TitanGraph openGraph(String titanFileLocation) {
+
+		TitanGraph graph = TitanFactory.open(titanFileLocation);
+
+		return graph;
+
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AddGroupUuid.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AddGroupUuid.java
new file mode 100644
index 0000000000..db8fee0fa2
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AddGroupUuid.java
@@ -0,0 +1,132 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1604;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.stream.Collectors;
+
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.openecomp.sdc.be.resources.data.GroupData;
+import org.slf4j.Logger;
+
+import fj.data.Either;
+
+public class AddGroupUuid {
+
+ public static boolean addGroupUuids(TitanGenericDao titanGenericDao, Logger log, boolean inTrsansaction) {
+
+ boolean result = true;
+
+ try {
+
+ log.debug("========================================================");
+ log.debug("Before find all groups");
+
+ Either<List<GroupData>, TitanOperationStatus> allGroups = titanGenericDao.getByCriteria(NodeTypeEnum.Group,
+ null, null, GroupData.class);
+
+ if (allGroups.isRight()) {
+ TitanOperationStatus status = allGroups.right().value();
+ log.debug("After finding all groups. Status is {}", status);
+ if (status != TitanOperationStatus.NOT_FOUND && status != TitanOperationStatus.OK) {
+ result = false;
+ return result;
+ } else {
+ return result;
+ }
+ }
+
+ List<GroupData> groups = allGroups.left().value();
+
+ log.info("The number of groups fetched is {}", groups == null ? 0 : groups.size());
+
+ int numberOfUpdates = 0;
+ if (false == groups.isEmpty()) {
+ Map<String, List<GroupData>> invariantIdToGroups = groups.stream()
+ .collect(Collectors.groupingBy(p -> p.getGroupDataDefinition().getInvariantUUID()));
+
+ // All the groups with the same invariantUUID should have the
+ // same group UUID since update VF flow with CSAR was not
+ // supported in the E2E environment.
+
+ log.info("The number of different invariantUuids is {}",
+ invariantIdToGroups == null ? 0 : invariantIdToGroups.size());
+
+ for (Entry<String, List<GroupData>> entry : invariantIdToGroups.entrySet()) {
+
+ String invariantUuid = entry.getKey();
+ List<GroupData> groupsData = entry.getValue();
+
+ StringBuilder builder = new StringBuilder();
+ groupsData.forEach(p -> builder.append(p.getGroupDataDefinition().getUniqueId() + ","));
+
+ String groupUUID = groupsData.get(0).getGroupDataDefinition().getGroupUUID();
+
+ if (groupUUID == null) {
+
+ groupUUID = UniqueIdBuilder.generateUUID();
+
+ log.debug("Before updating groups {} with groupUUID {}", builder.toString(), groupUUID);
+
+ for (GroupData groupData : groupsData) {
+
+ numberOfUpdates++;
+ groupData.getGroupDataDefinition().setGroupUUID(groupUUID);
+ Either<GroupData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(groupData,
+ GroupData.class);
+ if (updateNode.isRight()) {
+ log.error("Failed to update group " + groupData + ". Error is {}",
+ updateNode.right().value().toString());
+ result = false;
+ return result;
+ }
+
+ }
+
+ log.debug("After updating groups {} with groupUUID {}", builder.toString(), groupUUID);
+ }
+
+ }
+ }
+
+ log.info("The number of groups updated with groupUUID is " + numberOfUpdates);
+
+ return result;
+
+ } finally {
+ log.info("Finish updating groupUUIDs. Status is {}.", result);
+ if (inTrsansaction == false) {
+ if (result == false) {
+ titanGenericDao.rollback();
+ } else {
+ titanGenericDao.commit();
+ }
+ }
+ }
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AllowMultipleHeats.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AllowMultipleHeats.java
new file mode 100644
index 0000000000..561cfb5a5b
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AllowMultipleHeats.java
@@ -0,0 +1,144 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1604;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.resources.data.ArtifactData;
+import org.slf4j.Logger;
+
+import fj.data.Either;
+
+public class AllowMultipleHeats {
+
+ public static boolean removeAndUpdateHeatPlaceHolders(TitanGenericDao titanGenericDao, Logger log,
+ boolean inTrsansaction) {
+
+ boolean result = true;
+
+ try {
+
+ List<ArtifactData> artifactsToDelete = new ArrayList<>();
+ List<ArtifactData> artifactsToUpdate = new ArrayList<>();
+
+ String[] phLabels = { "heat", "heatvol", "heatnet" };
+
+ for (String artifactLabel : phLabels) {
+ Map<String, Object> properties = new HashMap<>();
+
+ properties.put(GraphPropertiesDictionary.ARTIFACT_LABEL.getProperty(), artifactLabel);
+
+ Either<List<ArtifactData>, TitanOperationStatus> allHeatArtifacts = titanGenericDao
+ .getByCriteria(NodeTypeEnum.ArtifactRef, properties, null, ArtifactData.class);
+
+ if (allHeatArtifacts.isRight()) {
+ TitanOperationStatus status = allHeatArtifacts.right().value();
+ if (status == TitanOperationStatus.NOT_FOUND) {
+ continue;
+ } else {
+ result = false;
+ return result;
+ }
+
+ }
+
+ List<ArtifactData> list = allHeatArtifacts.left().value();
+ log.debug("Found {} artifacts with label {}", (list == null ? 0 : list.size()), artifactLabel);
+
+ if (list != null && false == list.isEmpty()) {
+
+ for (ArtifactData artifactData : list) {
+ String esId = artifactData.getArtifactDataDefinition().getEsId();
+ if (esId == null || true == esId.isEmpty()) {
+ artifactsToDelete.add(artifactData);
+ } else {
+ artifactsToUpdate.add(artifactData);
+ }
+ }
+ }
+ }
+
+ if (false == artifactsToDelete.isEmpty()) {
+ for (ArtifactData artifactData : artifactsToDelete) {
+ // System.out.println("Going to delete artifact " +
+ // artifactData);
+ log.debug("Going to delete artifact {}", artifactData);
+ Either<ArtifactData, TitanOperationStatus> deleteNode = titanGenericDao.deleteNode(artifactData,
+ ArtifactData.class);
+ if (deleteNode.isRight()) {
+ log.error("Failed to delete artifact node {}", deleteNode.left().value());
+ result = false;
+ return result;
+ } else {
+ log.debug("Delete artifact node {}", deleteNode.left().value());
+ }
+ }
+ }
+
+ log.debug("Number of deleted artifacts is {}", artifactsToDelete.size());
+
+ int counter = 0;
+ if (false == artifactsToUpdate.isEmpty()) {
+ for (ArtifactData artifactData : artifactsToUpdate) {
+ // System.out.println("Going to update artifact " +
+ // artifactData);
+
+ if (artifactData.getArtifactDataDefinition().getMandatory() != null
+ && true == artifactData.getArtifactDataDefinition().getMandatory()) {
+ log.debug("Going to update artifact {}", artifactData);
+ counter++;
+ artifactData.getArtifactDataDefinition().setMandatory(false);
+ Either<ArtifactData, TitanOperationStatus> updatedNode = titanGenericDao
+ .updateNode(artifactData, ArtifactData.class);
+ if (updatedNode.isRight()) {
+ log.error("Failed to update artifact node {}", updatedNode.left().value());
+ result = false;
+ return result;
+ } else {
+ log.debug("Update artifact node {}", updatedNode.left().value());
+ }
+ }
+ }
+ }
+
+ log.debug("Number of updated artifacts is {}", counter);
+
+ return result;
+
+ } finally {
+ if (inTrsansaction == false) {
+ if (result == false) {
+ titanGenericDao.rollback();
+ } else {
+ titanGenericDao.commit();
+ }
+ }
+ }
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AppConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AppConfig.java
new file mode 100644
index 0000000000..b529935a38
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AppConfig.java
@@ -0,0 +1,538 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1604;
+
+import org.openecomp.sdc.asdctool.impl.PopulateComponentCache;
+import org.openecomp.sdc.asdctool.impl.migration.v1607.CsarMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1610.TitanFixUtils;
+import org.openecomp.sdc.asdctool.impl.migration.v1610.ToscaArtifactsAlignment;
+import org.openecomp.sdc.be.auditing.api.IAuditingManager;
+import org.openecomp.sdc.be.auditing.impl.AuditingManager;
+import org.openecomp.sdc.be.components.distribution.engine.IDistributionEngine;
+import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
+import org.openecomp.sdc.be.components.impl.ArtifactsBusinessLogic;
+import org.openecomp.sdc.be.components.impl.CompositionBusinessLogic;
+import org.openecomp.sdc.be.components.impl.GroupBusinessLogic;
+import org.openecomp.sdc.be.components.impl.InputsBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ProductBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ProductComponentInstanceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ResourceImportManager;
+import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ServiceComponentInstanceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.VFComponentInstanceBusinessLogic;
+import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
+import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
+import org.openecomp.sdc.be.dao.cassandra.ComponentCassandraDao;
+import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
+import org.openecomp.sdc.be.dao.impl.AuditingDao;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanGraphClient;
+import org.openecomp.sdc.be.impl.ComponentsUtils;
+import org.openecomp.sdc.be.model.cache.ApplicationDataTypeCache;
+import org.openecomp.sdc.be.model.cache.ComponentCache;
+import org.openecomp.sdc.be.model.operations.api.IAdditionalInformationOperation;
+import org.openecomp.sdc.be.model.operations.api.IElementOperation;
+import org.openecomp.sdc.be.model.operations.api.IGraphLockOperation;
+import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
+import org.openecomp.sdc.be.model.operations.impl.AdditionalInformationOperation;
+import org.openecomp.sdc.be.model.operations.impl.ArtifactOperation;
+import org.openecomp.sdc.be.model.operations.impl.AttributeOperation;
+import org.openecomp.sdc.be.model.operations.impl.CacheMangerOperation;
+import org.openecomp.sdc.be.model.operations.impl.CapabilityInstanceOperation;
+import org.openecomp.sdc.be.model.operations.impl.CapabilityOperation;
+import org.openecomp.sdc.be.model.operations.impl.CapabilityTypeOperation;
+import org.openecomp.sdc.be.model.operations.impl.ComponentInstanceOperation;
+import org.openecomp.sdc.be.model.operations.impl.CsarOperation;
+import org.openecomp.sdc.be.model.operations.impl.ElementOperation;
+import org.openecomp.sdc.be.model.operations.impl.GraphLockOperation;
+import org.openecomp.sdc.be.model.operations.impl.GroupOperation;
+import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
+import org.openecomp.sdc.be.model.operations.impl.HeatParametersOperation;
+import org.openecomp.sdc.be.model.operations.impl.InputsOperation;
+import org.openecomp.sdc.be.model.operations.impl.InterfaceLifecycleOperation;
+import org.openecomp.sdc.be.model.operations.impl.LifecycleOperation;
+import org.openecomp.sdc.be.model.operations.impl.OnboardingClient;
+import org.openecomp.sdc.be.model.operations.impl.ProductOperation;
+import org.openecomp.sdc.be.model.operations.impl.PropertyOperation;
+import org.openecomp.sdc.be.model.operations.impl.RequirementOperation;
+import org.openecomp.sdc.be.model.operations.impl.ResourceOperation;
+import org.openecomp.sdc.be.model.operations.impl.ServiceOperation;
+import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
+import org.openecomp.sdc.be.tosca.CsarUtils;
+import org.openecomp.sdc.be.tosca.ToscaExportHandler;
+import org.openecomp.sdc.be.user.IUserBusinessLogic;
+import org.openecomp.sdc.be.user.UserBusinessLogic;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * Spring configuration for the 1604 migration tool. Every method is a plain
+ * bean factory returning a new instance; beans are wired by name elsewhere.
+ *
+ * NOTE(review): the original declared the bean name "attribute-operation"
+ * twice (methods attribueOperation and attributeOperation, both returning
+ * AttributeOperation). The duplicate, typo-named method was removed so the
+ * bean is defined exactly once.
+ */
+@Configuration
+public class AppConfig {
+	@Bean(name = "serviceMigrationBean")
+	public ServiceMigration serviceMigration() {
+		return new ServiceMigration();
+	}
+
+	@Bean(name = "vfcNamingAlignmentBean")
+	public VfcNamingAlignment vfcNamingAlignment() {
+		return new VfcNamingAlignment();
+	}
+
+	@Bean(name = "derivedFromAlignment")
+	public DerivedFromAlignment derivedFromAlignment() {
+		return new DerivedFromAlignment();
+	}
+
+	@Bean(name = "groupsAlignment")
+	public GroupsAlignment groupsAlignment() {
+		return new GroupsAlignment();
+	}
+
+	@Bean(name = "csarMigration")
+	public CsarMigration csarMigration() {
+		return new CsarMigration();
+	}
+
+	@Bean(name = "titan-generic-dao")
+	public TitanGenericDao titanGenericDao() {
+		return new TitanGenericDao();
+	}
+
+	// graph connection is established via the initMethod callback
+	@Bean(name = "titan-client", initMethod = "createGraph")
+	public TitanGraphClient titanClient() {
+		return new TitanGraphClient();
+	}
+
+	@Bean(name = "resource-operation")
+	public ResourceOperation resourceOperation() {
+		return new ResourceOperation();
+	}
+
+	@Bean(name = "service-operation")
+	public ServiceOperation serviceOperation() {
+		return new ServiceOperation();
+	}
+
+	@Bean(name = "component-instance-operation")
+	public ComponentInstanceOperation componentInstanceOperation() {
+		return new ComponentInstanceOperation();
+	}
+
+	@Bean(name = "capability-instanceOperation")
+	public CapabilityInstanceOperation capabilityInstanceOperation() {
+		return new CapabilityInstanceOperation();
+	}
+
+	@Bean(name = "property-operation")
+	public PropertyOperation propertyOperation() {
+		return new PropertyOperation();
+	}
+
+	@Bean(name = "application-datatype-cache")
+	public ApplicationDataTypeCache applicationDataTypeCache() {
+		return new ApplicationDataTypeCache();
+	}
+
+	@Bean(name = "requirement-operation")
+	public RequirementOperation requirementOperation() {
+		return new RequirementOperation();
+	}
+
+	@Bean(name = "capability-operation")
+	public CapabilityOperation capabilityOperation() {
+		return new CapabilityOperation();
+	}
+
+	@Bean(name = "interface-operation")
+	public InterfaceLifecycleOperation interfaceLifecycleOperation() {
+		return new InterfaceLifecycleOperation();
+	}
+
+	@Bean(name = "element-operation")
+	public IElementOperation elementOperation() {
+		return new ElementOperation();
+	}
+
+	@Bean(name = "additional-information-operation")
+	public IAdditionalInformationOperation addioAdditionalInformationOperation() {
+		return new AdditionalInformationOperation();
+	}
+
+	@Bean(name = "capability-type-operation")
+	public CapabilityTypeOperation capabilityTypeOperation() {
+		return new CapabilityTypeOperation();
+	}
+
+	@Bean(name = "artifact-operation")
+	public ArtifactOperation artifactOperation() {
+		return new ArtifactOperation();
+	}
+
+	@Bean(name = "heat-parameter-operation")
+	public HeatParametersOperation heatParametersOperation() {
+		return new HeatParametersOperation();
+	}
+
+	@Bean(name = "product-operation")
+	public ProductOperation productOperation() {
+		return new ProductOperation();
+	}
+
+	@Bean(name = "lifecycle-operation")
+	public LifecycleOperation lifecycleOperation() {
+		return new LifecycleOperation();
+	}
+
+	@Bean(name = "group-operation")
+	public GroupOperation groupOperation() {
+		return new GroupOperation();
+	}
+
+	@Bean(name = "group-type-operation")
+	public GroupTypeOperation groupTypeOperation() {
+		return new GroupTypeOperation();
+	}
+
+	@Bean(name = "attribute-operation")
+	public AttributeOperation attributeOperation() {
+		return new AttributeOperation();
+	}
+
+	@Bean(name = "titanFixUtils")
+	public TitanFixUtils titanFixUtils() {
+		return new TitanFixUtils();
+	}
+
+	@Bean(name = "populateComponentCache")
+	public PopulateComponentCache populateComponentCache() {
+		return new PopulateComponentCache();
+	}
+
+	@Bean(name = "artifact-cassandra-dao")
+	public ArtifactCassandraDao artifactCassandraDao() {
+		return new ArtifactCassandraDao();
+	}
+
+	@Bean(name = "component-cassandra-dao")
+	public ComponentCassandraDao componentCassandraDao() {
+		return new ComponentCassandraDao();
+	}
+
+	@Bean(name = "cassandra-client")
+	public CassandraClient cassandraClient() {
+		return new CassandraClient();
+	}
+
+	@Bean(name = "cacheManger-operation")
+	public CacheMangerOperation cacheMangerOperation() {
+		return new CacheMangerOperation();
+	}
+
+	@Bean(name = "component-cache")
+	public ComponentCache componentCache() {
+		return new ComponentCache();
+	}
+
+	@Bean(name = "input-operation")
+	public InputsOperation inputsOperation() {
+		return new InputsOperation();
+	}
+
+	@Bean(name = "audit-cassandra-dao")
+	public AuditCassandraDao auditCassandraDao() {
+		return new AuditCassandraDao();
+	}
+
+	@Bean(name = "userBusinessLogic")
+	public IUserBusinessLogic userBusinessLogic() {
+		return new UserBusinessLogic();
+	}
+
+	@Bean(name = "user-operation")
+	public IUserAdminOperation userOperation() {
+		return new UserAdminOperation();
+	}
+
+	@Bean(name = "graph-lock-operation")
+	public IGraphLockOperation graphLockOperation() {
+		return new GraphLockOperation();
+	}
+
+	@Bean(name = "auditingDao")
+	public AuditingDao auditingDao() {
+		return new AuditingDao();
+	}
+
+	@Bean(name = "auditingManager")
+	public IAuditingManager auditingManager() {
+		return new AuditingManager();
+	}
+
+	@Bean(name = "serviceBusinessLogic")
+	public ServiceBusinessLogic serviceBusinessLogic() {
+		return new ServiceBusinessLogic();
+	}
+
+	@Bean(name = "componentUtils")
+	public ComponentsUtils componentUtils() {
+		return new ComponentsUtils();
+	}
+
+	@Bean(name = "toscaArtifactsAlignment")
+	public ToscaArtifactsAlignment toscaArtifactsAlignment() {
+		return new ToscaArtifactsAlignment();
+	}
+
+	@Bean(name = "artifactBusinessLogic")
+	public ArtifactsBusinessLogic artifactBusinessLogic() {
+		return new ArtifactsBusinessLogic();
+	}
+
+	@Bean(name = "resourceBusinessLogic")
+	public ResourceBusinessLogic resourceBusinessLogic() {
+		return new ResourceBusinessLogic();
+	}
+
+	@Bean(name = "lifecycleBusinessLogic")
+	public LifecycleBusinessLogic lifecycleBusinessLogic() {
+		return new LifecycleBusinessLogic();
+	}
+
+	@Bean(name = "serviceDistributionArtifactsBuilder")
+	public ServiceDistributionArtifactsBuilder serviceDistributionArtifactsBuilder() {
+		return new ServiceDistributionArtifactsBuilder();
+	}
+
+	@Bean(name = "distributionEngine")
+	public IDistributionEngine distributionEngine() {
+		// This dependency is needed for initializing context but is not used
+		return null;
+	}
+
+	@Bean(name = "elasticsearch-client")
+	public ElasticSearchClient elasticsearchClient() {
+		// This dependency is needed for initializing context but is not used
+		return null;
+	}
+
+	@Bean(name = "productBusinessLogic")
+	public ProductBusinessLogic productBusinessLogic() {
+		return new ProductBusinessLogic();
+	}
+
+	@Bean(name = "productComponentInstanceBusinessLogic")
+	public ProductComponentInstanceBusinessLogic productComponentInstanceBusinessLogic() {
+		return new ProductComponentInstanceBusinessLogic();
+	}
+
+	@Bean(name = "tosca-export-handler")
+	public ToscaExportHandler toscaExportHandler() {
+		return new ToscaExportHandler();
+	}
+
+	@Bean(name = "csar-operation")
+	public CsarOperation csarOperation() {
+		return new CsarOperation();
+	}
+
+	@Bean(name = "onboarding-client")
+	public OnboardingClient onboardingClient() {
+		return new OnboardingClient();
+	}
+
+	@Bean(name = "vfComponentInstanceBusinessLogic")
+	public VFComponentInstanceBusinessLogic vfComponentInstanceBusinessLogic() {
+		return new VFComponentInstanceBusinessLogic();
+	}
+
+	@Bean(name = "resourceImportManager")
+	public ResourceImportManager resourceImportManager() {
+		return new ResourceImportManager();
+	}
+
+	@Bean(name = "groupBusinessLogic")
+	public GroupBusinessLogic groupBusinessLogic() {
+		return new GroupBusinessLogic();
+	}
+
+	@Bean(name = "inputsBusinessLogic")
+	public InputsBusinessLogic inputsBusinessLogic() {
+		return new InputsBusinessLogic();
+	}
+
+	@Bean(name = "compositionBusinessLogic")
+	public CompositionBusinessLogic compositionBusinessLogic() {
+		return new CompositionBusinessLogic();
+	}
+
+	@Bean(name = "csar-utils")
+	public CsarUtils csarUtils() {
+		return new CsarUtils();
+	}
+
+	@Bean(name = "serviceComponentInstanceBusinessLogic")
+	public ServiceComponentInstanceBusinessLogic serviceComponentInstanceBusinessLogic() {
+		return new ServiceComponentInstanceBusinessLogic();
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/DerivedFromAlignment.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/DerivedFromAlignment.java
new file mode 100644
index 0000000000..c1ddc4fee0
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/DerivedFromAlignment.java
@@ -0,0 +1,232 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1604;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphEdge;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphRelation;
+import org.openecomp.sdc.be.dao.neo4j.GraphEdgeLabels;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.openecomp.sdc.be.resources.data.ResourceMetadataData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import fj.data.Either;
+
+/**
+ * 1604 migration step that re-points the DERIVED_FROM edge of selected resources.
+ * The input file lists pairs of (resource name, new derived-from tosca name);
+ * for each pair the latest resource version is detached from its current parent
+ * and attached to the latest certified version of the new parent.
+ */
+public class DerivedFromAlignment {
+	// FIX: the logger was mistakenly created for VfcNamingAlignment.class;
+	// create it for this class so log lines are attributed correctly.
+	private static Logger log = LoggerFactory.getLogger(DerivedFromAlignment.class.getName());
+	// resource name -> new "derived from" tosca resource name, loaded from the input file
+	private Map<String, String> newDerivedFromValuesHM = new HashMap<String, String>();
+	@Autowired
+	protected TitanGenericDao titanGenericDao;
+
+	/**
+	 * Entry point of the alignment. Reads the input file, rewires the edges and
+	 * commits the graph transaction on success; any failure rolls it back.
+	 *
+	 * @param appConfigDir     application configuration directory (not used by this step)
+	 * @param dataInputFileDir path of the input data file
+	 * @return true when the whole procedure succeeded
+	 */
+	public boolean alignDerivedFrom1604(String appConfigDir, String dataInputFileDir) {
+		log.debug("Started alignDerivedFrom1604 procedure..");
+		boolean result = false;
+		try {
+			if (!getDerivedFromValuesFromFile(dataInputFileDir)) {
+				log.error("Started alignDerivedFrom1604 procedure was failed. Missing data in the input data file.");
+				return result;
+			}
+			result = changeDerivedFrom();
+		} finally {
+			// commit/rollback is driven purely by 'result' so early returns are covered too
+			if (!result) {
+				titanGenericDao.rollback();
+				log.debug("**********************************************");
+				log.debug("alignDerivedFrom1604 procedure FAILED!!");
+				log.debug("**********************************************");
+			} else {
+				titanGenericDao.commit();
+				log.debug("**********************************************");
+				log.debug("alignDerivedFrom1604 procedure ended successfully!");
+				log.debug("**********************************************");
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Resolves both ends of every requested edge change and performs the update.
+	 *
+	 * @return true when all edges were updated
+	 */
+	private boolean changeDerivedFrom() {
+		Map<String, ResourceMetadataData> resourcesHM = getLatestVersionsOfResources();
+		if (resourcesHM == null)
+			return false;
+		Map<String, ResourceMetadataData> derivedFromResourcesHM = getLatestCertifiedVersionsOfDerivedFromResources();
+		if (derivedFromResourcesHM == null)
+			return false;
+		return updateEdges(resourcesHM, derivedFromResourcesHM);
+	}
+
+	/**
+	 * For each resource, deletes the existing DERIVED_FROM relation to its current
+	 * parent and creates a new one to the resource resolved from the input file.
+	 *
+	 * @param resourcesHM            resource name -> latest resource version
+	 * @param derivedFromResourcesHM resource name -> new certified parent resource
+	 * @return true when every edge was rewired successfully
+	 */
+	private boolean updateEdges(Map<String, ResourceMetadataData> resourcesHM,
+			Map<String, ResourceMetadataData> derivedFromResourcesHM) {
+		log.debug("Updating of Edges has been started..");
+		for (Entry<String, ResourceMetadataData> pair : resourcesHM.entrySet()) {
+			ResourceMetadataData curResource = pair.getValue();
+			String uniqeID = (String) curResource.getUniqueId();
+			Either<ImmutablePair<ResourceMetadataData, GraphEdge>, TitanOperationStatus> parentResourceRes = titanGenericDao
+					.getChild(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), uniqeID,
+							GraphEdgeLabels.DERIVED_FROM, NodeTypeEnum.Resource, ResourceMetadataData.class);
+			if (parentResourceRes.isLeft()) {
+				ImmutablePair<ResourceMetadataData, GraphEdge> value = parentResourceRes.left().value();
+				ResourceMetadataData parentResourceData = value.getKey();
+				log.debug("Deleting old relation..");
+				Either<GraphRelation, TitanOperationStatus> deletingRelationRes = titanGenericDao
+						.deleteRelation(curResource, parentResourceData, GraphEdgeLabels.DERIVED_FROM);
+				if (deletingRelationRes.isRight()) {
+					log.error("Couldn't delete relation from resource {} to resource {}, error: {}",
+							curResource.getMetadataDataDefinition().getName(),
+							parentResourceData.getMetadataDataDefinition().getName(),
+							deletingRelationRes.right().value());
+					return false;
+				}
+				ResourceMetadataData newDerivedFromResource = derivedFromResourcesHM.get(pair.getKey());
+				// FIX: defensive guard -- both maps are built from the same input
+				// entries, but a missing parent here previously produced an NPE
+				// inside createRelation instead of a clear error.
+				if (newDerivedFromResource == null) {
+					log.error("Couldn't find new derived from resource for child resource {}", pair.getKey());
+					return false;
+				}
+				Either<GraphRelation, TitanOperationStatus> creatingRelationRes = titanGenericDao
+						.createRelation(curResource, newDerivedFromResource, GraphEdgeLabels.DERIVED_FROM, null);
+				if (creatingRelationRes.isRight()) {
+					log.error("Couldn't create relation from resource {} to resource {}, error: {}",
+							curResource.getMetadataDataDefinition().getName(),
+							newDerivedFromResource.getMetadataDataDefinition().getName(),
+							creatingRelationRes.right().value());
+					return false;
+				}
+			} else {
+				log.error("Couldn't get derived from resource for child resource {}, error: {}", pair.getKey(),
+						parentResourceRes.right().value());
+				return false;
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * Resolves the latest certified version of every new parent resource named in
+	 * the input file. Fails (returns null) if any parent is not certified.
+	 *
+	 * @return resource name -> certified parent resource, or null on any failure
+	 */
+	private Map<String, ResourceMetadataData> getLatestCertifiedVersionsOfDerivedFromResources() {
+		log.debug("Getting latest certified versions of derived from resources according input file");
+		Map<String, ResourceMetadataData> resourcesHM = new HashMap<String, ResourceMetadataData>();
+		Map<String, Object> props = null;
+		for (Entry<String, String> pair : newDerivedFromValuesHM.entrySet()) {
+			props = new HashMap<String, Object>();
+			props.put(GraphPropertiesDictionary.TOSCA_RESOURCE_NAME.getProperty(), pair.getValue());
+			props.put(GraphPropertiesDictionary.IS_HIGHEST_VERSION.getProperty(), true);
+			Either<List<ResourceMetadataData>, TitanOperationStatus> highestVersionResource = titanGenericDao
+					.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+			if (highestVersionResource.isRight()) {
+				log.error("Couldn't get resource {} from DB, error: {}", pair.getValue(),
+						highestVersionResource.right().value());
+				return null;
+			}
+			List<ResourceMetadataData> highestVersionResourceAL = highestVersionResource.left().value();
+			// FIX: also guard against an empty (non-null) list, which previously
+			// caused an IndexOutOfBoundsException on get(0)
+			if (highestVersionResourceAL == null || highestVersionResourceAL.isEmpty()) {
+				log.error("Couldn't get resource {}. No resource found", pair.getValue());
+				return null;
+			}
+			ResourceMetadataData resource = highestVersionResourceAL.get(0);
+			String state = resource.getMetadataDataDefinition().getState();
+			if (!state.equals(LifecycleStateEnum.CERTIFIED.name())) {
+				log.error(
+						"alignDerivedFrom1604 procedure FAILED!! Derived from resource {} is not certified. Please certify manually and repeat the procedure.",
+						pair.getValue());
+				return null;
+			}
+			resourcesHM.put(pair.getKey(), resource);
+		}
+		return resourcesHM;
+	}
+
+	/**
+	 * Resolves the latest version of every child resource named in the input file.
+	 * When two "highest version" nodes exist (a certified one plus a newer working
+	 * copy), the non-certified one is chosen.
+	 *
+	 * @return resource name -> latest resource version, or null on any failure
+	 */
+	private Map<String, ResourceMetadataData> getLatestVersionsOfResources() {
+		log.debug("Getting latest versions of resources according input file");
+		Map<String, ResourceMetadataData> resourcesHM = new HashMap<String, ResourceMetadataData>();
+		ResourceMetadataData foundResource = null;
+		Map<String, Object> props = null;
+		for (Entry<String, String> pair : newDerivedFromValuesHM.entrySet()) {// filter
+			props = new HashMap<String, Object>();
+			props.put(GraphPropertiesDictionary.NAME.getProperty(), pair.getKey());
+			props.put(GraphPropertiesDictionary.IS_HIGHEST_VERSION.getProperty(), true);
+
+			Either<List<ResourceMetadataData>, TitanOperationStatus> highestVersionResource = titanGenericDao
+					.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+			if (highestVersionResource.isRight()) {
+				log.error("Couldn't get resource {} from DB, error: {}", pair.getKey(),
+						highestVersionResource.right().value());
+				return null;
+			}
+			List<ResourceMetadataData> highestVersionResourceAL = highestVersionResource.left().value();
+			// FIX: also guard against an empty (non-null) list, which previously
+			// caused an IndexOutOfBoundsException on get(0)
+			if (highestVersionResourceAL == null || highestVersionResourceAL.isEmpty()) {
+				log.error("Couldn't get resource {}. No resource found", pair.getKey());
+				return null;
+			}
+			if (highestVersionResourceAL.size() > 2) {
+				log.error("Invalid response. Found more than two highest version resources with name {}.",
+						pair.getKey());
+				return null;
+			}
+			foundResource = highestVersionResourceAL.get(0);
+			if (highestVersionResourceAL.size() == 2) {
+				// prefer the non-certified (working) copy when both exist
+				foundResource = foundResource.getMetadataDataDefinition().getState()
+						.equals(LifecycleStateEnum.CERTIFIED.name()) ? highestVersionResourceAL.get(1) : foundResource;
+			}
+			resourcesHM.put(pair.getKey(), foundResource);
+		}
+		return resourcesHM;
+	}
+
+	/**
+	 * Parses the input file into newDerivedFromValuesHM. Each line must contain at
+	 * least two space-separated tokens; the LAST token is the new derived-from
+	 * name, everything before it is the resource name (which may contain spaces).
+	 *
+	 * @param dataInputFileDir path of the input data file
+	 * @return true when the file was read and parsed successfully
+	 */
+	private boolean getDerivedFromValuesFromFile(String dataInputFileDir) {
+		// try-with-resources replaces the original manual close-in-finally block
+		try (BufferedReader br = new BufferedReader(new FileReader(dataInputFileDir))) {
+			String curPair;
+			while ((curPair = br.readLine()) != null) {
+				String[] pair = curPair.split(" ");
+				if (pair.length < 2) {
+					log.error(
+							"Expected at least two tokens in every line. Usage: <resource_name> <new_derived_from_name>");
+					return false;
+				}
+				String derivedFrom = pair[pair.length - 1];
+				String name = curPair.substring(0, curPair.length() - derivedFrom.length() - 1);
+				newDerivedFromValuesHM.put(name, derivedFrom);
+			}
+			return true;
+		} catch (FileNotFoundException e) {
+			log.error("Started alignDerivedFrom1604 procedure was failed. Missing input data file.", e);
+		} catch (IOException e) {
+			log.error("Started alignDerivedFrom1604 procedure was failed. The input data file is empty.", e);
+		}
+		return false;
+	}
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/GroupsAlignment.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/GroupsAlignment.java
new file mode 100644
index 0000000000..aebcddfee2
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/GroupsAlignment.java
@@ -0,0 +1,201 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1604;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.ArtifactDefinition;
+import org.openecomp.sdc.be.model.GroupDefinition;
+import org.openecomp.sdc.be.model.GroupTypeDefinition;
+import org.openecomp.sdc.be.model.operations.api.IArtifactOperation;
+import org.openecomp.sdc.be.model.operations.api.IGroupOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.openecomp.sdc.be.resources.data.ResourceMetadataData;
+import org.openecomp.sdc.common.api.ArtifactTypeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import fj.data.Either;
+
+/**
+ * 1604 migration step that creates a default VfModule group on every VF
+ * resource whose HEAT / HEAT_VOL / HEAT_NET artifacts are not yet covered by an
+ * existing group.
+ */
+public class GroupsAlignment {
+
+	// FIX: the logger was mistakenly created for ServiceMigration.class;
+	// create it for this class so log lines are attributed correctly.
+	private static Logger log = LoggerFactory.getLogger(GroupsAlignment.class.getName());
+
+	public static String INITIAL_VERSION = "1.0";
+	private static final String DEFAULT_GROUP_VF_MODULE = "org.openecomp.groups.VfModule";
+	private static final String MODULE = "::module-";
+
+	@Autowired
+	protected TitanGenericDao titanGenericDao;
+	@Autowired
+	protected IArtifactOperation artifactOperation;
+	@Autowired
+	protected IGroupOperation groupOperation;
+	@Autowired
+	protected GroupTypeOperation groupTypeOperation;
+
+	/**
+	 * Entry point of the alignment: iterates all VF resources and creates missing
+	 * groups. Commits the graph transaction on success, rolls back on any failure.
+	 *
+	 * @param appConfigDir application configuration directory (not used by this step)
+	 * @return true when all resources were processed successfully
+	 */
+	public boolean alignGroups(String appConfigDir) {
+
+		log.debug("Started the align groups procedure ...");
+		log.debug("Getting all resources with resources");
+		boolean result = false;
+		try {
+
+			Map<String, Object> properties = new HashMap<>();
+			properties.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
+			Either<List<ResourceMetadataData>, TitanOperationStatus> allVfResources = titanGenericDao
+					.getByCriteria(NodeTypeEnum.Resource, properties, ResourceMetadataData.class);
+
+			if (allVfResources.isRight()) {
+				log.error("Couldn't get VF resources from DB, error: {}", allVfResources.right().value());
+				result = false;
+				return result;
+			}
+			List<ResourceMetadataData> resourcesList = allVfResources.left().value();
+			if (resourcesList == null) {
+				log.error("Couldn't get VF resources from DB, no resources found");
+				result = false;
+				return result;
+			}
+			log.debug("Found {} VF resources", resourcesList.size());
+			// FIX: the original loop overwrote 'result' on every iteration, so a
+			// failure on an earlier resource was masked by a later success and the
+			// transaction was wrongly committed. Stop at the first failure instead.
+			// (An empty list still yields 'false', preserving the original outcome.)
+			result = !resourcesList.isEmpty();
+			for (ResourceMetadataData resource : resourcesList) {
+				if (!createGroupIfContainsArtifacts(resource)) {
+					result = false;
+					break;
+				}
+			}
+		} finally {
+			if (!result) {
+				titanGenericDao.rollback();
+				log.debug("**********************************************");
+				log.debug("The align groups procedure FAILED!!");
+				log.debug("**********************************************");
+			} else {
+				titanGenericDao.commit();
+				log.debug("**********************************************");
+				log.debug("The align groups procedure ended successfully!");
+				log.debug("**********************************************");
+			}
+		}
+
+		return result;
+	}
+
+	/**
+	 * Collects the resource's HEAT-family artifacts, removes the ones already
+	 * referenced by existing groups, and creates one new VfModule group for the
+	 * remainder (if any).
+	 *
+	 * @param resource the VF resource to inspect
+	 * @return true when nothing needed to be done or the group was created
+	 */
+	private boolean createGroupIfContainsArtifacts(ResourceMetadataData resource) {
+
+		String uniqueId = resource.getMetadataDataDefinition().getUniqueId();
+		StorageOperationStatus result = StorageOperationStatus.OK;
+		Either<Map<String, ArtifactDefinition>, StorageOperationStatus> allArtifactsRes = artifactOperation
+				.getArtifacts(uniqueId, NodeTypeEnum.Resource, true);
+		if (allArtifactsRes.isRight()) {
+			log.error("Couldn't get resource artifacts from DB, error: {}", allArtifactsRes.right().value());
+			return false;
+		}
+		Map<String, ArtifactDefinition> artifactsHM = allArtifactsRes.left().value();
+		ArrayList<String> foundArtifactsAL = new ArrayList<String>();
+		for (ArtifactDefinition curArtifact : artifactsHM.values()) {
+			String atrifactType = curArtifact.getArtifactType();
+			// only HEAT-family artifacts participate in VfModule groups
+			if (atrifactType.equalsIgnoreCase(ArtifactTypeEnum.HEAT_VOL.getType())
+					|| atrifactType.equalsIgnoreCase(ArtifactTypeEnum.HEAT_NET.getType())
+					|| atrifactType.equalsIgnoreCase(ArtifactTypeEnum.HEAT.getType())) {
+				foundArtifactsAL.add(curArtifact.getUniqueId());
+			}
+		}
+		if (foundArtifactsAL.size() > 0) {
+			Either<List<GroupDefinition>, TitanOperationStatus> allGroupsRes = groupOperation
+					.getAllGroupsFromGraph(uniqueId, NodeTypeEnum.Resource);
+			int groupCounter = 1;
+			if (allGroupsRes.isRight()) {
+				// OK / NOT_FOUND simply mean "no groups yet"; anything else is a real error
+				if (allGroupsRes.right().value().name().equals(TitanOperationStatus.OK.name())
+						|| allGroupsRes.right().value().name().equals(TitanOperationStatus.NOT_FOUND.name())) {
+					log.debug("Not found groups resource related to resource {}, response: {}",
+							resource.getMetadataDataDefinition().getName(), allGroupsRes.right().value());
+				} else {
+					log.error("Not found groups resource related to resource {}, DB error: {}",
+							resource.getMetadataDataDefinition().getName(), allGroupsRes.right().value());
+					return false;
+				}
+			} else if (allGroupsRes.left().value() != null && allGroupsRes.left().value().size() > 0) {
+				// drop artifacts already covered by an existing group and number the
+				// new module after the existing ones
+				groupCounter += allGroupsRes.left().value().size();
+				for (GroupDefinition curGroup : allGroupsRes.left().value()) {
+					for (String curGroupArtifact : curGroup.getArtifacts()) {
+						if (foundArtifactsAL.contains(curGroupArtifact)) {
+							foundArtifactsAL.remove(curGroupArtifact);
+						}
+					}
+				}
+			}
+			if (foundArtifactsAL.size() > 0) {
+				GroupDefinition groupDefinition = new GroupDefinition();
+				groupDefinition.setName(resource.getMetadataDataDefinition().getName() + MODULE + groupCounter);
+				groupDefinition.setType(DEFAULT_GROUP_VF_MODULE);
+				groupDefinition.setArtifacts(foundArtifactsAL);
+				log.debug("Creating new group {} for VF resource {}", groupDefinition.getName(),
+						resource.getMetadataDataDefinition().getName());
+				return createGroup(resource.getUniqueId(), ComponentTypeEnum.RESOURCE, groupDefinition);
+
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * Persists a new group on the given component after resolving its group type
+	 * and stamping an invariant UUID and the initial version.
+	 *
+	 * @param uniqueId        unique id of the owning component
+	 * @param componentType   type of the owning component
+	 * @param groupDefinition group to persist (mutated with uuid/version/typeUid)
+	 * @return true when the group was added to the graph
+	 */
+	private boolean createGroup(Object uniqueId, ComponentTypeEnum componentType, GroupDefinition groupDefinition) {
+
+		NodeTypeEnum nodeTypeEnum = componentType.getNodeType();
+		String groupType = groupDefinition.getType();
+
+		Either<GroupTypeDefinition, StorageOperationStatus> getGroupTypeRes = groupTypeOperation
+				.getLatestGroupTypeByType(groupType, true);
+		if (getGroupTypeRes.isRight()) {
+			log.error("Couldn't get grouptype by type {} from DB, error: {}", groupType,
+					getGroupTypeRes.right().value());
+			return false;
+		}
+
+		GroupTypeDefinition groupTypeDefinition = getGroupTypeRes.left().value();
+
+		String invariantUUID = UniqueIdBuilder.buildInvariantUUID();
+		groupDefinition.setInvariantUUID(invariantUUID);
+		groupDefinition.setVersion(INITIAL_VERSION);
+		groupDefinition.setTypeUid(groupTypeDefinition.getUniqueId());
+
+		Either<GroupDefinition, StorageOperationStatus> addGroupToGraphRes = groupOperation.addGroup(nodeTypeEnum,
+				(String) uniqueId, groupDefinition, true);
+
+		if (addGroupToGraphRes.isRight()) {
+			log.error("Couldn't add group {} to graph, error: {}", groupDefinition.getName(),
+					addGroupToGraphRes.right().value());
+			return false;
+		}
+		log.debug("The group {} has been created", groupDefinition.getName());
+		return true;
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/MigrationCategory.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/MigrationCategory.java
new file mode 100644
index 0000000000..5605f0980d
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/MigrationCategory.java
@@ -0,0 +1,48 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1604;
+
+import java.util.List;
+
+import org.openecomp.sdc.be.datatypes.category.CategoryDataDefinition;
+
+/**
+ * Category representation used by the 1604 category migration: extends the
+ * regular category data with the pre-migration name and the list of migrated
+ * sub-categories.
+ */
+public class MigrationCategory extends CategoryDataDefinition {
+
+	// name this category had before the migration renamed it; null for new categories
+	private String oldName;
+
+	// kept package-private, as in the original layout
+	List<MigrationSubCategory> subcategories;
+
+	public String getOldName() {
+		return this.oldName;
+	}
+
+	public void setOldName(String oldName) {
+		this.oldName = oldName;
+	}
+
+	public List<MigrationSubCategory> getSubcategories() {
+		return this.subcategories;
+	}
+
+	public void setSubcategories(List<MigrationSubCategory> subcategories) {
+		this.subcategories = subcategories;
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/MigrationSubCategory.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/MigrationSubCategory.java
new file mode 100644
index 0000000000..f1886622be
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/MigrationSubCategory.java
@@ -0,0 +1,36 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1604;
+
+import org.openecomp.sdc.be.datatypes.category.SubCategoryDataDefinition;
+
+/**
+ * Sub-category representation used by the 1604 category migration; carries the
+ * pre-migration sub-category name on top of the regular definition.
+ */
+public class MigrationSubCategory extends SubCategoryDataDefinition {
+
+	// name this sub-category had before the migration renamed it; null for new ones
+	private String oldName;
+
+	public String getOldName() {
+		return this.oldName;
+	}
+
+	public void setOldName(String oldName) {
+		this.oldName = oldName;
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/ServiceMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/ServiceMigration.java
new file mode 100644
index 0000000000..cb7f05ddfd
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/ServiceMigration.java
@@ -0,0 +1,1703 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1604;
+
+import static java.nio.file.Files.readAllBytes;
+import static java.nio.file.Paths.get;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.UUID;
+
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphEdge;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphRelation;
+import org.openecomp.sdc.be.dao.graph.datatype.RelationEndPoint;
+import org.openecomp.sdc.be.dao.neo4j.GraphEdgeLabels;
+import org.openecomp.sdc.be.dao.neo4j.GraphEdgePropertiesDictionary;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.components.ResourceMetadataDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.AdditionalInformationDefinition;
+import org.openecomp.sdc.be.model.CapabilityDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.PropertyDefinition;
+import org.openecomp.sdc.be.model.RequirementDefinition;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.category.CategoryDefinition;
+import org.openecomp.sdc.be.model.category.SubCategoryDefinition;
+import org.openecomp.sdc.be.model.operations.api.IElementOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.AdditionalInformationOperation;
+import org.openecomp.sdc.be.model.operations.impl.ComponentInstanceOperation;
+import org.openecomp.sdc.be.model.operations.impl.ComponentOperation;
+import org.openecomp.sdc.be.model.operations.impl.LifecycleOperation;
+import org.openecomp.sdc.be.model.operations.impl.ProductOperation;
+import org.openecomp.sdc.be.model.operations.impl.PropertyOperation;
+import org.openecomp.sdc.be.model.operations.impl.ResourceOperation;
+import org.openecomp.sdc.be.model.operations.impl.ServiceOperation;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.openecomp.sdc.be.resources.data.CapabilityData;
+import org.openecomp.sdc.be.resources.data.ComponentInstanceData;
+import org.openecomp.sdc.be.resources.data.ComponentMetadataData;
+import org.openecomp.sdc.be.resources.data.ProductMetadataData;
+import org.openecomp.sdc.be.resources.data.RelationshipInstData;
+import org.openecomp.sdc.be.resources.data.RequirementData;
+import org.openecomp.sdc.be.resources.data.ResourceMetadataData;
+import org.openecomp.sdc.be.resources.data.ServiceMetadataData;
+import org.openecomp.sdc.be.resources.data.TagData;
+import org.openecomp.sdc.be.resources.data.category.CategoryData;
+import org.openecomp.sdc.be.resources.data.category.SubCategoryData;
+import org.openecomp.sdc.be.utils.CommonBeUtils;
+import org.openecomp.sdc.common.util.ValidationUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.yaml.snakeyaml.Yaml;
+
+import com.thinkaurelius.titan.core.TitanEdge;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanVertex;
+
+import fj.data.Either;
+
+public class ServiceMigration {
+
+ private static final String[] NORMATIVE_OLD_NAMES = {
+ "tosca.nodes.network.Network", "tosca.nodes.network.Port",
+ "tosca.nodes.BlockStorage", "tosca.nodes.Compute", "tosca.nodes.Container.Application",
+ "tosca.nodes.Container.Runtime", "tosca.nodes.Database", "tosca.nodes.DBMS", "tosca.nodes.LoadBalancer",
+ "tosca.nodes.ObjectStorage", "tosca.nodes.Root", "tosca.nodes.SoftwareComponent",
+ "tosca.nodes.WebApplication", "tosca.nodes.WebServer", };
+
+ private static Logger log = LoggerFactory.getLogger(ServiceMigration.class.getName());
+
+ @Autowired
+ protected TitanGenericDao titanGenericDao;
+ @Autowired
+ protected ResourceOperation resourceOperation;
+ @Autowired
+ protected ServiceOperation serviceOperation;
+ @Autowired
+ protected ProductOperation productOperation;
+ @Autowired
+ protected LifecycleOperation lifecycleOperaion;
+ @Autowired
+ protected PropertyOperation propertyOperation;
+ @Autowired
+ protected AdditionalInformationOperation additionalInformationOperation;
+ @Autowired
+ protected ComponentInstanceOperation componentInstanceOperaion;
+ @Autowired
+ protected IElementOperation elementOperation;
+
+	/**
+	 * Runs the full 1602 -> 1604 migration as a sequence of steps; the first step
+	 * that fails aborts the run. The graph transaction is committed only when all
+	 * steps succeed, otherwise it is rolled back.
+	 *
+	 * @param appConfigDir application configuration directory, forwarded to the
+	 *                     category-update step
+	 * @return true when every migration step succeeded
+	 */
+	public boolean migrate1602to1604(String appConfigDir) {
+
+		boolean result = false;
+
+		try {
+			// each step logs its own failure reason; 'result' stays false so the
+			// finally block rolls the transaction back on any early return
+			if (!addResourceCounterToResources()) {
+				log.debug("Failed to update resource instance counter on resources");
+				return false;
+			}
+			if (!updateComponentInstanceType()) {
+				log.debug("Failed to update component instance type");
+				return false;
+			}
+			// fix VF
+			if (!fixDerivedVf()) {
+				log.debug("Failed to fix VFs");
+				return false;
+			}
+			// update instances and relation
+			if (!updateCalculatedEdges()) {
+				log.debug("Failed to update calculated edges for VF instances");
+				return false;
+			}
+			// update instances and relation
+			if (!updateRelations()) {
+				log.debug("Failed to update Instance And Relations in services");
+				return false;
+			}
+			if (!updateCategories(appConfigDir)) {
+				log.debug("Failed to update categories");
+				return false;
+			}
+			if (!AllowMultipleHeats.removeAndUpdateHeatPlaceHolders(titanGenericDao, log, true)) {
+				log.error("Failed to update heat place holders");
+				return false;
+			}
+			if (!AddGroupUuid.addGroupUuids(titanGenericDao, log, true)) {
+				log.error("Failed to update group UUIDs");
+				return false;
+			}
+
+			result = true;
+		} finally {
+			if (result) {
+				titanGenericDao.commit();
+			} else {
+				titanGenericDao.rollback();
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Loads the category migration definitions from categoryMigration.yaml under
+	 * the given configuration directory and applies them per component type
+	 * (resource and service categories; other types are skipped).
+	 *
+	 * @param appConfigDir configuration directory; assumed to already end with a
+	 *                     path separator -- TODO confirm with callers
+	 * @return true when every category group was applied successfully
+	 */
+	private boolean updateCategories(String appConfigDir) {
+		String categoryMigrationFile = appConfigDir + "categoryMigration.yaml";
+
+		Map<String, List<MigrationCategory>> categoriesFromYml;
+		try {
+			categoriesFromYml = createCategoriesFromYml(categoryMigrationFile);
+			if (categoriesFromYml == null || categoriesFromYml.isEmpty()) {
+				log.debug("updateCategories failed to load categories form migration file {}", categoryMigrationFile);
+				return false;
+			}
+		} catch (Exception e) {
+			log.debug("Failed to load category migration file :{} error: {}",categoryMigrationFile, e);
+			return false;
+		}
+		for (Map.Entry<String, List<MigrationCategory>> entry : categoriesFromYml.entrySet()) {
+			ComponentTypeEnum componentType = ComponentTypeEnum.findByParamName(entry.getKey());
+			// guard clause: unknown component type in the migration file aborts the run
+			if (componentType == null) {
+				log.debug("updateCategories failed not supported component file in migration categories file {}", entry.getKey());
+				return false;
+			}
+			switch (componentType) {
+			case RESOURCE:
+				// idiom fix: '!cond' instead of 'cond == false'
+				if (!updateResourceCategories(entry.getValue())) {
+					log.debug("updateCategories failed to update resource categories");
+					return false;
+				}
+				break;
+			case SERVICE:
+				if (!updateServiceCategories(entry.getValue())) {
+					log.debug("updateCategories failed to update service categories");
+					return false;
+				}
+				break;
+			default:
+				log.debug("updateCategories no changes for categories from type {}", componentType);
+			}
+		}
+		return true;
+	}
+
+ /**
+  * Applies service-category migration entries. Entries without an oldName are
+  * created when no category with the same name exists yet (idempotent across
+  * re-runs); entries with an oldName rename the matching existing category via
+  * updateSingleResourceCategory.
+  *
+  * @param categories migration entries loaded from categoryMigration.yaml
+  * @return false on the first failure, true when all entries were applied
+  */
+ private boolean updateServiceCategories(List<MigrationCategory> categories) {
+ log.debug("updateServiceCategories STARTED");
+ Either<List<CategoryDefinition>, ActionStatus> serviceCategories = elementOperation
+ .getAllCategories(NodeTypeEnum.ServiceNewCategory, true);
+ if (serviceCategories.isRight()) {
+ log.debug("updateServiceCategories failed fetch all service categories ,error: {}", serviceCategories.right().value());
+ return false;
+ }
+ for (MigrationCategory newCat : categories) {
+
+ if (newCat.getOldName() == null) {
+ // add new — skip creation if a category with this name already exists
+ // (makes the migration safe to re-run)
+ boolean exist = false;
+ for (CategoryDefinition catInDB : serviceCategories.left().value()) {
+ if (newCat.getName().equals(catInDB.getName())) {
+ exist = true;
+ break;
+ }
+ }
+ if (!exist) {
+ CategoryDefinition categoryDefinition = new CategoryDefinition(newCat);
+ Either<CategoryDefinition, ActionStatus> result = elementOperation
+ .createCategory(categoryDefinition, NodeTypeEnum.ServiceNewCategory, true);
+ if (result.isRight()) {
+ log.debug("Failed to create service category {}, error: {}", categoryDefinition, result.right().value());
+ return false;
+ }
+ log.debug("service category {} created", categoryDefinition);
+ }
+ } else {
+ // update exist — only the first category matching the old name is renamed
+ for (CategoryDefinition catInDB : serviceCategories.left().value()) {
+ if (newCat.getOldName().equals(catInDB.getName())) {
+ Either<CategoryData, TitanOperationStatus> updateSingleResult = updateSingleResourceCategory(
+ newCat, NodeTypeEnum.ServiceNewCategory);
+ if (updateSingleResult.isRight()) {
+ return false;
+ }
+ break;
+ }
+ }
+ }
+ }
+ log.debug("updateServiceCategories ENDED");
+ return true;
+ }
+
+ /**
+  * Renames a single category node (resource or service), looked up by its old
+  * name: sets the new name, icons and normalized name and persists the node.
+  *
+  * @param newCat   migration entry carrying the oldName -> name mapping
+  * @param nodetype category node type (ResourceNewCategory / ServiceNewCategory)
+  * @return the updated node on success, or the titan status on failure
+  */
+ private Either<CategoryData, TitanOperationStatus> updateSingleResourceCategory(MigrationCategory newCat,
+ NodeTypeEnum nodetype) {
+ Map<String, Object> properties = new HashMap<>();
+ properties.put(GraphPropertiesDictionary.NAME.getProperty(), newCat.getOldName());
+ Either<List<CategoryData>, TitanOperationStatus> categoryEither = titanGenericDao.getByCriteria(nodetype,
+ properties, CategoryData.class);
+ if (categoryEither.isRight() && categoryEither.right().value() != TitanOperationStatus.NOT_FOUND) {
+ log.debug("Failed to get {} categories, for name {} error {}", nodetype, newCat.getOldName(),
+ categoryEither.right().value());
+ return Either.right(categoryEither.right().value());
+ }
+ List<CategoryData> categoryList = (categoryEither.isLeft() ? categoryEither.left().value() : null);
+ // guard against both NOT_FOUND (null) and an empty result list before get(0)
+ if (categoryList == null || categoryList.isEmpty()) {
+ // fixed placeholder count: the original message had three {} for two arguments
+ log.debug("No {} categories, for name {}", nodetype, newCat.getOldName());
+ return Either.right(TitanOperationStatus.NOT_FOUND);
+ }
+ CategoryData categoryData = categoryList.get(0);
+ categoryData.getCategoryDataDefinition().setName(newCat.getName());
+ categoryData.getCategoryDataDefinition().setIcons(newCat.getIcons());
+ categoryData.getCategoryDataDefinition()
+ .setNormalizedName(ValidationUtils.normalizeCategoryName4Uniqueness(newCat.getName()));
+ Either<CategoryData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(categoryData,
+ CategoryData.class);
+ if (updateNode.isRight()) {
+ log.debug("Failed to update {} category {} error {}", nodetype, categoryData, updateNode.right().value());
+ return Either.right(updateNode.right().value());
+ }
+ log.debug("Update {} category {} ", nodetype, categoryData);
+ return Either.left(updateNode.left().value());
+ }
+
+ /**
+  * Applies resource-category migration entries. Entries without an oldName are
+  * created (with all their sub-categories) unless a category with the same name
+  * already exists; entries with an oldName are renamed in place and their
+  * sub-categories are created or renamed the same way.
+  *
+  * @param categories migration entries loaded from categoryMigration.yaml
+  * @return false on the first failure, true when all entries were applied
+  */
+ private boolean updateResourceCategories(List<MigrationCategory> categories) {
+ log.debug("updateResourceCategories STARTED");
+ Either<List<CategoryDefinition>, ActionStatus> resourceCategories = elementOperation
+ .getAllCategories(NodeTypeEnum.ResourceNewCategory, true);
+ if (resourceCategories.isRight()) {
+ // switched from string concatenation to parameterized logging
+ log.debug("updateResourceCategories failed fetch all resource categories ,error {}",
+ resourceCategories.right().value());
+ return false;
+ }
+ for (MigrationCategory newCat : categories) {
+ if (newCat.getOldName() == null) {
+ // add new — check if already created in a previous run (idempotency)
+ boolean exist = false;
+ for (CategoryDefinition catInDB : resourceCategories.left().value()) {
+ if (newCat.getName().equals(catInDB.getName())) {
+ exist = true;
+ break;
+ }
+ }
+ if (!exist) {
+ CategoryDefinition categoryDefinition = new CategoryDefinition(newCat);
+ Either<CategoryDefinition, ActionStatus> resultCat = elementOperation
+ .createCategory(categoryDefinition, NodeTypeEnum.ResourceNewCategory, true);
+ if (resultCat.isRight()) {
+ log.debug("Failed to create resource category {}, error: {}", categoryDefinition, resultCat.right().value());
+ return false;
+ }
+ log.debug("resource category {} created", categoryDefinition);
+
+ // removed the pointless triple aliasing of the sub-category list
+ for (MigrationSubCategory msubcat : newCat.getSubcategories()) {
+ SubCategoryDefinition subcat = new SubCategoryDefinition(msubcat);
+ Either<SubCategoryDefinition, ActionStatus> resultSubcat = elementOperation.createSubCategory(
+ resultCat.left().value().getUniqueId(), subcat, NodeTypeEnum.ResourceSubcategory, true);
+ if (resultSubcat.isRight()) {
+ log.debug("Failed to create resource sub category {} error: {}", subcat, resultSubcat.right().value());
+ return false;
+ }
+ // fixed: the original logged the category definition instead of the sub-category
+ log.debug("resource sub category {} created for category {}", subcat, resultCat.left().value().getName());
+ }
+ }
+ } else {
+ // update exist — rename the category, then process its sub-categories
+ for (CategoryDefinition catInDB : resourceCategories.left().value()) {
+ if (newCat.getOldName().equals(catInDB.getName())) {
+ Either<CategoryData, TitanOperationStatus> updateSingleResult = updateSingleResourceCategory(
+ newCat, NodeTypeEnum.ResourceNewCategory);
+ if (updateSingleResult.isRight()) {
+ return false;
+ }
+
+ CategoryData categoryData = updateSingleResult.left().value();
+ for (MigrationSubCategory migSubCat : newCat.getSubcategories()) {
+ if (migSubCat.getOldName() == null) {
+ // create a new sub-category unless it already exists
+ boolean existSub = false;
+ for (SubCategoryDefinition subCatInDb : catInDB.getSubcategories()) {
+ if (subCatInDb.getName().equals(migSubCat.getName())) {
+ existSub = true;
+ break;
+ }
+ }
+ if (!existSub) {
+ SubCategoryDefinition subcat = new SubCategoryDefinition(migSubCat);
+
+ Either<SubCategoryDefinition, ActionStatus> resultSubcat = elementOperation
+ .createSubCategory((String) categoryData.getUniqueId(), subcat,
+ NodeTypeEnum.ResourceSubcategory, true);
+ if (resultSubcat.isRight()) {
+ log.debug("Failed to create resource sub category {} error: {}", subcat, resultSubcat.right().value());
+ return false;
+ }
+ // fixed placeholder/argument mismatch: one {} took two arguments before
+ log.debug("resource sub category {} created for category {}", resultSubcat.left().value().getName(), categoryData);
+ }
+ } else {
+ // was "== false" — plain boolean negation is the Java idiom
+ if (!updateSingleSubCategory(newCat, migSubCat, updateSingleResult.left().value())) {
+ return false;
+ }
+ }
+ }
+ break;
+ }
+ }
+ }
+ }
+ return true;
+ }
+
+ /**
+  * Renames one resource sub-category under the given category node: fetches all
+  * SUB_CATEGORY children, finds the one whose name equals the migration entry's
+  * old name, and updates its name, icons and normalized name.
+  *
+  * NOTE(review): if no child matches the old name the method silently returns
+  * true — presumably intentional for re-runs, but confirm.
+  *
+  * @param newCat       parent category migration entry (used for logging only)
+  * @param migSubCat    sub-category entry carrying the oldName -> name mapping
+  * @param categoryData the already-renamed parent category node
+  * @return false when the children cannot be fetched or the update fails
+  */
+ private boolean updateSingleSubCategory(MigrationCategory newCat, MigrationSubCategory migSubCat,
+ CategoryData categoryData) {
+
+ Either<List<ImmutablePair<SubCategoryData, GraphEdge>>, TitanOperationStatus> subcategories = titanGenericDao
+ .getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceNewCategory),
+ (String) categoryData.getUniqueId(), GraphEdgeLabels.SUB_CATEGORY,
+ NodeTypeEnum.ResourceSubcategory, SubCategoryData.class);
+
+ if (subcategories.isRight()) {
+ log.debug("Failed to get resource sub categories, for name {} error {}", newCat.getOldName(),
+ subcategories.right().value());
+ return false;
+ }
+
+ for (ImmutablePair<SubCategoryData, GraphEdge> pair : subcategories.left().value()) {
+ if (pair.getKey().getSubCategoryDataDefinition().getName().equals(migSubCat.getOldName())) {
+ SubCategoryData subCategoryData = pair.getKey();
+ subCategoryData.getSubCategoryDataDefinition().setName(migSubCat.getName());
+ subCategoryData.getSubCategoryDataDefinition().setIcons(migSubCat.getIcons());
+ subCategoryData.getSubCategoryDataDefinition()
+ .setNormalizedName(ValidationUtils.normalizeCategoryName4Uniqueness(migSubCat.getName()));
+ Either<SubCategoryData, TitanOperationStatus> updateSubNode = titanGenericDao
+ .updateNode(subCategoryData, SubCategoryData.class);
+ if (updateSubNode.isRight()) {
+ log.debug("Failed to update resource sub category {} error {}", subCategoryData,
+ updateSubNode.right().value());
+ return false;
+ }
+ log.debug("Update resource subcategory category {} ", subCategoryData);
+ // only the first matching sub-category is renamed
+ break;
+ }
+ }
+ return true;
+ }
+
+ /**
+  * Reads the category migration YAML file and parses it into migration
+  * categories grouped by component type. Service and resource sections are
+  * supported; the product section is not implemented yet, and unknown section
+  * names are logged and skipped.
+  *
+  * @param categoriesTypesYml path to the migration YAML file
+  * @return map of component-type param name to its migration entries, or null
+  *         when the file cannot be read
+  */
+ private Map<String, List<MigrationCategory>> createCategoriesFromYml(String categoriesTypesYml) {
+ String fileContent;
+ try {
+ fileContent = new String(readAllBytes(get(categoriesTypesYml)));
+ } catch (Exception e) {
+ log.debug("Failed to load category import file exception : ", e);
+ return null;
+ }
+
+ log.debug("received yaml: {}", fileContent);
+
+ Map<String, Object> parsedYaml = (Map<String, Object>) new Yaml().load(fileContent);
+ Map<String, List<MigrationCategory>> allCategories = new HashMap<>();
+
+ // enhanced for-loop instead of an explicit iterator
+ for (Entry<String, Object> sectionEntry : parsedYaml.entrySet()) {
+ String categoryType = sectionEntry.getKey();
+ List<MigrationCategory> categoriesPerType = null;
+ switch (categoryType) {
+ case ComponentTypeEnum.SERVICE_PARAM_NAME:
+ categoriesPerType = createServiceCategories((Map<String, Object>) sectionEntry.getValue());
+ break;
+ case ComponentTypeEnum.RESOURCE_PARAM_NAME:
+ categoriesPerType = createResourceCategories((Map<String, Object>) sectionEntry.getValue());
+ break;
+ case ComponentTypeEnum.PRODUCT_PARAM_NAME:
+ // TODO
+ break;
+ default:
+ log.debug("Not supported category type - {}", categoryType);
+ break;
+ }
+ if (categoriesPerType != null) {
+ allCategories.put(categoryType, categoriesPerType);
+ }
+ }
+ return allCategories;
+ }
+
+ /**
+  * Builds service-category migration entries from the parsed YAML map.
+  * Each entry carries name, icons, a normalized name derived from the name,
+  * and an optional oldName used by the rename flow.
+  *
+  * @param categories parsed "services" section of the migration YAML
+  * @return one MigrationCategory per YAML entry
+  */
+ private List<MigrationCategory> createServiceCategories(Map<String, Object> categories) {
+ List<MigrationCategory> definitions = new ArrayList<>();
+ for (Entry<String, Object> entry : categories.entrySet()) {
+ Map<String, Object> categoryInfo = (Map<String, Object>) entry.getValue();
+ MigrationCategory definition = new MigrationCategory();
+ String name = (String) categoryInfo.get("name");
+ definition.setName(name);
+ definition.setIcons((List<String>) categoryInfo.get("icons"));
+ definition.setNormalizedName(ValidationUtils.normalizeCategoryName4Uniqueness(name));
+ definition.setOldName((String) categoryInfo.get("oldName"));
+ definitions.add(definition);
+ }
+
+ return definitions;
+ }
+
+ /**
+  * Builds resource-category migration entries, including their nested
+  * sub-categories, from the parsed YAML map. Every category and sub-category
+  * gets a normalized name derived from its name; oldName is optional and
+  * drives the rename flow.
+  *
+  * NOTE(review): each category is assumed to contain a "subcategories" map —
+  * a missing key would NPE here, same as the original; confirm the YAML schema.
+  *
+  * @param categoryPerType parsed "resources" section of the migration YAML
+  * @return one MigrationCategory (with sub-categories) per YAML entry
+  */
+ private List<MigrationCategory> createResourceCategories(Map<String, Object> categoryPerType) {
+ List<MigrationCategory> definitions = new ArrayList<>();
+ for (Map.Entry<String, Object> entry : categoryPerType.entrySet()) {
+ Map<String, Object> categoryInfo = (Map<String, Object>) entry.getValue();
+ MigrationCategory definition = new MigrationCategory();
+ String name = (String) categoryInfo.get("name");
+ definition.setName(name);
+ definition.setNormalizedName(ValidationUtils.normalizeCategoryName4Uniqueness(name));
+ definition.setOldName((String) categoryInfo.get("oldName"));
+
+ Map<String, Object> subcategoriesInfo = (Map<String, Object>) categoryInfo.get("subcategories");
+ List<MigrationSubCategory> subDefinitions = new ArrayList<>();
+ for (Entry<String, Object> subcategoryEntry : subcategoriesInfo.entrySet()) {
+ Map<String, Object> subcategoryInfo = (Map<String, Object>) subcategoryEntry.getValue();
+ MigrationSubCategory subDefinition = new MigrationSubCategory();
+ String subcategoryName = (String) subcategoryInfo.get("name");
+ subDefinition.setName(subcategoryName);
+ subDefinition.setIcons((List<String>) subcategoryInfo.get("icons"));
+ subDefinition.setNormalizedName(ValidationUtils.normalizeCategoryName4Uniqueness(subcategoryName));
+ subDefinition.setOldName((String) subcategoryInfo.get("oldName"));
+
+ subDefinitions.add(subDefinition);
+ }
+
+ definition.setSubcategories(subDefinitions);
+ definitions.add(definition);
+ }
+ return definitions;
+ }
+
+ /**
+  * Back-fills CALCULATED_CAPABILITY / CALCULATED_REQUIREMENT edges for every
+  * resource instance in the graph. An instance that already has any calculated
+  * capability, requirement or fulfilled-requirement edge is skipped; otherwise
+  * the edges are created from the capabilities/requirements of its origin
+  * resource.
+  *
+  * @return false on the first failure, true otherwise (including "no instances")
+  */
+ private boolean updateCalculatedEdges() {
+ log.debug("update calculated edges STARTED");
+
+ // null criteria = fetch all nodes of this type
+ Either<List<ComponentInstanceData>, TitanOperationStatus> allInstances = titanGenericDao
+ .getByCriteria(NodeTypeEnum.ResourceInstance, null, ComponentInstanceData.class);
+ if (allInstances.isRight() && !allInstances.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+ log.debug(
+ "updateCalculatedEdges failed fetch all resource instances ,error " + allInstances.right().value());
+ return false;
+ }
+ if (allInstances.isRight() && allInstances.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+ log.debug("updateCalculatedEdges - no VFs");
+ return true;
+ }
+ List<ComponentInstanceData> listOfInstances = allInstances.left().value();
+ for (ComponentInstanceData instance : listOfInstances) {
+ // check if already have calculated edges — any of the three edge kinds
+ // below marks the instance as already processed
+ log.debug("start handle instance {}", instance.getUniqueId());
+ boolean needProcess = true;
+ Either<List<ImmutablePair<CapabilityData, GraphEdge>>, TitanOperationStatus> vfci = titanGenericDao
+ .getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance),
+ instance.getUniqueId(), GraphEdgeLabels.CALCULATED_CAPABILITY, NodeTypeEnum.Capability,
+ CapabilityData.class);
+ if (vfci.isRight()) {
+ // NOT_FOUND is expected (no edges yet); any other status aborts
+ if (!vfci.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+ log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error: {}",
+ instance.getComponentInstDataDefinition().getComponentUid(),
+ vfci.right().value());
+ return false;
+ }
+ } else {
+ if (vfci.left().value().size() > 0) {
+ needProcess = false;
+ }
+ }
+ Either<List<ImmutablePair<RequirementData, GraphEdge>>, TitanOperationStatus> vfciReq = titanGenericDao
+ .getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance),
+ instance.getUniqueId(), GraphEdgeLabels.CALCULATED_REQUIREMENT, NodeTypeEnum.Requirement,
+ RequirementData.class);
+ if (vfciReq.isRight()) {
+ if (!vfciReq.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+ log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error: {}",
+ instance.getComponentInstDataDefinition().getComponentUid(),
+ vfciReq.right().value());
+ return false;
+ }
+ } else {
+ if (vfciReq.left().value().size() > 0) {
+ needProcess = false;
+ }
+ }
+ Either<List<ImmutablePair<RequirementData, GraphEdge>>, TitanOperationStatus> vfciReqFF = titanGenericDao
+ .getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance),
+ instance.getUniqueId(), GraphEdgeLabels.CALCULATED_REQUIREMENT_FULLFILLED,
+ NodeTypeEnum.Requirement, RequirementData.class);
+ if (vfciReqFF.isRight()) {
+
+ if (!vfciReqFF.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+ log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource "
+ + instance.getComponentInstDataDefinition().getComponentUid() + " error "
+ + vfciReqFF.right().value());
+ return false;
+ }
+ } else {
+ if (vfciReqFF.left().value().size() > 0) {
+ needProcess = false;
+ }
+ }
+
+ if (needProcess == false) {
+ log.debug("updateCalculatedEdges : for instance {} calculated capabilty/requirement already created", instance.getUniqueId());
+ continue;
+ }
+ // load the origin resource and create the calculated edges from its
+ // requirements and capabilities
+ String originId = instance.getComponentInstDataDefinition().getComponentUid();
+ Either<Resource, StorageOperationStatus> resourceE = resourceOperation.getResource(originId, true);
+ if (resourceE.isRight()) {
+ log.debug("updateCalculatedEdges failed to fetch origin resource with id {} error: {}", originId, resourceE.right().value());
+ return false;
+ }
+ Resource resource = resourceE.left().value();
+ Map<String, List<RequirementDefinition>> requirements = resource.getRequirements();
+ if (createCalculatedRequirementsForInstance(instance, requirements) != true) {
+ return false;
+ }
+ Map<String, List<CapabilityDefinition>> capabilities = resource.getCapabilities();
+ if (createCalculatedCapabilitiesForInstance(instance, capabilities) != true) {
+ return false;
+ }
+ log.debug("finish handle instance {}", instance.getUniqueId());
+ }
+ log.debug("update calculated edges ENDED");
+ return true;
+ }
+
+ /**
+  * Creates a CALCULATED_CAPABILITY edge from the component instance to every
+  * capability node of its origin resource, carrying name/owner/source edge
+  * properties (owner/source filled by fillEdgeProperties).
+  *
+  * @param instance     the component instance the edges start from
+  * @param capabilities origin resource capabilities, grouped by type
+  * @return false on the first failure, true when all edges were created
+  */
+ private boolean createCalculatedCapabilitiesForInstance(ComponentInstanceData instance,
+ Map<String, List<CapabilityDefinition>> capabilities) {
+ for (Map.Entry<String, List<CapabilityDefinition>> entry : capabilities.entrySet()) {
+ for (CapabilityDefinition capability : entry.getValue()) {
+ Either<CapabilityData, TitanOperationStatus> capNode = titanGenericDao.getNode(
+ UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Capability), capability.getUniqueId(),
+ CapabilityData.class);
+ if (capNode.isRight()) {
+ // switched from string concatenation to parameterized logging
+ log.debug("createCalculatedCapabilitiesForInstance failed to fetch capability node with id {} error {}",
+ capability.getUniqueId(), capNode.right().value());
+ return false;
+ }
+ Map<String, Object> props = new HashMap<>();
+ props.put(GraphEdgePropertiesDictionary.NAME.getProperty(), capability.getName());
+ // was "!= true" — plain boolean negation is the Java idiom
+ if (!fillEdgeProperties(instance, props)) {
+ return false;
+ }
+
+ Either<GraphRelation, TitanOperationStatus> createRelation = titanGenericDao.createRelation(instance,
+ capNode.left().value(), GraphEdgeLabels.CALCULATED_CAPABILITY, props);
+ if (createRelation.isRight()) {
+ TitanOperationStatus titanOperationStatus = createRelation.right().value();
+ // fixed copy-pasted wording: this path creates a capability, not a requirement
+ log.debug(
+ "Failed to create calculated capability from component instance {} to capability {}, error: {}",
+ instance.getUniqueId(), capNode.left().value().getUniqueId(), titanOperationStatus);
+ return false;
+ }
+ log.debug("CALCULATED_CAPABILITY was created from {} to {} with props: {}", capNode.left().value().getUniqueId(), instance.getUniqueId(), props);
+ }
+ }
+ return true;
+ }
+
+ /**
+  * Fills the OWNER_ID and SOURCE properties for a calculated edge. For a VF
+  * instance the owner is the VFC instance inside the origin VF (looked up via
+  * the RESOURCE_INST edge); otherwise the instance itself is the owner.
+  *
+  * NOTE(review): for the VF branch this takes get(0) of the children list —
+  * assumes the origin VF has at least one VFC instance; an empty list would
+  * throw. Confirm that invariant holds for migrated data.
+  *
+  * @param instance the component instance the edge starts from
+  * @param props    edge property map to fill (mutated in place)
+  * @return false when the VFC instance lookup fails, true otherwise
+  */
+ private boolean fillEdgeProperties(ComponentInstanceData instance, Map<String, Object> props) {
+ if (instance.getComponentInstDataDefinition().getOriginType().equals(OriginTypeEnum.VF)) {
+ Either<List<ImmutablePair<ComponentInstanceData, GraphEdge>>, TitanOperationStatus> vfci = titanGenericDao
+ .getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource),
+ instance.getComponentInstDataDefinition().getComponentUid(), GraphEdgeLabels.RESOURCE_INST,
+ NodeTypeEnum.ResourceInstance, ComponentInstanceData.class);
+ if (vfci.isRight()) {
+ log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error: {}",
+ instance.getComponentInstDataDefinition().getComponentUid(),
+ vfci.right().value());
+ return false;
+ }
+ ImmutablePair<ComponentInstanceData, GraphEdge> immutablePair = vfci.left().value().get(0);
+ String vfciId = immutablePair.getLeft().getUniqueId();
+ props.put(GraphEdgePropertiesDictionary.OWNER_ID.getProperty(), vfciId);
+ props.put(GraphEdgePropertiesDictionary.SOURCE.getProperty(),
+ immutablePair.getLeft().getComponentInstDataDefinition().getComponentUid());
+
+ } else {
+ // non-VF instance: the instance itself owns the edge
+ props.put(GraphEdgePropertiesDictionary.OWNER_ID.getProperty(), instance.getUniqueId());
+ props.put(GraphEdgePropertiesDictionary.SOURCE.getProperty(),
+ instance.getComponentInstDataDefinition().getComponentUid());
+ }
+ return true;
+ }
+
+ /**
+  * Creates a CALCULATED_REQUIREMENT edge from the component instance to every
+  * requirement node of its origin resource, carrying name/owner/source edge
+  * properties (owner/source filled by fillEdgeProperties).
+  *
+  * @param instance     the component instance the edges start from
+  * @param requirements origin resource requirements, grouped by type
+  * @return false on the first failure, true when all edges were created
+  */
+ private boolean createCalculatedRequirementsForInstance(ComponentInstanceData instance,
+ Map<String, List<RequirementDefinition>> requirements) {
+ for (Map.Entry<String, List<RequirementDefinition>> entry : requirements.entrySet()) {
+ for (RequirementDefinition requirement : entry.getValue()) {
+ Either<RequirementData, TitanOperationStatus> reqNode = titanGenericDao.getNode(
+ UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Requirement), requirement.getUniqueId(),
+ RequirementData.class);
+ if (reqNode.isRight()) {
+ log.debug("updateCalculatedEdges failed to fetch requirement node with id {} error: {}", requirement.getUniqueId(), reqNode.right().value());
+ return false;
+ }
+ Map<String, Object> props = new HashMap<>();
+ props.put(GraphEdgePropertiesDictionary.NAME.getProperty(), requirement.getName());
+
+ // was "!= true" — plain boolean negation is the Java idiom
+ if (!fillEdgeProperties(instance, props)) {
+ return false;
+ }
+
+ Either<GraphRelation, TitanOperationStatus> createRelation = titanGenericDao.createRelation(instance,
+ reqNode.left().value(), GraphEdgeLabels.CALCULATED_REQUIREMENT, props);
+ if (createRelation.isRight()) {
+ TitanOperationStatus titanOperationStatus = createRelation.right().value();
+ log.debug(
+ "Failed to create calculated requirement from component instance {} to requirement {}, error: {}",
+ instance.getUniqueId(), reqNode.left().value().getUniqueId(), titanOperationStatus);
+ return false;
+ }
+ log.debug("CALCULATED_REQUIREMENT was created from {} to {} with props: {}", reqNode.left().value().getUniqueId(), instance.getUniqueId(), props);
+ }
+ }
+ return true;
+ }
+
+ /**
+  * Back-fills capability/requirement owner and id fields on every
+  * relationship-instance node and re-labels the matching calculated
+  * requirement edge as fulfilled. Relations that already carry both owner ids
+  * are skipped, making the migration safe to re-run.
+  *
+  * @return false on the first failure, true otherwise (including "no relations")
+  */
+ private boolean updateRelations() {
+ log.debug("update relations and edges STARTED");
+ Either<List<RelationshipInstData>, TitanOperationStatus> allRelations = titanGenericDao
+ .getByCriteria(NodeTypeEnum.RelationshipInst, null, RelationshipInstData.class);
+ if (allRelations.isRight()) {
+ if (allRelations.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+ log.debug("updateRelations : No relations to update. updateRelations ENDED");
+ return true;
+ }
+ // fixed missing {} placeholder — the status argument was silently dropped before
+ log.debug("updateRelations : failed to fetch all relation nodes , error {}", allRelations.right().value());
+ return false;
+ }
+ for (RelationshipInstData rel : allRelations.left().value()) {
+ if (rel.getCapabilityOwnerId() != null && rel.getRequirementOwnerId() != null) {
+ log.debug("updateRelations : for relation {} all fields alredy fixed -> {}", rel.getUniqueId(), rel);
+ continue;
+ }
+ // update capability parameters (was "!= true" — use boolean negation)
+ if (!updateCapabiltyFieldsInRelation(rel)) {
+ return false;
+ }
+
+ // update requirement parameters and set calculated edge to fulfilled
+ if (!updateRequirementFieldsInRelation(rel)) {
+ return false;
+ }
+
+ Either<RelationshipInstData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(rel,
+ RelationshipInstData.class);
+ if (updateNode.isRight()) {
+ log.debug("updateRelations : failed to update relation node with id {}, error: {}", rel.getUniqueId(), updateNode.right().value());
+ return false;
+ }
+ log.debug("Relations was updated with values {}", rel);
+ }
+ log.debug("update relations and edges ENDED");
+ return true;
+ }
+
+ /**
+  * Fills the requirement-side fields of a relationship instance: resolves the
+  * requirement owner (the VFC instance inside the origin VF for VF instances,
+  * the instance itself otherwise), records the requirement id, and re-labels
+  * the matching CALCULATED_REQUIREMENT edge as CALCULATED_REQUIREMENT_FULLFILLED
+  * by adding a new edge with copied properties and removing the old one.
+  *
+  * NOTE(review): the requirement name is derived from the last dot-separated
+  * segment of the relation's unique id, and edgesForNode.get(0) assumes at
+  * least one matching edge exists — confirm both invariants for migrated data.
+  *
+  * @param rel relationship instance node, mutated in place (persisted by caller)
+  * @return false on any lookup/update failure, true otherwise
+  */
+ private boolean updateRequirementFieldsInRelation(RelationshipInstData rel) {
+ Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> reqInst = titanGenericDao
+ .getParentNode(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.RelationshipInst), rel.getUniqueId(),
+ GraphEdgeLabels.RELATIONSHIP_INST, NodeTypeEnum.ResourceInstance, ComponentInstanceData.class);
+ if (reqInst.isRight()) {
+ log.debug("updateRelations : failed to fetch capabilty component instance for relation {}, error: {}", rel.getUniqueId(), reqInst.right().value());
+ return false;
+ }
+ ComponentInstanceData requirementInstanceData = reqInst.left().value().getLeft();
+ ComponentInstanceDataDefinition reqRI = requirementInstanceData.getComponentInstDataDefinition();
+ if (reqRI.getOriginType().equals(OriginTypeEnum.VF)) {
+ // VF instance: the real owner is the VFC instance inside the origin VF
+ Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> vfcInstInOrigVf = titanGenericDao
+ .getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource),
+ reqRI.getComponentUid(), GraphEdgeLabels.RESOURCE_INST, NodeTypeEnum.ResourceInstance,
+ ComponentInstanceData.class, null);
+ if (vfcInstInOrigVf.isRight()) {
+ log.debug("updateRelations : failed to fetch VFC instance in origin VF with id {}, error: {}", reqRI.getComponentUid(), vfcInstInOrigVf.right().value());
+ return false;
+ }
+ rel.setRequirementOwnerId(vfcInstInOrigVf.left().value().getLeft().getUniqueId());
+ } else {
+ rel.setRequirementOwnerId(reqRI.getUniqueId());
+ }
+ // get vertex
+ Either<TitanVertex, TitanOperationStatus> vertexReqRI = titanGenericDao.getVertexByProperty(
+ UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), requirementInstanceData.getUniqueId());
+ if (vertexReqRI.isRight()) {
+ log.debug("updateRelations : failed to fetch veterx for instance {}, error: {}", requirementInstanceData.getUniqueId(), vertexReqRI.right().value());
+ return false;
+ }
+ // requirement name = last dot-separated segment of the relation's unique id
+ String[] splitIds = rel.getUniqueId().split("\\.");
+ String reqName = splitIds[splitIds.length - 1];
+ Map<String, Object> props = new HashMap<>();
+ props.put(GraphEdgePropertiesDictionary.NAME.getProperty(), reqName);
+ Either<List<Edge>, TitanOperationStatus> edgesForNode = titanGenericDao
+ .getOutgoingEdgesByCriteria(vertexReqRI.left().value(), GraphEdgeLabels.CALCULATED_REQUIREMENT, props);
+ if (edgesForNode.isRight()) {
+ log.debug("updateRelations : failed to fetch edges for instance {}, error: {}", requirementInstanceData.getUniqueId(), edgesForNode.right().value());
+ return false;
+ }
+ Edge edge = edgesForNode.left().value().get(0);
+ String reqId = (String) titanGenericDao.getProperty((TitanVertex) edge.inVertex(),
+ GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+ rel.setRequirementId(reqId);
+
+ // change edge label: add the fulfilled edge with copied properties, then
+ // remove the original calculated-requirement edge
+ TitanEdge newEdge = (TitanEdge) vertexReqRI.left().value()
+ .addEdge(GraphEdgeLabels.CALCULATED_REQUIREMENT_FULLFILLED.getProperty(), edge.inVertex());
+ titanGenericDao.setProperties(newEdge, titanGenericDao.getProperties(edge));
+ edge.remove();
+
+ log.debug("Edge was changed to CALCULATED_REQUIREMENT_FULLFILLED for relation between {} and {}", reqId, requirementInstanceData.getUniqueId());
+
+ return true;
+ }
+
+ /**
+  * Fills the capability-side fields of a relationship instance: resolves the
+  * capability owner (the VFC instance inside the origin VF for VF instances,
+  * the instance itself otherwise) and records the capability id found on the
+  * matching CALCULATED_CAPABILITY edge.
+  *
+  * NOTE(review): unlike the requirement side, the capability name is read from
+  * the CAPABILITY_NODE edge properties (not from the relation id), and
+  * edgesForNode.get(0) assumes at least one matching edge exists — confirm.
+  *
+  * @param rel relationship instance node, mutated in place (persisted by caller)
+  * @return false on any lookup failure, true otherwise
+  */
+ public boolean updateCapabiltyFieldsInRelation(RelationshipInstData rel) {
+ // update capability parameters
+ Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> capInst = titanGenericDao
+ .getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.RelationshipInst),
+ rel.getUniqueId(), GraphEdgeLabels.CAPABILITY_NODE, NodeTypeEnum.ResourceInstance,
+ ComponentInstanceData.class, null);
+ if (capInst.isRight()) {
+ log.debug("updateRelations : failed to fetch capabilty component instance for relation {}, error: {}", rel.getUniqueId(), capInst.right().value());
+ return false;
+ }
+ ComponentInstanceData capabiltyInstanceData = capInst.left().value().getLeft();
+ ComponentInstanceDataDefinition capRI = capabiltyInstanceData.getComponentInstDataDefinition();
+ if (capRI.getOriginType().equals(OriginTypeEnum.VF)) {
+ // VF instance: the real owner is the VFC instance inside the origin VF
+ Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> vfcInstInOrigVf = titanGenericDao
+ .getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource),
+ capRI.getComponentUid(), GraphEdgeLabels.RESOURCE_INST, NodeTypeEnum.ResourceInstance,
+ ComponentInstanceData.class, null);
+ if (vfcInstInOrigVf.isRight()) {
+ log.debug("updateRelations : failed to fetch VFC instance in origin VF with id {}, error: {}", capRI.getComponentUid(), vfcInstInOrigVf.right().value());
+ return false;
+ }
+ rel.setCapabilityOwnerId(vfcInstInOrigVf.left().value().getLeft().getUniqueId());
+ } else {
+ rel.setCapabilityOwnerId(capRI.getUniqueId());
+ }
+
+ // get vertex
+ Either<TitanVertex, TitanOperationStatus> vertexCapRI = titanGenericDao.getVertexByProperty(
+ UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), capabiltyInstanceData.getUniqueId());
+ if (vertexCapRI.isRight()) {
+ log.debug("updateRelations : failed to fetch veterx for instance {}, error: {}", capabiltyInstanceData.getUniqueId(), vertexCapRI.right().value());
+ return false;
+ }
+ // String[] splitIds = rel.getUniqueId().split("\\.");
+ String capName = (String) capInst.left().value().getRight().getProperties()
+ .get(GraphEdgePropertiesDictionary.NAME.getProperty());// splitIds[splitIds.length
+ // - 1];
+ Map<String, Object> props = new HashMap<>();
+ props.put(GraphEdgePropertiesDictionary.NAME.getProperty(), capName);
+ Either<List<Edge>, TitanOperationStatus> edgesForNode = titanGenericDao
+ .getOutgoingEdgesByCriteria(vertexCapRI.left().value(), GraphEdgeLabels.CALCULATED_CAPABILITY, props);
+ if (edgesForNode.isRight()) {
+ log.debug("updateRelations : failed to fetch edges for instance {}, error: {}", capabiltyInstanceData.getUniqueId(), edgesForNode.right().value());
+ return false;
+ }
+ Edge edge = edgesForNode.left().value().get(0);
+ String capId = (String) titanGenericDao.getProperty((TitanVertex) edge.inVertex(),
+ GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+ rel.setCapabiltyId(capId);
+
+ return true;
+ }
+
+ // private boolean fixDerivedFv() {
+ // Map<String, Object> props = new HashMap<String, Object>();
+ // props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(),
+ // ResourceTypeEnum.VF.name());
+ // Either<List<ResourceMetadataData>, TitanOperationStatus> allVF =
+ // titanGenericDao.getByCriteria(NodeTypeEnum.Resource, props,
+ // ResourceMetadataData.class);
+ // if (allVF.isRight() &&
+ // !allVF.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+ // log.debug("fixDerivedFv failed fetch all VF resources,error {}", allVF.right().value());
+ // return false;
+ // }
+ // if ( allVF.right().value().equals(TitanOperationStatus.NOT_FOUND) ){
+ // log.debug("fixDerivedFv - no VF");
+ // return true;
+ // }
+ // Set<String> finishedResources = new HashSet<>();
+ //
+ // for (ResourceMetadataData metadata : allVF.left().value()) {
+ // ComponentMetadataDataDefinition metadataDD =
+ // metadata.getMetadataDataDefinition();
+ //
+ // if (!finishedResources.contains(metadataDD.getUniqueId())) {
+ // Either<List<String>, StorageOperationStatus> processedIds =
+ // handleVfGroup(metadata);
+ // if (processedIds.isRight()) {
+ // log.debug("fixDerivedFv failed to process FV group {}", processedIds.right().value());
+ // return false;
+ // }
+ // finishedResources.addAll(processedIds.left().value());
+ // }
+ // }
+ // return true;
+ // }
+
+ /**
+  * Collects all VF resources that belong to the same "group" as the given one
+  * (same name, plus all resources sharing the UUID of the group's 1.0 version)
+  * and runs handleSingleVf on each distinct id.
+  *
+  * NOTE(review): when no 1.0 version exists, get(0) is used as the UUID source —
+  * assumes the name query returned at least one resource; an empty result would
+  * throw. Confirm the invariant (the input metadata itself should match).
+  *
+  * @param metadata a VF resource whose name identifies the group
+  * @return the list of processed resource ids, or GENERAL_ERROR on any failure
+  */
+ private Either<List<String>, StorageOperationStatus> handleVfGroup(ResourceMetadataData metadata) {
+ Map<String, Object> props = new HashMap<String, Object>();
+ props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
+ props.put(GraphPropertiesDictionary.NAME.getProperty(), metadata.getMetadataDataDefinition().getName());
+
+ List<String> finished = new ArrayList<>();
+
+ Either<List<ResourceMetadataData>, TitanOperationStatus> allVFByName = titanGenericDao
+ .getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+ if (allVFByName.isRight()) {
+ log.debug("fixDerivedFv failed fetch all VF resources,error {}", allVFByName.right().value());
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+ // collect distinct ids; remember the UUID of the 1.0 version if present
+ Set<String> nonDuplicatedId = new HashSet<>();
+ String uuid10 = null;
+ for (ResourceMetadataData mdata : allVFByName.left().value()) {
+ String version = mdata.getMetadataDataDefinition().getVersion();
+ if (version.equals("1.0")) {
+ uuid10 = mdata.getMetadataDataDefinition().getUUID();
+ // break;
+ }
+ nonDuplicatedId.add((String) mdata.getUniqueId());
+ }
+ if (uuid10 == null) {
+ // no 1.0 version found — fall back to the first resource's UUID
+ uuid10 = allVFByName.left().value().get(0).getMetadataDataDefinition().getUUID();
+ }
+ props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
+ props.put(GraphPropertiesDictionary.UUID.getProperty(), uuid10);
+
+ // widen the group: also include every VF sharing that UUID
+ Either<List<ResourceMetadataData>, TitanOperationStatus> allVFByUUID = titanGenericDao
+ .getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+ if (allVFByUUID.isRight()) {
+ log.debug("fixDerivedFv failed fetch all VF resources by UUID {}, error: {}", uuid10, allVFByUUID.right().value());
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+ for (ResourceMetadataData mdata : allVFByUUID.left().value()) {
+ nonDuplicatedId.add((String) mdata.getUniqueId());
+ }
+ Either<TitanGraph, TitanOperationStatus> graph = titanGenericDao.getGraph();
+ if (graph.isRight()) {
+ log.debug("fixDerivedFv failed - No titan graph ,error {}", graph.right().value());
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+ // Map<String, String> derivedMapping = new HashMap<>();
+ for (String resourceId : nonDuplicatedId) {
+ // StorageOperationStatus handleSingleVfResult =
+ // handleSingleVf(finished, derivedMapping, resourceId);
+ StorageOperationStatus handleSingleVfResult = handleSingleVf(finished, resourceId);
+ if (!handleSingleVfResult.equals(StorageOperationStatus.OK)) {
+ log.debug("fixDerivedFv failed - handleSingleVfResult failed for resource {}, error: {}", resourceId, handleSingleVfResult);
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+ }
+ return Either.left(finished);
+ }
+
	// private StorageOperationStatus handleSingleVf(List<String> finished,
	// Map<String, String> derivedMapping, String resourceId) {
	/**
	 * Inspects a single VF resource: when the resource has an outgoing
	 * DERIVED_FROM edge, its id is recorded in {@code finished}. The actual
	 * clone/instance-creation steps are still unimplemented (see TODOs below).
	 *
	 * @param finished   accumulator of processed resource ids (mutated in place)
	 * @param resourceId unique id of the resource to inspect
	 * @return OK on success (including the "no derived edge" case),
	 *         GENERAL_ERROR when the resource vertex could not be fetched.
	 */
	private StorageOperationStatus handleSingleVf(List<String> finished, String resourceId) {
		Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao
				.getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), resourceId);
		if (vertexByProperty.isRight()) {
			log.debug("fixDerivedFv failed to fetch resource by id {}, error: {}", resourceId, vertexByProperty.right().value());
			return StorageOperationStatus.GENERAL_ERROR;
		}
		Vertex vertexR = vertexByProperty.left().value();
		Iterator<Vertex> vertexDIter = vertexR.vertices(Direction.OUT, GraphEdgeLabels.DERIVED_FROM.getProperty());
		if (vertexDIter != null && vertexDIter.hasNext()) {
			// move edges
			// must be only one
			TitanVertex vertexD = (TitanVertex) vertexDIter.next();
			// NOTE(review): idDerived is computed but not used yet - the clone
			// step that would consume it is still a TODO below.
			String idDerived = (String) titanGenericDao.getProperty(vertexD,
					GraphPropertiesDictionary.UNIQUE_ID.getProperty());

			// TODO clone resource

			// TODO add instance of new resource to VF

			// add to vf instance of vfc
			finished.add(resourceId);
		} else {
			log.debug("No derived edges for resource id {}", resourceId);
		}
		return StorageOperationStatus.OK;
	}
+
+ private boolean updateComponentInstanceType() {
+ log.debug("update component instances type STARTED");
+ Either<List<ComponentInstanceData>, TitanOperationStatus> allInstances = titanGenericDao
+ .getByCriteria(NodeTypeEnum.ResourceInstance, null, ComponentInstanceData.class);
+ if (allInstances.isRight()) {
+ if (allInstances.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+ log.debug("updateComponentInstanceType: no instances ti update ");
+ return true;
+ }
+ log.debug("updateComponentInstanceType failed fetch all resource instances ,error {}", allInstances.right().value());
+ return false;
+ }
+
+ List<ComponentInstanceData> listOfInstances = allInstances.left().value();
+ for (ComponentInstanceData instance : listOfInstances) {
+ String originId = instance.getComponentInstDataDefinition().getComponentUid();
+ Either<ComponentMetadataData, TitanOperationStatus> nodeResource = titanGenericDao.getNode(
+ UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), originId, ComponentMetadataData.class);
+ if (nodeResource.isRight()) {
+ log.debug("updateComponentInstanceType failed to fetch origin resource with id {}, error: {}", originId, nodeResource.right().value());
+ return false;
+ }
+ ResourceTypeEnum resourceType = ((ResourceMetadataDataDefinition) nodeResource.left().value()
+ .getMetadataDataDefinition()).getResourceType();
+ if (resourceType == null) {
+ log.debug("updateComponentInstanceType failed, no resource type for origin resource with id {}", originId);
+ return false;
+ }
+ OriginTypeEnum originType;
+ switch (resourceType) {
+ case VF:
+ originType = OriginTypeEnum.VF;
+ break;
+ case VFC:
+ originType = OriginTypeEnum.VFC;
+ break;
+ case VL:
+ originType = OriginTypeEnum.VL;
+ break;
+ case CP:
+ originType = OriginTypeEnum.CP;
+ break;
+ default:
+ log.debug("updateComponentInstanceType failed, no supported resource type {} for origin resource with id {}", resourceType, originId);
+ return false;
+ }
+ instance.getComponentInstDataDefinition().setOriginType(originType);
+
+ Either<ComponentInstanceData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(instance,
+ ComponentInstanceData.class);
+ if (updateNode.isRight()) {
+ log.debug("updateComponentInstanceType failed, failed to update component instance node with id {}, error: {}", instance.getUniqueId(), updateNode.right().value());
+ return false;
+ }
+ log.debug("For instance with id {} the origin type was detected as {}", instance.getUniqueId(), originType);
+ }
+ log.debug("update component instances type ENDED");
+ return true;
+ }
+
+ private boolean addResourceCounterToResources() {
+
+ Either<List<ResourceMetadataData>, TitanOperationStatus> allResources = titanGenericDao
+ .getByCriteria(NodeTypeEnum.Resource, null, ResourceMetadataData.class);
+ if (allResources.isRight()) {
+ if (allResources.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+ log.debug("addResourceCounterToResources - no resources");
+ return true;
+ }
+ log.debug("addResourceCounterToResources failed fetch all resources,error {}", allResources.right().value());
+ return false;
+ }
+ for (ResourceMetadataData resource : allResources.left().value()) {
+ Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao.getVertexByProperty(
+ UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), resource.getUniqueId());
+ if (vertexByProperty.isRight()) {
+ log.error("failed to add instanceCounter to VF {} . error is: {}", resource.getUniqueId(),
+ vertexByProperty.right().value().name());
+ return false;
+ }
+ Vertex vfVertex = vertexByProperty.left().value();
+ if (!vfVertex.property(GraphPropertiesDictionary.INSTANCE_COUNTER.getProperty()).isPresent()) {
+ vfVertex.property(GraphPropertiesDictionary.INSTANCE_COUNTER.getProperty(), 0);
+ }
+ }
+ return true;
+ }
+
	/**
	 * 1602-to-1604 migration step that splits each VF with a derivedFrom into a
	 * VF + VFC pair: strips the VF of its properties, additional info and
	 * DERIVED_FROM edge, creates a new VFC from the VF's data and places an
	 * instance of that VFC on the VF. Lifecycle state and the deleted flag are
	 * carried over to the VFC.
	 *
	 * @return true when all VFs were migrated (or none exist), false on the first failure.
	 */
	private boolean fixDerivedVf() {

		Map<String, Object> props = new HashMap<String, Object>();
		props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
		Either<List<ResourceMetadataData>, TitanOperationStatus> allVF = titanGenericDao
				.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
		if (allVF.isRight()) {
			// NOT_FOUND just means there is nothing to migrate.
			if (allVF.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
				log.debug("fixDerivedVf - no VFs");
				return true;
			}
			log.debug("fixDerivedFv failed fetch all VF resources,error {}", allVF.right().value());
			return false;
		}

		// All versions of the same VF (same UUID) must map to the same new VFC UUID.
		Map<String, String> vfUuidToVfcUuid = new HashMap<String, String>();
		for (ResourceMetadataData metadata : allVF.left().value()) {
			Either<Resource, StorageOperationStatus> eitherResource = resourceOperation
					.getResource(metadata.getMetadataDataDefinition().getUniqueId(), true);
			if (eitherResource.isRight()) {
				log.error("failed to migrate VF {} from version 1602 to version 1604. error is: {}",
						metadata.getMetadataDataDefinition().getUniqueId(), eitherResource.right().value().name());
				return false;
			}
			Resource vfResource = eitherResource.left().value();
			// VFs with no derivedFrom were already in the 1604 shape - skip them.
			if (vfResource.getDerivedFrom() == null || vfResource.getDerivedFrom().isEmpty()) {
				continue;
			}
			Boolean isVfDeleted = vfResource.getIsDeleted();
			String vfUUID = vfResource.getUUID();
			String vfcUUID = vfUuidToVfcUuid.getOrDefault(vfUUID, null);
			if (vfcUUID == null) {
				// First time we see this VF UUID - allocate the VFC's UUID.
				vfcUUID = UUID.randomUUID().toString();
				vfUuidToVfcUuid.put(vfUUID, vfcUUID);
			}

			// handle lifecycle
			// RFC/certification-in-progress VFs are checked in first; a
			// checked-out VF yields a checked-in VFC.
			String vfUniqueId = vfResource.getUniqueId();
			LifecycleStateEnum vfcTargetState = vfResource.getLifecycleState();
			if (vfcTargetState.equals(LifecycleStateEnum.READY_FOR_CERTIFICATION)
					|| vfcTargetState.equals(LifecycleStateEnum.CERTIFICATION_IN_PROGRESS)) {
				User user = new User();
				user.setUserId(vfResource.getLastUpdaterUserId());
				Either<? extends Component, StorageOperationStatus> checkinComponent = lifecycleOperaion
						.checkinComponent(NodeTypeEnum.Resource, vfResource, user, user, true);
				if (checkinComponent.isRight()) {
					log.error("failed to checkin VF {}. error={}", vfUniqueId, checkinComponent.right().value().name());
					return false;
				}
			} else if (vfcTargetState.equals(LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT)) {
				vfcTargetState = LifecycleStateEnum.NOT_CERTIFIED_CHECKIN;
			}

			// delete VF Properties
			// NOT_FOUND/OK results are tolerated: properties may already be gone.
			List<PropertyDefinition> properties = vfResource.getProperties();
			if (properties != null && !properties.isEmpty()) {
				Either<Map<String, PropertyDefinition>, StorageOperationStatus> deleteAllProperties = propertyOperation
						.deleteAllPropertiesAssociatedToNode(NodeTypeEnum.Resource, vfUniqueId);
				if (deleteAllProperties.isRight()
						&& !deleteAllProperties.right().value().equals(StorageOperationStatus.NOT_FOUND)
						&& !deleteAllProperties.right().value().equals(StorageOperationStatus.OK)) {
					log.error("failed to delete properties of VF {} . error is: {}",
							metadata.getMetadataDataDefinition().getUniqueId(),
							deleteAllProperties.right().value().name());
					return false;
				}
			}
			// delete VF Additional Info
			List<AdditionalInformationDefinition> additionalInformation = vfResource.getAdditionalInformation();
			if (additionalInformation != null && !additionalInformation.isEmpty()) {
				Either<AdditionalInformationDefinition, StorageOperationStatus> deleteAllAdditionalInformationParameters = additionalInformationOperation
						.deleteAllAdditionalInformationParameters(NodeTypeEnum.Resource, vfUniqueId, true);
				if (deleteAllAdditionalInformationParameters.isRight()
						&& !deleteAllAdditionalInformationParameters.right().value().equals(StorageOperationStatus.OK)
						&& !deleteAllAdditionalInformationParameters.right().value()
								.equals(StorageOperationStatus.NOT_FOUND)) {
					log.error("failed to delete properties of VF {} . error is: {}",
							metadata.getMetadataDataDefinition().getUniqueId(),
							deleteAllAdditionalInformationParameters.right().value().name());
					return false;
				}
			}
			// delete VF derivedFrom
			GraphRelation derivedFromRelation = new GraphRelation(GraphEdgeLabels.DERIVED_FROM.getProperty());
			derivedFromRelation.setFrom(new RelationEndPoint(NodeTypeEnum.Resource,
					UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), vfUniqueId));
			Either<GraphRelation, TitanOperationStatus> deleteDerivedFromRelation = titanGenericDao
					.deleteOutgoingRelation(derivedFromRelation);
			if (deleteDerivedFromRelation.isRight()) {
				log.error("failed to delete derivedFrom relation of VF {} . error is: {}",
						metadata.getMetadataDataDefinition().getUniqueId(),
						deleteDerivedFromRelation.right().value().name());
				return false;
			}

			// create VFC
			Either<Resource, StorageOperationStatus> createVFC = createVFC(metadata, vfResource, vfcUUID,
					vfcTargetState);
			if (createVFC.isRight()) {
				log.error("failed to split VF {} to VFC. error is: {}",
						metadata.getMetadataDataDefinition().getUniqueId(), createVFC.right().value().name());
				return false;
			}
			Resource vfcResource = createVFC.left().value();
			if (!createVfcInstanceOnVf(vfcResource, vfUniqueId)) {
				return false;
			}
			// update VFC to deleted if required
			if (isVfDeleted != null && isVfDeleted) {
				Either<Component, StorageOperationStatus> markResourceToDelete = resourceOperation
						.markComponentToDelete(vfcResource, true);
				if (markResourceToDelete.isRight()) {
					log.error("failed to mark isDeleted on VFC {} . error is: {}", vfcResource.getUniqueId(),
							markResourceToDelete.right().value().name());
					return false;
				}
			}

		}
		return true;
	}
+
	/**
	 * Creates a VFC from a VF's data: the passed resource object is mutated in
	 * place (id/UUID reset, artifacts and instances cleared, type forced to VFC)
	 * and persisted as a new resource. After creation the UUID and, when needed,
	 * the highest-version flag are fixed directly on the new vertex.
	 *
	 * @param metadata       VF metadata (currently unused here; kept for the caller's context)
	 * @param vfcResource    VF resource object to be recycled into the new VFC (mutated!)
	 * @param uuid           UUID to assign to the VFC (shared across VF versions)
	 * @param vfcTargetState lifecycle state the VFC should end up in
	 * @return the created resource, or the creation failure status.
	 */
	private Either<Resource, StorageOperationStatus> createVFC(ResourceMetadataData metadata, Resource vfcResource,
			String uuid, LifecycleStateEnum vfcTargetState) {

		// Capture before createResource, which may overwrite it.
		Boolean highestVersion = vfcResource.isHighestVersion();
		// Resource vfcResource = new Resource((ResourceMetadataDefinition)
		// vfResource.getComponentMetadataDefinition());
		// String componentName = vfcResource.getName()+"VFC";
		// vfcResource.setName(componentName);
		// vfcResource.setNormalizedName(ValidationUtils.normaliseComponentName(componentName));
		// vfcResource.setSystemName(ValidationUtils.convertToSystemName(componentName));
		vfcResource.setUniqueId(null);
		vfcResource.setUUID(uuid);
		vfcResource.setAllVersions(null);
		vfcResource.setArtifacts(null);
		vfcResource.setDeploymentArtifacts(null);
		vfcResource.setComponentInstances(null);
		vfcResource.setComponentInstancesProperties(null);
		vfcResource.setComponentInstancesRelations(null);
		vfcResource.setResourceType(ResourceTypeEnum.VFC);
		vfcResource.setIsDeleted(false);

		vfcResource.setLifecycleState(vfcTargetState);
		// vfcResource.setDerivedFrom(vfResource.getDerivedFrom());
		// vfcResource.setProperties(vfResource.getProperties());
		// vfcResource.setAdditionalInformation(vfResource.getAdditionalInformation());
		// vfcResource.setCategories(vfResource.getCategories());
		// vfcResource.setTags(vfResource.getTags());

		Either<Resource, StorageOperationStatus> createResource = resourceOperation.createResource(vfcResource, true);
		if (createResource.isRight()) {
			return createResource;
		}
		Resource afterCreateResource = createResource.left().value();
		Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao.getVertexByProperty(
				UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), afterCreateResource.getUniqueId());
		if (vertexByProperty.isRight()) {
			// NOTE(review): a vertex-fetch failure is silently ignored here and
			// the created resource is returned without the UUID/highest-version
			// fix-up - confirm this best-effort behavior is intended.
			return createResource;
		}
		// Patch the UUID (and highest-version flag) directly on the vertex,
		// overriding whatever createResource generated.
		Vertex newVfcVertex = vertexByProperty.left().value();
		newVfcVertex.property(GraphPropertiesDictionary.UUID.getProperty(), uuid);
		if (!highestVersion) {
			newVfcVertex.property(GraphPropertiesDictionary.IS_HIGHEST_VERSION.getProperty(), false);
		}
		return createResource;
	}
+
+ private boolean createVfcInstanceOnVf(Resource vfcResource, String vfUniqueId) {
+ // create VFC instance on VF
+ ComponentInstance componentInstance = new ComponentInstance();
+ componentInstance.setComponentUid(vfcResource.getUniqueId());
+ componentInstance.setPosX("550");
+ componentInstance.setPosY("300");
+ componentInstance.setName(vfcResource.getName());
+ componentInstance.setIcon(vfcResource.getIcon());
+ componentInstance.setToscaComponentName(vfcResource.getToscaResourceName());
+ Either<String, Boolean> handleNameLogic = handleNameLogic(componentInstance, vfUniqueId, vfcResource.getName());
+ if (handleNameLogic.isRight()) {
+ log.error("failed to create logical name for vfc instance");
+ return false;
+ }
+ Either<ComponentInstance, StorageOperationStatus> createComponentInstance = componentInstanceOperaion
+ .createComponentInstance(vfUniqueId, NodeTypeEnum.Resource, handleNameLogic.left().value(),
+ componentInstance, NodeTypeEnum.Resource, true);
+
+ if (createComponentInstance.isRight()) {
+ log.error("failed to create vfc instance on vf {}. error: {}", vfUniqueId,
+ createComponentInstance.right().value().name());
+ return false;
+ }
+ return true;
+ }
+
+ private Either<String, Boolean> handleNameLogic(ComponentInstance componentInstance, String containerComponentId,
+ String resourceName) {
+
+ Either<Integer, StorageOperationStatus> componentInNumberStatus = resourceOperation
+ .increaseAndGetComponentInstanceCounter(containerComponentId, true);
+
+ if (componentInNumberStatus.isRight()) {
+ log.debug("Failed to get component instance number for container component {} ", containerComponentId);
+ return Either.right(false);
+ }
+ String resourceInNumber = componentInNumberStatus.left().value().toString();
+ componentInstance.setComponentName(resourceName);
+ componentInstance.setName(resourceName);
+ String logicalName = componentInstanceOperaion.createComponentInstLogicalName(resourceInNumber, resourceName);
+
+ Boolean eitherValidation = validateComponentInstanceName(logicalName, componentInstance, true);
+ if (!eitherValidation) {
+ return Either.right(false);
+ }
+
+ return Either.left(resourceInNumber);
+ }
+
+ private Boolean validateComponentInstanceName(String resourceInstanceName, ComponentInstance resourceInstance,
+ boolean isCreate) {
+
+ if (!ValidationUtils.validateStringNotEmpty(resourceInstanceName)) {
+ return false;
+ }
+ resourceInstance.setNormalizedName(ValidationUtils.normaliseComponentInstanceName(resourceInstanceName));
+ if (!isCreate) {
+ if (!ValidationUtils.validateResourceInstanceNameLength(resourceInstanceName)) {
+ return false;
+ }
+ if (!ValidationUtils.validateResourceInstanceName(resourceInstanceName)) {
+ return false;
+ }
+ }
+
+ return true;
+
+ }
+
	/**
	 * Migration entry point from version 1604 to 1607: renames old normative
	 * types, assigns tosca resource names to VFs and adds invariantUUIDs to all
	 * components. Commits on success and rolls back on any failure (via the
	 * finally block).
	 *
	 * @param appConfigDir application config directory, forwarded to addInvariantUUIDs
	 * @return true when the whole migration succeeded, false otherwise.
	 */
	public boolean migrate1604to1607(String appConfigDir) {
		log.debug("Started the migration procedure from version 1604 to version 1607 ...");
		log.debug("Getting all resources with resources");
		boolean result = false;
		Either<Boolean, StorageOperationStatus> resourceEither = null;
		try {
			Either<List<ResourceMetadataData>, TitanOperationStatus> allResources = titanGenericDao
					.getByCriteria(NodeTypeEnum.Resource, null, ResourceMetadataData.class);
			if (allResources.isRight()) {
				log.error("Couldn't get resources from DB, error: {}", allResources.right().value());
				result = false;
				return result;
			}
			List<ResourceMetadataData> resourcesList = allResources.left().value();
			if (resourcesList == null) {
				log.error("Couldn't get resources from DB, no resources found");
				result = false;
				return result;
			}
			log.debug("Found {} resources", resourcesList.size());
			for (ResourceMetadataData resource : resourcesList) {
				String resourceName = resource.getMetadataDataDefinition().getName();
				log.debug("Checking resource {}", resourceName);
				// Old normative type names are shortened to their last segment.
				if (isNormative(resourceName)) {
					resourceEither = changeNormativeTypeName(resource);
					if (resourceEither.isRight()) {
						log.error("DB error during name changing");
						result = false;
						return result;
					}
				}
				// Every VF gets a generated tosca resource name.
				if (((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getResourceType().name()
						.equals("VF")) {
					resourceEither = setVfToscaResourceName(resource);
					if (resourceEither.isRight()) {
						log.error("DB error during tosca resource name setting");
						result = false;
						return result;
					}
				}
			}
			result = addInvariantUUIDs(appConfigDir);
		} finally {
			// Single commit/rollback point for the whole migration: any early
			// return above leaves result == false and triggers the rollback.
			if (!result) {
				titanGenericDao.rollback();
				log.debug("**********************************************");
				log.debug("The migration procedure from version 1604 to version 1607 FAILED!!");
				log.debug("**********************************************");
			} else {
				titanGenericDao.commit();
				log.debug("**********************************************");
				log.debug("The migration procedure from version 1604 to version 1607 ended successfully!");
				log.debug("**********************************************");
			}
		}

		return result;
	}
+
	/**
	 * Adds an invariantUUID to every highest-version resource, service and
	 * product that does not already have one, then propagates that UUID to all
	 * other versions of the same component.
	 *
	 * @param appConfigDir application config directory (currently unused here)
	 * @return true when every component was updated, false on the first failure.
	 */
	private boolean addInvariantUUIDs(String appConfigDir) {
		log.debug("Started adding of InvariantUUID ...");
		log.debug("Getting all resources with highest version");

		Map<String, Object> props = new HashMap<>();
		props.put(GraphPropertiesDictionary.IS_HIGHEST_VERSION.getProperty(), true);

		List<ComponentMetadataData> fullComponentList = new ArrayList<ComponentMetadataData>();

		// getting resources
		Either<List<ResourceMetadataData>, TitanOperationStatus> allHighestVersionResources = titanGenericDao
				.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
		if (allHighestVersionResources.isRight()) {
			log.error("Couldn't get resources with highest version from DB, error: {}",
					allHighestVersionResources.right().value());
			return false;
		}
		List<ResourceMetadataData> allHighestVersionResourcesAL = allHighestVersionResources.left().value();
		if (allHighestVersionResourcesAL == null) {
			log.error("Couldn't get resources with highest version from DB, no resources found");
			return false;
		}
		log.debug("Found {} resources", allHighestVersionResourcesAL.size());
		fullComponentList.addAll(allHighestVersionResourcesAL);

		// getting services
		Either<List<ServiceMetadataData>, TitanOperationStatus> allHighestVersionServices = titanGenericDao
				.getByCriteria(NodeTypeEnum.Service, props, ServiceMetadataData.class);
		if (allHighestVersionServices.isRight()) {
			log.error("Couldn't get services with highest version from DB, error: {}",
					allHighestVersionServices.right().value());
			return false;
		}
		List<ServiceMetadataData> allHighestVersionServicesAL = allHighestVersionServices.left().value();
		if (allHighestVersionServicesAL == null) {
			log.error("Couldn't get services with highest version from DB, no services found");
			return false;
		}
		log.debug("Found {} services", allHighestVersionServicesAL.size());
		fullComponentList.addAll(allHighestVersionServicesAL);

		// One entry per UUID; components that already carry an invariantUUID drop out.
		List<ComponentMetadataData> reducedComponentsAL = reduceHighestVersionResourcesList(fullComponentList);

		// getting products
		Either<List<ProductMetadataData>, TitanOperationStatus> allHighestVersionProducts = titanGenericDao
				.getByCriteria(NodeTypeEnum.Product, props, ProductMetadataData.class);
		if (allHighestVersionProducts.isRight()) {
			log.error("Couldn't get products with highest version from DB, error: {}",
					allHighestVersionProducts.right().value());
			return false;
		}
		List<ProductMetadataData> allHighestVersionProductsAL = allHighestVersionProducts.left().value();
		if (allHighestVersionProductsAL == null) {
			log.error("Couldn't get products with highest version from DB, no products found");
			return false;
		}
		log.debug("Found {} products", allHighestVersionProductsAL.size());

		List<ComponentMetadataData> fullProductList = new ArrayList<ComponentMetadataData>();
		fullProductList.addAll(allHighestVersionProductsAL);
		List<ComponentMetadataData> reducedProductAL = reduceHighestVersionResourcesList(fullProductList);

		// Products may already carry an invariant under CONSTANT_UUID - reuse it.
		for (ComponentMetadataData product : reducedProductAL) {
			if (!setProductInvariantUUIDIfExists((ProductMetadataData) product)) {
				return false;
			}
		}
		reducedComponentsAL.addAll(reducedProductAL);

		log.debug("Reduced list of Highest Version Components contains {} components", reducedComponentsAL.size());
		for (ComponentMetadataData componentMetaData : reducedComponentsAL) {

			// Generate an invariantUUID only when one is not already present.
			String invariantUUID = componentMetaData.getMetadataDataDefinition().getInvariantUUID();
			log.debug("old invariantUUID {}", invariantUUID);
			if (invariantUUID == null || invariantUUID.isEmpty()) {
				invariantUUID = UniqueIdBuilder.buildInvariantUUID();
				componentMetaData.getMetadataDataDefinition().setInvariantUUID(invariantUUID);
			}
			log.debug("new invariantUUID {}", componentMetaData.getMetadataDataDefinition().getInvariantUUID());
			Either<ComponentMetadataData, TitanOperationStatus> updateNode = titanGenericDao
					.updateNode(componentMetaData, ComponentMetadataData.class);
			if (updateNode.isRight()) {
				log.error("DB error during while updating component {}, error: {}",
						componentMetaData.getMetadataDataDefinition().getName(), updateNode.right().value());
				return false;
			}
			log.debug("updated invariantUUID {}",
					updateNode.left().value().getMetadataDataDefinition().getInvariantUUID());
			// Components with more than the initial 0.1 version need the UUID
			// propagated to every other version.
			if (!isOnlyVersion(componentMetaData)) {
				ComponentOperation componentOperation = null;
				switch (NodeTypeEnum.getByName(componentMetaData.getLabel())) {
				case Resource:
					componentOperation = resourceOperation;
					break;
				case Service:
					componentOperation = serviceOperation;
					break;
				case Product:
					componentOperation = productOperation;
					break;
				default:
					// NOTE(review): an unexpected label leaves componentOperation
					// null and the getComponent call below would NPE - confirm
					// only Resource/Service/Product labels can occur here.
					break;
				}
				Either<Component, StorageOperationStatus> getComponentResult = componentOperation
						.getComponent((String) componentMetaData.getUniqueId(), true);
				if (getComponentResult.isRight()) {
					log.error("DB error during while getting component with uniqueID {}, error: {}",
							componentMetaData.getUniqueId(), getComponentResult.right().value());
					return false;
				}
				Component component = getComponentResult.left().value();
				if (component == null) {
					log.error("The component received from DB is empty");
					return false;
				}

				Map<String, String> allVersions = component.getAllVersions();
				log.debug("found {} versions for component {}", allVersions.size(), component.getName());
				Either<Boolean, StorageOperationStatus> resEither = updateAllVersions(allVersions, invariantUUID);
				if (resEither.isRight()) {
					log.error("DB error during invariantUUID adding");
					return false;
				}
			}
		}
		return true;
	}
+
+ private boolean isOnlyVersion(ComponentMetadataData componentMetaData) {
+ String version = componentMetaData.getMetadataDataDefinition().getVersion();
+ if (version.equals("0.1"))
+ return true;
+ return false;
+ }
+
+ private boolean setProductInvariantUUIDIfExists(ProductMetadataData product) {
+ Either<TitanVertex, TitanOperationStatus> getVertexRes = titanGenericDao
+ .getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), product.getUniqueId());
+ if (getVertexRes.isRight()) {
+ log.error("DB error during retrieving product vertex {}", product.getMetadataDataDefinition().getName());
+ return false;
+ }
+ Vertex productVertex = getVertexRes.left().value();
+ String invariantUUID = productVertex.value(GraphPropertiesDictionary.CONSTANT_UUID.getProperty());
+ if (invariantUUID != null && !invariantUUID.isEmpty()) {
+ product.getMetadataDataDefinition().setInvariantUUID(invariantUUID);
+ }
+ return true;
+ }
+
+ private Either<Boolean, StorageOperationStatus> updateAllVersions(Map<String, String> allVersions,
+ String invariantUUID) {
+
+ if (allVersions != null) {
+ for (String uniqueID : allVersions.values()) {
+ Either<ComponentMetadataData, TitanOperationStatus> getNodeResult = titanGenericDao.getNode(
+ GraphPropertiesDictionary.UNIQUE_ID.getProperty(), uniqueID, ComponentMetadataData.class);
+ if (getNodeResult.isRight()) {
+ log.error("DB error during while getting component with uniqueID {}, error: {}", uniqueID,
+ getNodeResult.right().value());
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+ ComponentMetadataData component = getNodeResult.left().value();
+ component.getMetadataDataDefinition().setInvariantUUID(invariantUUID);
+ Either<ComponentMetadataData, TitanOperationStatus> updateNodeResult = titanGenericDao
+ .updateNode(component, ComponentMetadataData.class);
+ log.debug("updated child invariantUUID {}",
+ updateNodeResult.left().value().getMetadataDataDefinition().getInvariantUUID());
+ if (updateNodeResult.isRight()) {
+ log.error("DB error during while updating component {}, error: {}",
+ component.getMetadataDataDefinition().getName(), updateNodeResult.right().value());
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+ }
+ }
+ return Either.left(true);
+ }
+
+ private List<ComponentMetadataData> reduceHighestVersionResourcesList(
+ List<ComponentMetadataData> allHighestVersionResources) {
+ List<ComponentMetadataData> resultList = null;
+ Map<String, ComponentMetadataData> resultHM = new HashMap<String, ComponentMetadataData>();
+ for (ComponentMetadataData resource : allHighestVersionResources) {
+ if (resource.getMetadataDataDefinition().getInvariantUUID() != null
+ && !resource.getMetadataDataDefinition().getInvariantUUID().isEmpty()) {
+ log.debug("invariantUUID {} ", resource.getMetadataDataDefinition().getInvariantUUID());
+ continue;
+ }
+ String curUUID = resource.getMetadataDataDefinition().getUUID();
+ if (resultHM.containsKey(curUUID)) {
+ int isHighest = resultHM.get(curUUID).getMetadataDataDefinition().getVersion()
+ .compareTo(resource.getMetadataDataDefinition().getVersion());
+ if (isHighest > 0) {
+ log.debug("version {} is great than {} ",
+ resultHM.get(curUUID).getMetadataDataDefinition().getVersion(),
+ resource.getMetadataDataDefinition().getVersion());
+ continue;
+ }
+ }
+ resultHM.put(curUUID, resource);
+ }
+ resultList = new ArrayList<ComponentMetadataData>(resultHM.values());
+ return resultList;
+ }
+
+ private boolean isNormative(String resourceName) {
+ for (int i = 0; i < NORMATIVE_OLD_NAMES.length; ++i) {
+ if (NORMATIVE_OLD_NAMES[i].equals(resourceName))
+ return true;
+ }
+ return false;
+ }
+
+ private Either<Boolean, StorageOperationStatus> changeNormativeTypeName(ResourceMetadataData resource) {
+
+ String resourceName = resource.getMetadataDataDefinition().getName();
+
+ if (resourceName != null && !resourceName.isEmpty()) {
+ log.debug("Found normative type to change - {}", resourceName);
+ String oldName = resourceName;
+ String[] splitedName = resourceName.split("\\.");
+ String newName = splitedName[splitedName.length - 1];
+ String newSystemName = ValidationUtils.convertToSystemName(newName);
+ String newNormalizedName = ValidationUtils.normaliseComponentName(newName);
+ log.debug("Setting name to be {}", newName);
+
+ resource.getMetadataDataDefinition().setName(newName);
+ log.debug("Setting system name to be {}", newSystemName);
+ resource.getMetadataDataDefinition().setSystemName(newSystemName);
+ log.debug("Setting normalized name to be {}", newNormalizedName);
+ resource.getMetadataDataDefinition().setNormalizedName(newNormalizedName);
+ log.debug("Updating tag in metadata to be {}", newName);
+ resource.getMetadataDataDefinition().getTags().remove(oldName);
+ resource.getMetadataDataDefinition().getTags().add(newName);
+
+ log.debug("Creating tag node with name {}", newName);
+ TagData tagData = new TagData();
+ tagData.setName(newName);
+ Either<TagData, TitanOperationStatus> createNode = titanGenericDao.createNode(tagData, TagData.class);
+ if (createNode.isRight()) {
+ log.error("Error while creating tag node {}, error: {}.", newName, createNode.right().value());
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+
+ Either<ResourceMetadataData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(resource,
+ ResourceMetadataData.class);
+ if (updateNode.isRight()) {
+ log.error("DB error during while updating normative type {}, error: {}",
+ resource.getMetadataDataDefinition().getName(), updateNode.right().value());
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+ log.debug("Normative type {} was successfully updated", resource.getMetadataDataDefinition().getName());
+ return Either.left(true);
+ }
+
+ return Either.left(false);
+ }
+
+ private Either<Boolean, StorageOperationStatus> generateAndSetToscaResourceName(ResourceMetadataData resource,
+ String toscaResourceName) {
+ if (toscaResourceName == null) {
+ toscaResourceName = CommonBeUtils.generateToscaResourceName(
+ ((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getResourceType().name(),
+ resource.getMetadataDataDefinition().getSystemName());
+ }
+ Either<Boolean, StorageOperationStatus> validateToscaResourceNameExists = resourceOperation
+ .validateToscaResourceNameExists(toscaResourceName);
+ if (validateToscaResourceNameExists.isRight()) {
+ StorageOperationStatus storageOperationStatus = validateToscaResourceNameExists.right().value();
+ log.error("Couldn't validate toscaResourceName uniqueness - error: {}", storageOperationStatus);
+ return Either.right(storageOperationStatus);
+ }
+ if (validateToscaResourceNameExists.left().value()) {
+ log.debug("Setting tosca resource name to be {}", toscaResourceName);
+ ((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition())
+ .setToscaResourceName(toscaResourceName);
+ return Either.left(true);
+ } else {
+ // As agreed with Renana - cannot be fixed automatically
+ log.warn("toscaResourceName {} is not unique! Cannot set it. Continuing...");
+ return Either.left(false);
+ }
+ }
+
+ public boolean testRemoveHeatPlaceHolders(String appConfigDir) {
+
+ if (!AllowMultipleHeats.removeAndUpdateHeatPlaceHolders(titanGenericDao, log, false)) {
+ log.error("Failed to update heat place holders");
+ return false;
+ }
+ return true;
+ }
+
+ private Either<Boolean, StorageOperationStatus> setVfToscaResourceName(ResourceMetadataData resource) {
+ String resourceName = resource.getMetadataDataDefinition().getName();
+ String resourceType = ((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getResourceType()
+ .name();
+ String toscaResourceName = CommonBeUtils.generateToscaResourceName(resourceType,
+ resource.getMetadataDataDefinition().getSystemName());
+ log.debug("Setting tosca resource name {} to VF {}", toscaResourceName, resourceName);
+ ((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).setToscaResourceName(toscaResourceName);
+
+ Either<ResourceMetadataData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(resource,
+ ResourceMetadataData.class);
+ if (updateNode.isRight()) {
+ log.error("DB error during while updating VF tosca resource name {}, error: {}",
+ resource.getMetadataDataDefinition().getName(), updateNode.right().value());
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+ log.debug("Tosca resource name of VF {} was successfully updated",
+ resource.getMetadataDataDefinition().getName());
+ return Either.left(true);
+ }
+
+ public boolean testAddGroupUuids(String appConfigDir) {
+
+ if (!AddGroupUuid.addGroupUuids(titanGenericDao, log, false)) {
+ log.error("Failed to update group UUIDs");
+ return false;
+ }
+ return true;
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/VfcNamingAlignment.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/VfcNamingAlignment.java
new file mode 100644
index 0000000000..27b9351ebc
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/VfcNamingAlignment.java
@@ -0,0 +1,185 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1604;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.components.ResourceMetadataDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.ResourceOperation;
+import org.openecomp.sdc.be.resources.data.ResourceMetadataData;
+import org.openecomp.sdc.be.resources.data.TagData;
+import org.openecomp.sdc.be.utils.CommonBeUtils;
+import org.openecomp.sdc.common.api.Constants;
+import org.openecomp.sdc.common.util.ValidationUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import fj.data.Either;
+
+public class VfcNamingAlignment {
+
+	private static Logger log = LoggerFactory.getLogger(VfcNamingAlignment.class.getName());
+
+	@Autowired
+	protected TitanGenericDao titanGenericDao;
+	@Autowired
+	protected ResourceOperation resourceOperation;
+
+	/**
+	 * 1604 migration entry point: aligns tosca resource names of all non-VF
+	 * resources (VFC/CP/VL). Empty tosca names are generated from the resource
+	 * type and system name; names that were generated VF-style (contain
+	 * ".vf.") are regenerated. A single commit is made when every resource was
+	 * handled; any failure rolls the whole migration back.
+	 *
+	 * @param appConfigDir application configuration directory (not read by this procedure)
+	 * @return true when the whole alignment completed successfully, false otherwise
+	 */
+	public boolean alignVfcNames1604(String appConfigDir) {
+		log.debug("Started alignVfcNames1604 procedure..");
+		log.debug("Getting all resources with resourceType = VFC/CP/VL");
+		boolean result = false;
+		try {
+			// "not" criteria: every Resource node whose resourceType is NOT VF
+			Map<String, Object> notProperties = new HashMap<>();
+			notProperties.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
+			Either<List<ResourceMetadataData>, TitanOperationStatus> allVfcResources = titanGenericDao
+					.getByCriteria(NodeTypeEnum.Resource, null, notProperties, ResourceMetadataData.class);
+			if (allVfcResources.isRight()) {
+				log.error("Couldn't get VFC resources from DB, error: {}", allVfcResources.right().value());
+				result = false;
+				return result;
+			}
+			List<ResourceMetadataData> vfcList = allVfcResources.left().value();
+			if (vfcList == null) {
+				log.error("Couldn't get VFC/CP/VL resources from DB, no resources found");
+				result = false;
+				return result;
+			}
+			log.debug("Found {} VFC/CP/VL resources", vfcList.size());
+			for (ResourceMetadataData vfc : vfcList) {
+				log.debug("Checking resource {}", vfc.getMetadataDataDefinition().getName());
+				boolean wasChanged = false;
+
+				Either<Boolean, StorageOperationStatus> vfcEither = fixToscaNameEmpty(vfc);
+				if (vfcEither.isRight()) {
+					log.error("DB error during checkIsToscaNameEmpty - exiting...");
+					result = false;
+					return result;
+				}
+				wasChanged = wasChanged | vfcEither.left().value();
+
+				vfcEither = fixVfcToscaNameHasVf(vfc);
+				if (vfcEither.isRight()) {
+					log.error("DB error during checkIsVfcToscaNameHasVf - exiting...");
+					result = false;
+					return result;
+				}
+				wasChanged = wasChanged | vfcEither.left().value();
+
+				// persist only resources whose tosca name was actually modified
+				if (wasChanged) {
+					Either<ResourceMetadataData, TitanOperationStatus> updateVfc = updateVfc(vfc);
+					if (updateVfc.isRight()) {
+						log.error("DB error during while updating resource {}, error: {} - exiting...",
+								vfc.getMetadataDataDefinition().getName(), updateVfc.right().value());
+						result = false;
+						return result;
+					}
+					log.debug("Resource {} was successfully updated", vfc.getMetadataDataDefinition().getName());
+				}
+
+			}
+			result = true;
+		} finally {
+			// single commit/rollback covering the whole migration run
+			if (!result) {
+				titanGenericDao.rollback();
+				log.debug("**********************************************");
+				log.debug("alignVfcNames1604 procedure FAILED!!");
+				log.debug("**********************************************");
+			} else {
+				titanGenericDao.commit();
+				log.debug("**********************************************");
+				log.debug("alignVfcNames1604 procedure ended successfully!");
+				log.debug("**********************************************");
+			}
+		}
+
+		return result;
+	}
+
+	/** Persists the (already modified) resource metadata node. */
+	private Either<ResourceMetadataData, TitanOperationStatus> updateVfc(ResourceMetadataData vfc) {
+		return titanGenericDao.updateNode(vfc, ResourceMetadataData.class);
+	}
+
+	/**
+	 * Generates a tosca resource name for resources that have none.
+	 *
+	 * @return Either.left(true) when a new name was generated,
+	 *         Either.left(false) when nothing had to change,
+	 *         Either.right on DB error
+	 */
+	private Either<Boolean, StorageOperationStatus> fixToscaNameEmpty(ResourceMetadataData vfc) {
+		String toscaResourceName = ((ResourceMetadataDataDefinition) vfc.getMetadataDataDefinition())
+				.getToscaResourceName();
+		if (toscaResourceName == null || toscaResourceName.trim().equals(Constants.EMPTY_STRING)) {
+			log.debug("Tosca resource name is empty - setting new tosca name...");
+			Either<Boolean, StorageOperationStatus> generateAndSetToscaResourceName = generateAndSetToscaResourceName(
+					vfc, null);
+			if (generateAndSetToscaResourceName.isRight()) {
+				return Either.right(generateAndSetToscaResourceName.right().value());
+			}
+			return Either.left(true);
+		}
+		return Either.left(false);
+	}
+
+	/**
+	 * Regenerates tosca resource names that were created VF-style (contain ".vf.").
+	 *
+	 * @return Either.left(true) when the name was regenerated,
+	 *         Either.left(false) when nothing had to change,
+	 *         Either.right on DB error
+	 */
+	private Either<Boolean, StorageOperationStatus> fixVfcToscaNameHasVf(ResourceMetadataData vfc) {
+		String toscaResourceName = ((ResourceMetadataDataDefinition) vfc.getMetadataDataDefinition())
+				.getToscaResourceName();
+		// BUGFIX: the name may still be null here - fixToscaNameEmpty() leaves it
+		// unset when a unique name could not be generated - so guard against NPE
+		if (toscaResourceName != null && toscaResourceName.contains(".vf.")) {
+			log.debug("Tosca resource name {} is VF-style - setting new tosca name...", toscaResourceName);
+			Either<Boolean, StorageOperationStatus> generateAndSetToscaResourceName = generateAndSetToscaResourceName(
+					vfc, null);
+			if (generateAndSetToscaResourceName.isRight()) {
+				return Either.right(generateAndSetToscaResourceName.right().value());
+			}
+			return Either.left(true);
+		}
+		return Either.left(false);
+	}
+
+	/**
+	 * Generates a tosca resource name (when none is supplied), validates its
+	 * uniqueness and, if the validation passes, stores it on the metadata
+	 * definition. The change is not persisted here; callers persist via
+	 * updateVfc().
+	 *
+	 * @param vfc               resource whose tosca name should be set
+	 * @param toscaResourceName explicit name to use, or null to generate one
+	 * @return Either.left(true) when the name was set, Either.left(false) when
+	 *         it was not unique and left untouched, Either.right on DB error
+	 */
+	private Either<Boolean, StorageOperationStatus> generateAndSetToscaResourceName(ResourceMetadataData vfc,
+			String toscaResourceName) {
+		if (toscaResourceName == null) {
+			toscaResourceName = CommonBeUtils.generateToscaResourceName(
+					((ResourceMetadataDataDefinition) vfc.getMetadataDataDefinition()).getResourceType().name(),
+					vfc.getMetadataDataDefinition().getSystemName());
+		}
+		Either<Boolean, StorageOperationStatus> validateToscaResourceNameExists = resourceOperation
+				.validateToscaResourceNameExists(toscaResourceName);
+		if (validateToscaResourceNameExists.isRight()) {
+			StorageOperationStatus storageOperationStatus = validateToscaResourceNameExists.right().value();
+			log.error("Couldn't validate toscaResourceName uniqueness - error: {}", storageOperationStatus);
+			return Either.right(storageOperationStatus);
+		}
+		if (validateToscaResourceNameExists.left().value()) {
+			log.debug("Setting tosca resource name to be {}", toscaResourceName);
+			((ResourceMetadataDataDefinition) vfc.getMetadataDataDefinition()).setToscaResourceName(toscaResourceName);
+			return Either.left(true);
+		} else {
+			// As agreed with Renana - cannot be fixed automatically
+			// BUGFIX: the {} placeholder had no matching argument, so the
+			// offending name was never logged
+			log.warn("toscaResourceName {} is not unique! Cannot set it. Continuing...", toscaResourceName);
+			return Either.left(false);
+		}
+	}
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1607/CsarMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1607/CsarMigration.java
new file mode 100644
index 0000000000..b141eecb87
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1607/CsarMigration.java
@@ -0,0 +1,93 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1607;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.resources.data.ResourceMetadataData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import com.thinkaurelius.titan.core.TitanVertex;
+
+import fj.data.Either;
+
+public class CsarMigration {
+	private static Logger log = LoggerFactory.getLogger(CsarMigration.class.getName());
+
+	@Autowired
+	protected TitanGenericDao titanGenericDao;
+
+	/**
+	 * Removes the csarUUID vertex property from every VF resource in the
+	 * graph. Resources whose vertex cannot be located are skipped (best
+	 * effort). Commits when the loop finishes, rolls back on any exception.
+	 *
+	 * @return true on success (including "nothing to do"), false when the VF
+	 *         resources could not be fetched or the cleanup failed and was
+	 *         rolled back
+	 */
+	public boolean removeCsarResources() {
+		Map<String, Object> props = new HashMap<>();
+		props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
+
+		Either<List<ResourceMetadataData>, TitanOperationStatus> byCriteria = titanGenericDao
+				.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+		if (byCriteria.isRight()) {
+			// BUGFIX: the original message had no {} placeholder, so the status was dropped
+			log.debug("Failed to fetch VF resources by criteria {}", byCriteria.right().value());
+			return false;
+		}
+		List<ResourceMetadataData> resources = byCriteria.left().value();
+
+		try {
+			for (ResourceMetadataData data : resources) {
+				// only VFs created from a CSAR carry this property
+				if (data.getMetadataDataDefinition().getCsarUUID() != null) {
+					log.debug("VF {} with CSAR {}", data.getUniqueId(), data.getMetadataDataDefinition().getCsarUUID());
+					Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao
+							.getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), data.getUniqueId());
+					if (vertexByProperty.isRight()) {
+						log.debug("Failed to fetch vertex with id {} . skip resource {} ", data.getUniqueId(),
+								data.getMetadataDataDefinition().getName());
+						continue;
+					}
+					Vertex vertex = vertexByProperty.left().value();
+					// a vertex may carry several values for the property - drop them all
+					Iterator<VertexProperty<Object>> properties = vertex
+							.properties(GraphPropertiesDictionary.CSAR_UUID.getProperty());
+					while (properties.hasNext()) {
+						VertexProperty<Object> next = properties.next();
+						next.remove();
+					}
+
+				}
+			}
+			titanGenericDao.commit();
+		} catch (Exception e) {
+			// BUGFIX: keep the stack trace and report the failure to the caller
+			// (the original swallowed the exception and still returned true)
+			log.debug("Failed to clean CSAR UUID. rollback", e);
+			titanGenericDao.rollback();
+			return false;
+		}
+
+		return true;
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/TitanFixUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/TitanFixUtils.java
new file mode 100644
index 0000000000..36ac98e578
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/TitanFixUtils.java
@@ -0,0 +1,387 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1610;
+
+import com.google.gson.Gson;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanVertex;
+import fj.data.Either;
+
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.operations.impl.CacheMangerOperation;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.*;
+
+/**
+ * Created by mlando on 8/17/2016.
+ */
+public class TitanFixUtils {
+	private static Logger log = LoggerFactory.getLogger(TitanFixUtils.class.getName());
+
+	@Autowired
+	protected TitanGenericDao titanGenericDao;
+	@Autowired
+	protected CacheMangerOperation cacheMangerOperation;
+
+	/**
+	 * Fixes the normative VL / VL_ELINE resources: sets their icon property to
+	 * "network" and renames "VL_ELINE" to "VL ELINE" (name + tags). The eline
+	 * is looked up first by the old name and, if not found, by the fixed name
+	 * (i.e. the fix was already applied). Commits on success and refreshes the
+	 * component cache for both vertices; rolls back on failure.
+	 *
+	 * @return true when both vertices were found and aligned, false otherwise
+	 */
+	public boolean fixIconsInNormatives() {
+		log.info("starting fix");
+		String vlName = "VL";
+		String elineName = "VL_ELINE";
+		String elineFixedName = "VL ELINE";
+		Either<TitanGraph, TitanOperationStatus> graphResult = titanGenericDao.getGraph();
+		if (graphResult.isRight()) {
+			log.error("failed to get graph object.");
+			return false;
+		}
+
+		boolean operationFailed = false;
+		// kept outside the try so the finally block can feed the cache update
+		Map<String, Object> vlVerticeProperties = null;
+		Map<String, Object> elineVerticeProperties = null;
+
+		try {
+			TitanGraph titanGraph = graphResult.left().value();
+			log.info("look up vl :{}", vlName);
+
+			// highest-version Resource vertex named "VL"
+			Iterable<TitanVertex> vertices = titanGraph.query()
+					.has(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.Resource.getName())
+					.has(GraphPropertiesDictionary.NAME.getProperty(), vlName)
+					.has(GraphPropertiesDictionary.IS_HIGHEST_VERSION.getProperty(), true).vertices();
+
+			if (vertices == null) {
+				log.error("failed to get vernice for resource name {}", vlName);
+				operationFailed = true;
+				return false;
+			}
+
+			Iterator<TitanVertex> iterator = vertices.iterator();
+			List<TitanVertex> vertexList = new ArrayList<>();
+
+			// defensive - iterator() is not expected to return null
+			if (iterator == null) {
+				log.error("failed to get iterator over vertices object returned for resource id {}", vlName);
+				operationFailed = true;
+				return false;
+			}
+
+			while (iterator.hasNext()) {
+				TitanVertex vertex = iterator.next();
+				vertexList.add(vertex);
+			}
+
+			// exactly one highest-version vertex is expected
+			if (!(vertexList.size() == 1)) {
+				log.error("failed to get 1 vertex for resource {} with highest true. instead got {}", vlName,
+						vertexList.size());
+				operationFailed = true;
+				return false;
+			}
+
+			TitanVertex vlVertex = vertexList.get(0);
+
+			log.info("look up eline:{}", elineName);
+
+			boolean vl_eline_found = true;
+
+			// first attempt: old (broken) name "VL_ELINE"
+			vertices = titanGraph.query()
+					.has(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.Resource.getName())
+					.has(GraphPropertiesDictionary.NAME.getProperty(), elineName)
+					.has(GraphPropertiesDictionary.IS_HIGHEST_VERSION.getProperty(), true).vertices();
+
+			if (vertices == null) {
+				log.error("failed to get vertices object returned for resource {}", elineName);
+				operationFailed = true;
+
+				vl_eline_found = false;
+			}
+
+			if (vl_eline_found) {
+				iterator = vertices.iterator();
+				vertexList = new ArrayList<>();
+				if (iterator == null) {
+					log.error("failed to get iterator over vertices object returned for resource id {}", elineName);
+					operationFailed = true;
+
+					vl_eline_found = false;
+				}
+
+				if (vl_eline_found) {
+					while (iterator.hasNext()) {
+						TitanVertex vertex = iterator.next();
+						vertexList.add(vertex);
+					}
+
+					if (!(vertexList.size() == 1)) {
+						log.error("failed to get 1 vertex for resource {} with highest true. instead got {}",
+								elineName, vertexList.size());
+						operationFailed = true;
+
+						vl_eline_found = false;
+					}
+				}
+			}
+
+			// second attempt: the fixed name "VL ELINE" (fix already applied before)
+			if (!vl_eline_found) {
+				log.info("look up eline:{}", elineFixedName);
+				vl_eline_found = true;
+				operationFailed = false;
+
+				vertices = titanGraph.query()
+						.has(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.Resource.getName())
+						.has(GraphPropertiesDictionary.NAME.getProperty(), elineFixedName)
+						.has(GraphPropertiesDictionary.IS_HIGHEST_VERSION.getProperty(), true).vertices();
+
+				if (vertices == null) {
+					log.error("failed to get vertices object returned for resource {}", elineFixedName);
+					operationFailed = true;
+
+					vl_eline_found = false;
+				}
+
+				if (vl_eline_found) {
+					iterator = vertices.iterator();
+					vertexList = new ArrayList<>();
+					if (iterator == null) {
+						log.error("failed to get iterator over vertices object returned for resource id {}",
+								elineFixedName);
+						operationFailed = true;
+
+						vl_eline_found = false;
+					}
+
+					if (vl_eline_found) {
+						while (iterator.hasNext()) {
+							TitanVertex vertex = iterator.next();
+							vertexList.add(vertex);
+						}
+
+						if (!(vertexList.size() == 1)) {
+							log.error("failed to get 1 vertex for resource {} with highest true. instead got {}",
+									elineFixedName, vertexList.size());
+							operationFailed = true;
+
+							vl_eline_found = false;
+						}
+					}
+				}
+			}
+
+			if (!vl_eline_found) {
+				return false;
+			} else {
+				TitanVertex elineVertex = vertexList.get(0);
+
+				vlVerticeProperties = titanGenericDao.getProperties(vlVertex);
+
+				log.info("VL Vertice Properties {}", vlVerticeProperties);
+				if ("network".equals(vlVerticeProperties.get(GraphPropertiesDictionary.ICON.getProperty()))) {
+					log.info("nothing to update in vl");
+				} else {
+					log.info("updating property icon of vl");
+					vlVertex.property(GraphPropertiesDictionary.ICON.getProperty(), "network");
+				}
+
+				elineVerticeProperties = titanGenericDao.getProperties(elineVertex);
+
+				log.info("eline vertice Properties {}", elineVerticeProperties);
+				if ("network".equals(elineVerticeProperties.get(GraphPropertiesDictionary.ICON.getProperty()))) {
+					log.info("nothing to update in eline");
+				} else {
+					log.info("updating property icon of eline");
+					elineVertex.property(GraphPropertiesDictionary.ICON.getProperty(), "network");
+				}
+
+				if ("VL ELINE".equals(elineVerticeProperties.get(GraphPropertiesDictionary.NAME.getProperty()))) {
+					log.info("nothing to update in eline");
+				} else {
+					log.info("updating property name and tag of eline");
+					elineVertex.property(GraphPropertiesDictionary.NAME.getProperty(), elineFixedName);
+					List<String> tags = new ArrayList<>();
+					tags.add("VL ELINE");
+					// tags are persisted as a JSON-serialized list
+					elineVertex.property(GraphPropertiesDictionary.TAGS.getProperty(), new Gson().toJson(tags));
+				}
+
+				log.info("print current properties state");
+
+				vlVerticeProperties = titanGenericDao.getProperties(vlVertex);
+
+				log.info("vertice vl Properties {}", vlVerticeProperties);
+				elineVerticeProperties = titanGenericDao.getProperties(elineVertex);
+
+				log.info("vertice eline Properties {}", elineVerticeProperties);
+			}
+
+			try {
+				// NOTE(review): purpose of this 30s wait is not evident from the
+				// code - presumably to let in-flight readers settle before the
+				// commit in the finally block; confirm whether it is still needed
+				Thread.sleep(30 * 1000);
+			} catch (InterruptedException e) {
+				log.error("exception", e);
+				// BUGFIX: restore the interrupt flag instead of swallowing it
+				Thread.currentThread().interrupt();
+			}
+			return true;
+		} finally {
+			if (operationFailed) {
+				titanGenericDao.rollback();
+			} else {
+				titanGenericDao.commit();
+				// refresh the component cache for whichever vertices were touched
+				long time = System.currentTimeMillis();
+				if (vlVerticeProperties != null) {
+					cacheMangerOperation.updateComponentInCache(
+							(String) vlVerticeProperties.get(GraphPropertiesDictionary.UNIQUE_ID.getProperty()), time,
+							NodeTypeEnum.Resource);
+				}
+				if (elineVerticeProperties != null) {
+					cacheMangerOperation.updateComponentInCache(
+							(String) elineVerticeProperties.get(GraphPropertiesDictionary.UNIQUE_ID.getProperty()),
+							time, NodeTypeEnum.Resource);
+				}
+			}
+		}
+	}
+
+	/**
+	 * in 1610 we encounter an issue that if a capability property overrides a
+	 * property of a derived capability then it was created with out a property
+	 * type when it was first imported as part of the capability types. this
+	 * will add property type to the properties missing it.
+	 *
+	 * @return true when both known properties were checked (and fixed if their
+	 *         type was missing), false when either vertex could not be fetched
+	 */
+	public boolean fixCapabiltyPropertyTypes() {
+
+		// the two property vertices known to be affected by the import bug
+		String propertyIdSecure = "tosca.capabilities.Endpoint.Admin.secure";
+		String propertyIdNetworkName = "tosca.capabilities.Endpoint.Public.network_name";
+		Either<TitanGraph, TitanOperationStatus> graphResult = titanGenericDao.getGraph();
+
+		if (graphResult.isRight()) {
+			log.error("failed to get graph object.");
+			return false;
+		}
+
+		boolean operationFailed = false;
+		try {
+			TitanGraph titanGraph = graphResult.left().value();
+			log.info("look up propertyIdSecure:{}", propertyIdSecure);
+			Iterable<TitanVertex> vertices = titanGraph.query()
+					.has(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Property), propertyIdSecure).vertices();
+			if (vertices == null) {
+				log.error("failed to get vertices object returned for resource id {}", propertyIdSecure);
+				operationFailed = true;
+				return false;
+			}
+			Iterator<TitanVertex> iterator = vertices.iterator();
+			List<TitanVertex> vertexList = new ArrayList<>();
+
+			// defensive - iterator() is not expected to return null
+			if (iterator == null) {
+				// BUGFIX: use parameterized logging like the rest of the class
+				// instead of string concatenation
+				log.error("failed to get iterator over vertices object returned for resource id {}", propertyIdSecure);
+				operationFailed = true;
+				return false;
+			}
+
+			while (iterator.hasNext()) {
+				TitanVertex vertex = iterator.next();
+				vertexList.add(vertex);
+			}
+
+			if (!(vertexList.size() == 1)) {
+				log.error("failed to get 1 vertex for resource id {} instead got {}", propertyIdSecure,
+						vertexList.size());
+				operationFailed = true;
+				return false;
+			}
+
+			TitanVertex propertyVerticeSecure = vertexList.get(0);
+
+			log.info("look up propertyIdNetworkName:{}", propertyIdNetworkName);
+			vertices = titanGraph.query()
+					.has(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Property), propertyIdNetworkName).vertices();
+			if (vertices == null) {
+				log.error("failed to get vertices object returned for resource id {}", propertyIdNetworkName);
+				operationFailed = true;
+				return false;
+			}
+
+			iterator = vertices.iterator();
+			vertexList = new ArrayList<>();
+
+			if (iterator == null) {
+				log.error("failed to get iterator over vertices object returned for resource id {}",
+						propertyIdNetworkName);
+				operationFailed = true;
+				return false;
+			}
+
+			while (iterator.hasNext()) {
+				TitanVertex vertex = iterator.next();
+				vertexList.add(vertex);
+			}
+
+			if (!(vertexList.size() == 1)) {
+				log.error("failed to get 1 vertex for resource id {} instead got {}", propertyIdNetworkName,
+						vertexList.size());
+				operationFailed = true;
+				return false;
+			}
+
+			TitanVertex propertyVerticeNetworkName = vertexList.get(0);
+
+			Map<String, Object> verticeNetworkNameProperties = titanGenericDao
+					.getProperties(propertyVerticeNetworkName);
+
+			log.info("vertice NetworkName Properties {}", verticeNetworkNameProperties);
+			// only fill the type in when it is missing or empty
+			Object type = verticeNetworkNameProperties.get(GraphPropertiesDictionary.TYPE.getProperty());
+			if (type == null || "".equals(type)) {
+				log.info("updating property Vertice Network Name");
+				propertyVerticeNetworkName.property(GraphPropertiesDictionary.TYPE.getProperty(), "string");
+			}
+
+			Map<String, Object> verticeSecureProperties = titanGenericDao.getProperties(propertyVerticeSecure);
+
+			log.info("vertice Secure Properties {}", verticeSecureProperties);
+
+			type = verticeSecureProperties.get(GraphPropertiesDictionary.TYPE.getProperty());
+
+			if (type == null || "".equals(type)) {
+				log.info("updating property Vertice Secure");
+				propertyVerticeSecure.property(GraphPropertiesDictionary.TYPE.getProperty(), "boolean");
+			}
+
+			log.info("print current properties state");
+
+			verticeNetworkNameProperties = titanGenericDao.getProperties(propertyVerticeNetworkName);
+
+			log.info("vertice NetworkName Properties {}", verticeNetworkNameProperties);
+
+			verticeSecureProperties = titanGenericDao.getProperties(propertyVerticeSecure);
+
+			log.info("vertice Secure Properties {}", verticeSecureProperties);
+
+			return true;
+		} finally {
+			if (operationFailed) {
+				titanGenericDao.rollback();
+			} else {
+				titanGenericDao.commit();
+			}
+		}
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/ToscaArtifactsAlignment.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/ToscaArtifactsAlignment.java
new file mode 100644
index 0000000000..347a5705b7
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/ToscaArtifactsAlignment.java
@@ -0,0 +1,461 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1610;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphEdge;
+import org.openecomp.sdc.be.dao.neo4j.GraphEdgeLabels;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.components.ResourceMetadataDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.ArtifactDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.Operation;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.operations.api.IArtifactOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.AbstractOperation;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.openecomp.sdc.be.resources.data.ArtifactData;
+import org.openecomp.sdc.be.resources.data.ComponentMetadataData;
+import org.openecomp.sdc.be.resources.data.ResourceMetadataData;
+import org.openecomp.sdc.be.resources.data.ServiceMetadataData;
+import org.openecomp.sdc.common.api.ArtifactGroupTypeEnum;
+import org.openecomp.sdc.common.api.ArtifactTypeEnum;
+import org.openecomp.sdc.common.datastructure.Wrapper;
+import org.openecomp.sdc.common.util.StreamUtils;
+import org.openecomp.sdc.exception.ResponseFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import fj.data.Either;
+
+/**
+ * This Class holds the logic to add Tosca Artifacts placeholder and payload.<br>
+ * This addition is done for old version of Services and Resources (pre 1610) that weren't created with them.<br>
+ *
+ * @author mshitrit <br>
+ *
+ *
+ */
public class ToscaArtifactsAlignment extends AbstractOperation {

	// Graph-level artifact CRUD used to persist the created tosca artifact placeholders.
	@Autowired
	private IArtifactOperation artifactOperation;

	// Business logic used both to load full components and to generate tosca artifacts/payloads.
	@Autowired
	private ServiceBusinessLogic serviceBusinessLogic;

	private static Logger log = LoggerFactory.getLogger(ToscaArtifactsAlignment.class.getName());

	private static final String ERROR_PREFIX = "Tosca Artifact Alignment Error: ";

	// API that Fetches Resource
	private final Function<ComponentMetadataData, Resource> resourceFetcher = componentMD -> getComponent(componentMD, ComponentTypeEnum.RESOURCE);
	// API that Fetches Service
	private final Function<ComponentMetadataData, Service> serviceFetcher = componentMD -> getComponent(componentMD, ComponentTypeEnum.SERVICE);
	// Class Getters
	private final Supplier<Class<ResourceMetadataData>> resourceClassGetter = () -> ResourceMetadataData.class;
	private final Supplier<Class<ServiceMetadataData>> serviceClassGetter = () -> ServiceMetadataData.class;

	/**
	 * This method holds the logic to add Tosca Artifacts placeholder and payload.<br>
	 * Processing order: resources first (fetch all -> filter those missing tosca
	 * placeholders -> add placeholders -> generate payloads), then the same four
	 * steps for services. Each step runs only while the error wrapper is still
	 * empty, so the first recorded error short-circuits all following steps.
	 *
	 * NOTE(review): the finally block always commits, even when an error was
	 * recorded in the wrapper — partially applied changes are persisted; confirm
	 * this is intentional for this migration.
	 *
	 * @return true if succeed otherwise returns false
	 */
	public boolean alignToscaArtifacts() {
		Wrapper<TitanOperationStatus> errorWrapper = new Wrapper<>();
		List<ResourceMetadataData> allResources = new ArrayList<>();
		List<ResourceMetadataData> resourcesWithoutToscaPlaceHolder = new ArrayList<>();
		List<ServiceMetadataData> allServices = new ArrayList<>();
		List<ServiceMetadataData> servicesWithoutToscaPlaceHolder = new ArrayList<>();
		log.debug("alignToscaArtifacts Start");
		try {

			if (errorWrapper.isEmpty()) {
				log.info("Fetching all resources");
				fillAllComponetOfSpecificType(allResources, NodeTypeEnum.Resource, resourceClassGetter, errorWrapper);
			}

			if (errorWrapper.isEmpty()) {
				// Filter Resources Without Tosca Artifacts
				log.info("filtering resources to add tosca placeholder");
				Either<List<ResourceMetadataData>, TitanOperationStatus> eitherRelevantResources = getComponentsWithMissingToscaArtifacts(resourceClassGetter, NodeTypeEnum.Resource, allResources);
				fillListOrWrapper(errorWrapper, resourcesWithoutToscaPlaceHolder, eitherRelevantResources);
			}

			if (errorWrapper.isEmpty()) {
				// Add PlaceHolders To Resources
				log.info("adding tosca placeholders artifacts to resources");
				addToscaArtifactToComponents(resourcesWithoutToscaPlaceHolder, resourceFetcher, NodeTypeEnum.Resource, errorWrapper);
			}
			if (errorWrapper.isEmpty()) {
				// Add payload to Resources
				log.info("generating payload to tosca artifacts on resources");
				fillResourcesPayload(allResources, errorWrapper);
			}

			if (errorWrapper.isEmpty()) {
				log.info("Fetching all services");
				fillAllComponetOfSpecificType(allServices, NodeTypeEnum.Service, serviceClassGetter, errorWrapper);
			}
			if (errorWrapper.isEmpty()) {
				// Filter Services Without Tosca Artifacts
				log.info("filtering services to add tosca placeholder");
				Either<List<ServiceMetadataData>, TitanOperationStatus> eitherRelevantServices = getComponentsWithMissingToscaArtifacts(serviceClassGetter, NodeTypeEnum.Service, allServices);
				fillListOrWrapper(errorWrapper, servicesWithoutToscaPlaceHolder, eitherRelevantServices);
			}

			if (errorWrapper.isEmpty()) {
				// Add PlaceHolders To Services
				log.info("adding tosca placeholders artifacts to services");
				addToscaArtifactToComponents(servicesWithoutToscaPlaceHolder, serviceFetcher, NodeTypeEnum.Service, errorWrapper);
			}

			if (errorWrapper.isEmpty()) {
				// Filter Services for Payload Add
				// Add payload to Services
				log.info("generating payload to tosca artifacts on services");
				fillToscaArtifactPayload(allServices, serviceFetcher, errorWrapper);
			}
		} finally {
			titanGenericDao.commit();
		}
		return errorWrapper.isEmpty();

	}

	/**
	 * Generates tosca artifact payloads for resources in two passes: first the
	 * basic resources (CP, VL, VFC), then the VFs. Presumably the ordering
	 * matters because VF payload generation reads the already-generated
	 * artifacts of the basic resources it is composed of — TODO confirm.
	 */
	private void fillResourcesPayload(List<ResourceMetadataData> allResources, Wrapper<TitanOperationStatus> errorWrapper) {
		if (errorWrapper.isEmpty()) {
			// First Only Non VF (CP, VL & VFC)
			List<ResourceMetadataData> basicResources = allResources.stream().filter(e -> isBasicResource((ResourceMetadataDataDefinition) e.getMetadataDataDefinition())).collect(Collectors.toList());
			// Filter resources for Payload Add
			// Add payload to resources
			fillToscaArtifactPayload(basicResources, resourceFetcher, errorWrapper);
		}
		if (errorWrapper.isEmpty()) {
			// VFs
			List<ResourceMetadataData> complexResource = allResources.stream().filter(e -> ((ResourceMetadataDataDefinition) e.getMetadataDataDefinition()).getResourceType() == ResourceTypeEnum.VF).collect(Collectors.toList());
			// Filter resources for Payload Add
			// Add payload to resources
			fillToscaArtifactPayload(complexResource, resourceFetcher, errorWrapper);
		}
	}

	/** @return true when the resource is a CP, VL or VFC (i.e. not a VF). */
	private boolean isBasicResource(ResourceMetadataDataDefinition resourceMetadataDataDefinition) {
		final ResourceTypeEnum resourceType = resourceMetadataDataDefinition.getResourceType();
		boolean isBasicResource = resourceType == ResourceTypeEnum.CP || resourceType == ResourceTypeEnum.VL || resourceType == ResourceTypeEnum.VFC;
		return isBasicResource;
	}

	/**
	 * Fetches all component metadata nodes of the given node type into
	 * {@code components}. A NOT_FOUND result is treated as "no components" and
	 * is NOT recorded as an error; any other titan error is stored in the
	 * wrapper.
	 *
	 * NOTE(review): the props map (IS_DELETED=true) is passed as the THIRD
	 * argument of getByCriteria while the second is null — this looks like a
	 * "has-not-properties" filter that excludes deleted components; confirm
	 * against TitanGenericDao.getByCriteria's parameter semantics.
	 */
	private <T extends ComponentMetadataData> void fillAllComponetOfSpecificType(List<T> components, NodeTypeEnum nodeType, Supplier<Class<T>> classGetter, Wrapper<TitanOperationStatus> errorWrapper) {

		Map<String, Object> props = new HashMap<String, Object>();
		props.put(GraphPropertiesDictionary.IS_DELETED.getProperty(), true);
		Either<List<T>, TitanOperationStatus> eitherComponentMD = titanGenericDao.getByCriteria(nodeType, null, props, classGetter.get());
		if (eitherComponentMD.isLeft()) {
			components.addAll(eitherComponentMD.left().value());
		} else {
			final TitanOperationStatus errorType = eitherComponentMD.right().value();
			if (errorType != TitanOperationStatus.NOT_FOUND) {
				log.error("{} When fetching all components of type:{} a titan error occured:{}", ERROR_PREFIX, nodeType.getName(), errorType.name());
				errorWrapper.setInnerElement(errorType);
			}
		}

	}

	/**
	 * Adds missing tosca artifact placeholders to each of the given components.
	 * The first non-OK result stops the stream evaluation and is reported as
	 * TitanOperationStatus.NOT_CREATED in the wrapper (the specific storage
	 * error is only logged inside addToscaArtifacts).
	 */
	private <T extends ComponentMetadataData, R extends Component> void addToscaArtifactToComponents(List<T> relevantResources, Function<ComponentMetadataData, R> componentConvertor, NodeTypeEnum nodeType,
			Wrapper<TitanOperationStatus> errorWrapper) {

		// This Stream contains all create tosca placeholder results
		Stream<StorageOperationStatus> addToscaToComponentsResultsStream = relevantResources.stream().map(e -> addToscaArtifacts(e, nodeType, componentConvertor));
		// Execute the stream, and collect error
		Optional<StorageOperationStatus> optionalError = addToscaToComponentsResultsStream.filter(e -> e != StorageOperationStatus.OK).findFirst();

		// Handle error
		if (optionalError.isPresent()) {
			errorWrapper.setInnerElement(TitanOperationStatus.NOT_CREATED);
		}
	}

	/**
	 * Loads the full component (Resource or Service) for the given metadata via
	 * the business logic. Returns null on failure (failure is logged only) —
	 * callers do not null-check, so a fetch failure would surface later as an
	 * NPE; NOTE(review): confirm this is acceptable for a one-shot migration.
	 */
	private <R extends Component> R getComponent(ComponentMetadataData md, ComponentTypeEnum componentTypeEnum) {
		R result = null;
		Either<R, StorageOperationStatus> eitherComponent = serviceBusinessLogic.getComponent(md.getMetadataDataDefinition().getUniqueId(), componentTypeEnum);
		if (eitherComponent.isRight()) {
			log.error("{} When fetching component {} of type:{} with uniqueId:{}", ERROR_PREFIX, md.getMetadataDataDefinition().getName(), componentTypeEnum.getValue(), md.getMetadataDataDefinition().getUniqueId());
		} else {
			result = eitherComponent.left().value();
		}
		return result;
	}

	/**
	 * Thin wrapper around ServiceBusinessLogic.populateToscaArtifacts that logs
	 * success at debug level and logs + rethrows any runtime exception with the
	 * component's identity attached.
	 */
	private Either<Either<ArtifactDefinition, Operation>, ResponseFormat> populateToscaArtifactsWithLog(Component component, User user, boolean isInCertificationRequest, boolean inTransaction, boolean shouldLock) {
		Either<Either<ArtifactDefinition, Operation>, ResponseFormat> ret;
		try {
			ret = serviceBusinessLogic.populateToscaArtifacts(component, user, isInCertificationRequest, inTransaction, shouldLock);
			if (ret.isLeft()) {
				log.debug("Added payload to tosca artifacts of component {} of type:{} with uniqueId:{}", component.getName(), component.getComponentType().getValue(), component.getUniqueId());
			}
			return ret;
		} catch (Exception e) {
			log.error("{} Exception Occured When filling tosca artifact payload for component {} of type:{} with uniqueId:{}", ERROR_PREFIX, component.getName(), component.getComponentType().name(), component.getUniqueId(), e);
			throw e;
		}
	}

	/**
	 * Generates tosca artifact payload for every component whose lifecycle
	 * state requires it (see isGenerateToscaPayload). The first failing
	 * component — or any thrown exception — records GENERAL_ERROR in the
	 * wrapper. Flags passed to populateToscaArtifactsWithLog:
	 * isInCertificationRequest=true, inTransaction=true, shouldLock=false.
	 */
	private <R extends Component, T extends ComponentMetadataData> void fillToscaArtifactPayload(List<T> relevantComponents, Function<ComponentMetadataData, R> componentCreator, Wrapper<TitanOperationStatus> errorWrapper) {

		final User dummyUser = buildDummyUser();
		// Stream for all fill payload results
		Stream<ImmutablePair<Component, Either<Either<ArtifactDefinition, Operation>, ResponseFormat>>>
		// Filter elements that needs generation of tosca payload
		fillToscaPayloadResultsStream = relevantComponents.stream().filter(e -> isGenerateToscaPayload(e))
				// Converts ComponentMetadataData to Component
				.map(e -> componentCreator.apply(e))
				// For each component generate payload for tosca
				// artifacts
				.map(component -> {
					return new ImmutablePair<Component, Either<Either<ArtifactDefinition, Operation>, ResponseFormat>>(component, populateToscaArtifactsWithLog(component, dummyUser, true, true, false));
				});

		try {
			// execute and the stream
			Optional<Component> optionalError = fillToscaPayloadResultsStream.
			// filter in error
					filter(e -> e.getRight().isRight())
					// convert the result to error and execute the stream
					.map(e -> e.getLeft()).findFirst();

			// Check if error occurred
			if (optionalError.isPresent()) {
				Component component = optionalError.get();
				log.error("{} When filling tosca artifact payload for component {} of type:{} with uniqueId:{}", ERROR_PREFIX, component.getName(), component.getComponentType().name(), component.getUniqueId());

				errorWrapper.setInnerElement(TitanOperationStatus.GENERAL_ERROR);
			}
		} catch (Exception e) {
			log.error("{} When filling tosca artifact payload for components : {}", ERROR_PREFIX, e.getMessage(), e);
			errorWrapper.setInnerElement(TitanOperationStatus.GENERAL_ERROR);
		}
	}

	/**
	 * Creates in the graph any tosca artifact placeholders the component is
	 * missing. Returns OK when nothing needed adding or all additions
	 * succeeded; otherwise the first storage error encountered.
	 */
	private <R extends Component> StorageOperationStatus addToscaArtifacts(ComponentMetadataData component, NodeTypeEnum nodeType, Function<ComponentMetadataData, R> componentCreator) {

		StorageOperationStatus result = StorageOperationStatus.OK;
		R componentDefinition = componentCreator.apply(component);

		// Fetch artifacts to be Added
		Either<List<ArtifactDefinition>, StorageOperationStatus> eitherToscaArtifacts = getToscaArtifactsToAdd(componentDefinition);
		if (eitherToscaArtifacts.isRight()) {
			result = eitherToscaArtifacts.right().value();
		} else {
			List<ArtifactDefinition> toscaArtifactsToAdd = eitherToscaArtifacts.left().value();
			if (!CollectionUtils.isEmpty(eitherToscaArtifacts.left().value())) {
				final Stream<ImmutablePair<ArtifactDefinition, Either<ArtifactDefinition, StorageOperationStatus>>> createdToscaPlaceHolderStream = toscaArtifactsToAdd.stream()
						// creates the artifact in the graph
						.map(artifactDef -> new ImmutablePair<ArtifactDefinition, Either<ArtifactDefinition, StorageOperationStatus>>(artifactDef,
								artifactOperation.addArifactToComponent(artifactDef, componentDefinition.getUniqueId(), nodeType, false, true)));

				// Execute the stream, and collect error
				Optional<ImmutablePair<ArtifactDefinition, StorageOperationStatus>> optionalError = createdToscaPlaceHolderStream.filter(e -> e.getRight().isRight()).map(e -> new ImmutablePair<>(e.getLeft(), e.getRight().right().value()))
						.findFirst();

				// In case error occurred
				if (optionalError.isPresent()) {
					ArtifactDefinition toscaArtifact = optionalError.get().getLeft();
					StorageOperationStatus storageError = optionalError.get().getRight();
					log.error("{} When adding tosca artifact of type {} to component {} of type:{} " + "with uniqueId:{} a storageError occurred:{}", ERROR_PREFIX, toscaArtifact.getArtifactType(), component.getMetadataDataDefinition().getName(),
							nodeType.getName(), component.getMetadataDataDefinition().getUniqueId(), storageError.name());

					result = storageError;
				} else {
					log.debug("Added tosca artifacts to component {} of type:{} with uniqueId:{}", component.getMetadataDataDefinition().getName(), nodeType.getName(), component.getMetadataDataDefinition().getUniqueId());
				}

			}
		}

		return result;
	}

	/**
	 * Computes which tosca artifact placeholders are missing on the component:
	 * snapshots the artifact types present before calling
	 * setToscaArtifactsPlaceHolders (which mutates the component), then returns
	 * the placeholders whose artifact type was not already present. Returns
	 * ARTIFACT_NOT_FOUND if the business logic produced no tosca artifacts at
	 * all.
	 */
	private <R extends Component> Either<List<ArtifactDefinition>, StorageOperationStatus> getToscaArtifactsToAdd(R componentDefinition) {

		Either<List<ArtifactDefinition>, StorageOperationStatus> result;
		List<ArtifactDefinition> toscaArtifactsAlreadyExist = new ArrayList<>();
		if (!MapUtils.isEmpty(componentDefinition.getToscaArtifacts())) {
			toscaArtifactsAlreadyExist.addAll(componentDefinition.getToscaArtifacts().values());
		}

		// Set Tosca Artifacts on component
		serviceBusinessLogic.setToscaArtifactsPlaceHolders(componentDefinition, buildDummyUser());

		List<ArtifactDefinition> toscaArtifactsToAdd = new ArrayList<>();
		if (!MapUtils.isEmpty(componentDefinition.getToscaArtifacts())) {
			final Collection<ArtifactDefinition> allToscaArtifacts = componentDefinition.getToscaArtifacts().values();
			Set<String> artifactTypesExist = toscaArtifactsAlreadyExist.stream().map(e -> e.getArtifactType()).collect(Collectors.toSet());
			toscaArtifactsToAdd = allToscaArtifacts.stream().filter(e -> !artifactTypesExist.contains(e.getArtifactType())).collect(Collectors.toList());
			result = Either.left(toscaArtifactsToAdd);
		} else {
			log.error("{} failed to add tosca artifacts in bussiness logic to component {} of type:{} with uniqueId:{}", ERROR_PREFIX, componentDefinition.getName(), componentDefinition.getComponentType().getValue(),
					componentDefinition.getUniqueId());
			result = Either.right(StorageOperationStatus.ARTIFACT_NOT_FOUND);
		}
		return result;
	}

	/** Builds the synthetic user ("migrationTask") under which this migration runs. */
	private User buildDummyUser() {
		User user = new User();
		user.setUserId("migrationTask");
		return user;
	}

	/**
	 * @return true when the component's lifecycle state is NOT
	 *         NOT_CERTIFIED_CHECKIN / NOT_CERTIFIED_CHECKOUT — i.e. payload is
	 *         generated only for components past checkout/checkin (presumably
	 *         certified or in certification; confirm the intended state set).
	 */
	private boolean isGenerateToscaPayload(ComponentMetadataData component) {
		final String state = component.getMetadataDataDefinition().getState();
		boolean componentLifeCycleStateIsValid = LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name().equals(state) || LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT.name().equals(state);

		return !componentLifeCycleStateIsValid;

	}

	/**
	 * Unpacks an Either into either the target list (left) or the error
	 * wrapper (right). NOT_FOUND is deliberately ignored — it means "nothing
	 * to process", not a failure.
	 */
	private <T> void fillListOrWrapper(Wrapper<TitanOperationStatus> wrapper, List<T> listToFill, Either<List<T>, TitanOperationStatus> either) {
		if (either.isRight()) {
			final TitanOperationStatus errorType = either.right().value();
			if (errorType != TitanOperationStatus.NOT_FOUND) {
				wrapper.setInnerElement(errorType);
			}
		} else {
			listToFill.addAll(either.left().value());
		}
	}

	/**
	 * Filters the given components down to those missing at least one of the
	 * required tosca artifacts (CSAR + template). Artifact lookup stops at the
	 * first titan failure (takeWhilePlusOneNoEval keeps the failing element so
	 * its error can be reported), and that first error becomes the result.
	 */
	private <T extends ComponentMetadataData> Either<List<T>, TitanOperationStatus> getComponentsWithMissingToscaArtifacts(Supplier<Class<T>> classGetter, NodeTypeEnum nodeType, List<T> allComponents) {

		Either<List<T>, TitanOperationStatus> result;
		Stream<ImmutablePair<T, Either<List<ArtifactData>, TitanOperationStatus>>> componentsWithToscaStream =
				// Create a Stream of pairs : component and its Tosca Artifacts
				allComponents.stream().map(e -> new ImmutablePair<>(e, getToscaArtifatcs(e, nodeType)));

		List<ImmutablePair<T, Either<List<ArtifactData>, TitanOperationStatus>>> componentsWithToscaArtifacts =
				// Collect the stream to list.
				// in case getToscaArtifatcs failed, the first failure is
				// added to the list
				// (the collection stops after first failure)
				StreamUtils.takeWhilePlusOneNoEval(componentsWithToscaStream, e -> e.getRight().isLeft()).collect(Collectors.toList());

		// retrieve the failure optional (it may or may not exist)
		Optional<TitanOperationStatus> isErrorOccured = componentsWithToscaArtifacts.stream()
				// convert to the right side of the pair of type Either
				.map(e -> e.getRight())
				// Filter in only the errors
				.filter(e -> e.isRight()).
				// map the error from Either to TitanOperationStatus
				map(e -> e.right().value()).findFirst();

		// In case failure occurred
		if (isErrorOccured.isPresent()) {
			result = Either.right(isErrorOccured.get());
			// In case NO failure occurred
		} else {
			List<T> filteredComponents = componentsWithToscaArtifacts.stream()
					// Filter in only elements that does NOT have tosca
					// artifacts
					.filter(e -> isNotContainAllToscaArtifacts(e))
					// Convert back to Components List & collect
					.map(e -> e.getLeft()).collect(Collectors.toList());

			result = Either.left(filteredComponents);
		}

		return result;
	}

	/**
	 * @return true unless the component's tosca artifacts include BOTH
	 *         TOSCA_CSAR and TOSCA_TEMPLATE. Precondition: the pair's right
	 *         side is a left Either (callers only pass success results).
	 */
	private <T extends ComponentMetadataData> boolean isNotContainAllToscaArtifacts(ImmutablePair<T, Either<List<ArtifactData>, TitanOperationStatus>> pair) {

		final List<ArtifactData> artifactList = pair.getRight().left().value();

		Set<ArtifactTypeEnum> filteredToscaList = artifactList.stream().
		// Convert to ArtifactDataDefinition
				map(e -> e.getArtifactDataDefinition()).
				// Filter in Only Tosca Artifacts
				filter(e -> e.getArtifactGroupType() == ArtifactGroupTypeEnum.TOSCA).
				// Convert To ArtifactTypeEnum
				map(e -> ArtifactTypeEnum.findType(e.getArtifactType())).
				// Filter Out nulls in case of Type not found
				filter(e -> e != null).collect(Collectors.toSet());

		boolean toscaArifactContained = filteredToscaList.contains(ArtifactTypeEnum.TOSCA_CSAR) && filteredToscaList.contains(ArtifactTypeEnum.TOSCA_TEMPLATE);
		return !toscaArifactContained;
	}

	/**
	 * Fetches the component's TOSCA-group artifacts by traversing its
	 * ARTIFACT_REF edges. NOT_FOUND is mapped to an empty list (no artifacts
	 * yet); any other titan error is logged and returned as the failure.
	 */
	private <T extends ComponentMetadataData> Either<List<ArtifactData>, TitanOperationStatus> getToscaArtifatcs(T component, NodeTypeEnum nodeType) {

		Either<List<ArtifactData>, TitanOperationStatus> result;
		// All The Artifacts of the Component
		Either<List<ImmutablePair<ArtifactData, GraphEdge>>, TitanOperationStatus> eitherComponentArtifacts = titanGenericDao.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(nodeType), component.getMetadataDataDefinition().getUniqueId(),
				GraphEdgeLabels.ARTIFACT_REF, NodeTypeEnum.ArtifactRef, ArtifactData.class);

		if (eitherComponentArtifacts.isLeft()) {
			// Convert Artifact Edge Pair to Artifact
			List<ArtifactData> toscaArtifacts = eitherComponentArtifacts.left().value().stream()
					// map ImmutablePair<ArtifactData, GraphEdge> to
					// ArtifactData
					.map(e -> e.getLeft())
					// Filter in only Tosca Artifacts
					.filter(artifact -> artifact.getArtifactDataDefinition().getArtifactGroupType() == ArtifactGroupTypeEnum.TOSCA)
					// Collect
					.collect(Collectors.toList());
			result = Either.left(toscaArtifacts);
		} else if (eitherComponentArtifacts.right().value() == TitanOperationStatus.NOT_FOUND) {
			result = Either.left(new ArrayList<>());
		} else {
			final TitanOperationStatus titanError = eitherComponentArtifacts.right().value();
			log.error("{} When fetching artifacts for component {} of type:{} with uniqueId:{} a titanError occurred:{}", ERROR_PREFIX, component.getMetadataDataDefinition().getName(), nodeType.getName(),
					component.getMetadataDataDefinition().getUniqueId(), titanError.name());

			result = Either.right(titanError);
		}

		return result;
	}

}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
new file mode 100644
index 0000000000..e29cd7eb75
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
@@ -0,0 +1,97 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.main;
+
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.be.dao.cassandra.schema.SdcSchemaBuilder;
+import org.openecomp.sdc.common.api.ConfigurationSource;
+import org.openecomp.sdc.common.impl.ExternalConfiguration;
+import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DataSchemaMenu {
+
+ private static Logger log = LoggerFactory.getLogger(DataSchemaMenu.class.getName());
+
+ public static void main(String[] args) throws Exception {
+
+ String operation = args[0];
+
+ String appConfigDir = args[1];
+
+ if (args == null || args.length < 2) {
+ usageAndExit();
+ }
+
+ ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(),
+ appConfigDir);
+ ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
+
+ try {
+
+ switch (operation.toLowerCase()) {
+ case "create-cassandra-structures":
+ log.debug("Start create cassandra keyspace, tables and indexes");
+ if (SdcSchemaBuilder.createSchema()) {
+ log.debug("create cassandra keyspace, tables and indexes successfull");
+ System.exit(0);
+ } else {
+ log.debug("create cassandra keyspace, tables and indexes failed");
+ System.exit(2);
+ }
+ case "create-titan-structures":
+ log.debug("Start create titan keyspace, tables and indexes");
+ if (SdcSchemaBuilder.createSchema()) {
+ log.debug("create cassandra keyspace, tables and indexes successfull");
+ System.exit(0);
+ } else {
+ log.debug("create cassandra keyspace, tables and indexes failed");
+ System.exit(2);
+ }
+ case "clean-cassndra":
+ log.debug("Start clean keyspace, tables");
+ if (SdcSchemaBuilder.deleteSchema()) {
+ log.debug(" successfull");
+ System.exit(0);
+ } else {
+ log.debug(" failed");
+ System.exit(2);
+ }
+ default:
+ usageAndExit();
+ }
+ } catch (Throwable t) {
+ t.printStackTrace();
+ log.debug("create cassandra keyspace, tables and indexes failed");
+ System.exit(3);
+ }
+ }
+
+ private static void usageAndExit() {
+ DataSchemeUsage();
+ System.exit(1);
+ }
+
+ private static void DataSchemeUsage() {
+ System.out.println("Usage: create-cassandra-structures <configuration dir> ");
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
new file mode 100644
index 0000000000..17008b31a4
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
@@ -0,0 +1,109 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.main;
+
+import org.openecomp.sdc.asdctool.impl.DataMigration;
+import org.openecomp.sdc.asdctool.impl.EsToCassandraDataMigrationConfig;
+import org.openecomp.sdc.asdctool.impl.migration.v1604.AppConfig;
+import org.openecomp.sdc.asdctool.impl.migration.v1604.ServiceMigration;
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.be.dao.cassandra.schema.SdcSchemaBuilder;
+import org.openecomp.sdc.common.api.ConfigurationSource;
+import org.openecomp.sdc.common.impl.ExternalConfiguration;
+import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+public class EsToCassandraDataMigrationMenu {
+
+ private static Logger log = LoggerFactory.getLogger(MigrationMenu.class.getName());
+
+ public static void main(String[] args) throws Exception {
+
+ if (args == null || args.length < 2) {
+ usageAndExit();
+ }
+ String operation = args[0];
+
+ String appConfigDir = args[1];
+ System.setProperty("config.home", appConfigDir);
+ ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(),
+ appConfigDir);
+ ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
+
+ AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
+ EsToCassandraDataMigrationConfig.class);
+ DataMigration dataMigration = null;
+ try {
+ switch (operation.toLowerCase()) {
+ case "es-to-cassndra-migration":
+ dataMigration = (DataMigration) context.getBean("DataMigrationBean");
+ log.debug("Start migration from ES to C* ");
+ if (dataMigration.migrateDataESToCassndra(appConfigDir, true, true)) {
+ log.debug("migration from ES to C* was finished successfull");
+ System.exit(0);
+ } else {
+ log.debug("migration from ES to C* failed");
+ System.exit(2);
+ }
+ break;
+ case "es-to-cassndra-migration-export-only":
+ dataMigration = (DataMigration) context.getBean("DataMigrationBean");
+ log.debug("Start migration export only from ES to C* ");
+ if (dataMigration.migrateDataESToCassndra(appConfigDir, true, false)) {
+ log.debug("migration export only from ES to C* was finished successfull");
+ System.exit(0);
+ } else {
+ log.debug("migration export only from ES to C* failed");
+ System.exit(2);
+ }
+ break;
+ case "es-to-cassndra-migration-import-only":
+ dataMigration = (DataMigration) context.getBean("DataMigrationBean");
+ log.debug("Start migration import only from ES to C* ");
+ if (dataMigration.migrateDataESToCassndra(appConfigDir, false, true)) {
+ log.debug("migration import only from ES to C* was finished successfull");
+ System.exit(0);
+ } else {
+ log.debug("migration import only from ES to C* failed");
+ System.exit(2);
+ }
+ break;
+ default:
+ usageAndExit();
+ }
+ } catch (Throwable t) {
+ t.printStackTrace();
+ System.exit(3);
+ }
+ }
+
+ private static void usageAndExit() {
+ MigrationUsage();
+ System.exit(1);
+ }
+
+ private static void MigrationUsage() {
+ System.out.println(
+ "Usage: es-to-cassndra-migration/es-to-cassndra-migration-import-only/es-to-cassndra-migration-export-only <configuration dir>");
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
new file mode 100644
index 0000000000..6b6f11c5a6
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
@@ -0,0 +1,169 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.main;
+
+import org.openecomp.sdc.asdctool.impl.GraphMLConverter;
+import org.openecomp.sdc.asdctool.impl.GraphMLDataAnalyzer;
+
+/**
+ * Command line menu for exporting/importing the Titan graph and for producing
+ * data reports from GraphML exports.
+ *
+ * Exit codes: 0 - success, 1 - bad usage, 2 - operation failed.
+ */
+public class ExportImportMenu {
+
+	/** Prints the usage text of every supported command and exits with code 1. */
+	private static void usageAndExit() {
+		exportUsage();
+		importUsage();
+		exportUsersUsage();
+		// BUGFIX: the data-report command was supported but missing from the usage dump.
+		dataReportUsage();
+
+		System.exit(1);
+	}
+
+	private static void importUsage() {
+		System.out.println("Usage: import <titan.properties> <graph file location>");
+	}
+
+	private static void exportUsage() {
+		System.out.println("Usage: export <titan.properties> <output directory>");
+	}
+
+	private static void dataReportUsage() {
+		System.out.println("Usage: get-data-report-from-graph-ml <full path of .graphml file>");
+	}
+
+	private static void exportUsersUsage() {
+		System.out.println("Usage: exportusers <titan.properties> <output directory>");
+	}
+
+	public static void main(String[] args) throws Exception {
+
+		if (args == null || args.length < 1) {
+			usageAndExit();
+		}
+
+		String operation = args[0];
+		GraphMLConverter graphMLConverter = new GraphMLConverter();
+		switch (operation.toLowerCase()) {
+
+		case "export":
+			requireParams(args, 3, ExportImportMenu::exportUsage);
+			exitOnFailure(graphMLConverter.exportGraph(args));
+			break;
+
+		case "import":
+			requireParams(args, 3, ExportImportMenu::importUsage);
+			exitOnFailure(graphMLConverter.importGraph(args));
+			break;
+
+		case "exportusers":
+			// BUGFIX: on bad arguments this case used to print the "import" usage text.
+			requireParams(args, 3, ExportImportMenu::exportUsersUsage);
+			exitOnFailure(graphMLConverter.exportUsers(args));
+			break;
+
+		case "findproblem":
+			// findproblem takes the same arguments as import, hence importUsage.
+			requireParams(args, 3, ExportImportMenu::importUsage);
+			exitOnFailure(graphMLConverter.findErrorInJsonGraph(args));
+			break;
+
+		case "export-as-graph-ml":
+			requireParams(args, 3, ExportImportMenu::exportUsage);
+			exitOnFailure(graphMLConverter.exportGraphMl(args) != null);
+			break;
+
+		case "export-as-graph-ml-with-data-report":
+			requireParams(args, 3, ExportImportMenu::exportUsage);
+			String mlFile = graphMLConverter.exportGraphMl(args);
+			exitOnFailure(mlFile != null);
+			// Chain the data report on the freshly exported GraphML file.
+			exitOnFailure(new GraphMLDataAnalyzer().analyzeGraphMLData(new String[] { mlFile }) != null);
+			break;
+
+		case "get-data-report-from-graph-ml":
+			requireParams(args, 2, ExportImportMenu::dataReportUsage);
+			exitOnFailure(new GraphMLDataAnalyzer().analyzeGraphMLData(new String[] { args[1] }) != null);
+			break;
+
+		default:
+			usageAndExit();
+		}
+
+	}
+
+	/** Prints {@code usage} and exits with code 1 when fewer than {@code expected} args were given. */
+	private static void requireParams(String[] args, int expected, Runnable usage) {
+		if (!verifyParamsLength(args, expected)) {
+			usage.run();
+			System.exit(1);
+		}
+	}
+
+	/** Exits with code 2 when an operation reported failure; returns otherwise. */
+	private static void exitOnFailure(boolean success) {
+		if (!success) {
+			System.exit(2);
+		}
+	}
+
+	/** @return true when {@code args} contains at least {@code expected} entries (a null array counts as empty). */
+	private static boolean verifyParamsLength(String[] args, int expected) {
+		if (args == null) {
+			return expected <= 0;
+		}
+		return args.length >= expected;
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/MigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/MigrationMenu.java
new file mode 100644
index 0000000000..d7ed4600dd
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/MigrationMenu.java
@@ -0,0 +1,251 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.main;
+
+import java.util.Arrays;
+import java.util.Optional;
+
+import org.openecomp.sdc.asdctool.impl.PopulateComponentCache;
+import org.openecomp.sdc.asdctool.impl.migration.v1604.AppConfig;
+import org.openecomp.sdc.asdctool.impl.migration.v1604.DerivedFromAlignment;
+import org.openecomp.sdc.asdctool.impl.migration.v1604.GroupsAlignment;
+import org.openecomp.sdc.asdctool.impl.migration.v1604.ServiceMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1604.VfcNamingAlignment;
+import org.openecomp.sdc.asdctool.impl.migration.v1607.CsarMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1610.TitanFixUtils;
+import org.openecomp.sdc.asdctool.impl.migration.v1610.ToscaArtifactsAlignment;
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.common.api.ConfigurationSource;
+import org.openecomp.sdc.common.impl.ExternalConfiguration;
+import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+public class MigrationMenu {
+
+ private static Logger log = LoggerFactory.getLogger(MigrationMenu.class.getName());
+ private static final String SERVICE_MIGARTION_BEAN = "serviceMigrationBean";
+
+ private static enum MigrationOperationEnum {
+ MIGRATION_1602_1604("migrate-1602-1604", SERVICE_MIGARTION_BEAN),
+ ALIGN_DERIVED_FROM_1604("align-derived-from-1604", "derivedFromAlignment"),
+ MIGRATE_1604_1607("migrate-1604-1607", SERVICE_MIGARTION_BEAN),
+ ALIGN_VFC_NAMES_1604("align-vfc-names-1604", "vfcNamingAlignmentBean"),
+ TEST_REMOVE_HEAT_PLACEHOLDERS("testremoveheatplaceholders", SERVICE_MIGARTION_BEAN),
+ TEST_ADD_GROUP_UUIDS("testaddgroupuuids", SERVICE_MIGARTION_BEAN),
+ ALIGN_GROUPS("align-groups", "groupsAlignment"),
+ CLEAN_CSAR("clean-csar", "csarMigration"),
+ POPULATE_COMPONENT_CACHE("populate-component-cache", "populateComponentCache"),
+ FIX_PROPERTIES("fix-properties", "titanFixUtils"),
+ ALIGN_TOSCA_ARTIFACTS("align-tosca-artifacts", "toscaArtifactsAlignment"),
+ FIX_ICONS("fix-icons", "titanFixUtils");
+
+ private String value, beanName;
+
+ public static MigrationOperationEnum findByValue(String value) {
+ Optional<MigrationOperationEnum> optionalFound = Arrays.asList(MigrationOperationEnum.values()).stream().filter(e -> e.getValue().equalsIgnoreCase(value)).findAny();
+ return optionalFound.isPresent() ? optionalFound.get() : null;
+ }
+
+ MigrationOperationEnum(String value, String beanName) {
+ this.value = value;
+ this.beanName = beanName;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ public String getBeanName() {
+ return beanName;
+ }
+ };
+
+ public static void main(String[] args) throws Exception {
+
+ if (args == null || args.length < 2) {
+ usageAndExit();
+ }
+ MigrationOperationEnum operationEnum = MigrationOperationEnum.findByValue(args[0]);
+ String appConfigDir = args[1];
+ String dataInputFileDir = null;
+ if (operationEnum == MigrationOperationEnum.ALIGN_DERIVED_FROM_1604) {
+ dataInputFileDir = args[2];
+ }
+ AnnotationConfigApplicationContext context = initContext(appConfigDir);
+ try {
+ ServiceMigration serviceMigration = (ServiceMigration) context.getBean(SERVICE_MIGARTION_BEAN);
+ switch (operationEnum) {
+ case MIGRATION_1602_1604:
+ log.debug("Start Titan migration from 1602 version to 1604");
+ if (serviceMigration.migrate1602to1604(appConfigDir)) {
+ log.debug("Titan migration from 1602 version to 1604 was finished successfull");
+ System.exit(0);
+ } else {
+ log.debug("Titan migration from 1602 version to 1604 was failed");
+ System.exit(2);
+ }
+ case MIGRATE_1604_1607:
+ log.debug("Start Titan migration from 1604 version to 1607");
+ if (serviceMigration.migrate1604to1607(appConfigDir)) {
+ log.debug("Titan migration from 1604 version to 1607 was finished successfull");
+ System.exit(0);
+ } else {
+ log.debug("Titan migration from 1604 version to 1607 was failed");
+ System.exit(2);
+ }
+ break;
+ case ALIGN_VFC_NAMES_1604:
+ VfcNamingAlignment vfcNamingAlignment = (VfcNamingAlignment) context.getBean(operationEnum.getBeanName());
+ log.debug("Start VFC naming alignment on 1604");
+ if (vfcNamingAlignment.alignVfcNames1604(appConfigDir)) {
+ log.debug("VFC naming alignment on 1604 was finished successfull");
+ System.exit(0);
+ } else {
+ log.debug("VFC naming alignment on 1604 was failed");
+ System.exit(2);
+ }
+ break;
+ case TEST_REMOVE_HEAT_PLACEHOLDERS:
+ boolean check = serviceMigration.testRemoveHeatPlaceHolders(appConfigDir);
+ if (check == true) {
+ System.exit(0);
+ } else {
+ System.exit(2);
+ }
+ break;
+ case TEST_ADD_GROUP_UUIDS:
+ check = serviceMigration.testAddGroupUuids(appConfigDir);
+ if (check == true) {
+ System.exit(0);
+ } else {
+ System.exit(2);
+ }
+ break;
+ case ALIGN_DERIVED_FROM_1604:
+ DerivedFromAlignment derivedFromAlignment = (DerivedFromAlignment) context.getBean(operationEnum.getBeanName());
+ log.debug("Start derived from alignment on 1604");
+ if (derivedFromAlignment.alignDerivedFrom1604(appConfigDir, dataInputFileDir)) {
+ log.debug("Derived from alignment on 1604 was finished successfull");
+ System.exit(0);
+ } else {
+ log.debug("Derived from alignment on 1604 was failed");
+ System.exit(2);
+ }
+ break;
+ case ALIGN_GROUPS:
+ GroupsAlignment groupsAlignment = (GroupsAlignment) context.getBean(operationEnum.getBeanName());
+ log.debug("Start derived from alignment on 1604");
+ if (groupsAlignment.alignGroups(appConfigDir)) {
+ log.debug("Groups alignment was finished successfull");
+ System.exit(0);
+ } else {
+ log.debug("Groups alignment was failed");
+ System.exit(2);
+ }
+ break;
+ case CLEAN_CSAR:
+ log.debug("Start remove CSAR resources");
+ CsarMigration csarMigration = (CsarMigration) context.getBean(operationEnum.getBeanName());
+ // TODO Show to Michael L fixed return value
+ if (csarMigration.removeCsarResources()) {
+ log.debug("Remove CSAR resources finished successfully");
+ System.exit(0);
+ } else {
+ log.debug("Remove CSAR resources failed");
+ System.exit(2);
+ }
+ break;
+ case POPULATE_COMPONENT_CACHE:
+ PopulateComponentCache populateComponentCache = (PopulateComponentCache) context.getBean(operationEnum.getBeanName());
+ // TODO Show to Michael L No return value always returns 0
+ populateComponentCache.populateCache();
+ System.exit(0);
+ break;
+ case FIX_PROPERTIES:
+ log.debug("Start fix capability properties types");
+ TitanFixUtils titanFixUtils = (TitanFixUtils) context.getBean(operationEnum.getBeanName());
+ // TODO Show to Michael L fixed return value
+ if (titanFixUtils.fixCapabiltyPropertyTypes()) {
+ log.debug("Fix capability properties types finished successfully");
+ System.exit(0);
+ } else {
+ log.debug("Fix capability properties types failed");
+ System.exit(2);
+ }
+ break;
+ case FIX_ICONS:
+ log.debug("Start fix icons of vl and eline");
+ titanFixUtils = (TitanFixUtils) context.getBean(operationEnum.getBeanName());
+ // TODO Show to Michael L fixed return value
+ if (titanFixUtils.fixIconsInNormatives()) {
+ log.debug("Fix icons of vl and eline finished successfully");
+ System.exit(0);
+ } else {
+ log.debug("Fix icons of vl and eline failed");
+ System.exit(2);
+ }
+ break;
+ case ALIGN_TOSCA_ARTIFACTS:
+ log.debug("Start align tosca artifacts");
+ ToscaArtifactsAlignment toscaArtifactsAlignment = (ToscaArtifactsAlignment) context.getBean(operationEnum.getBeanName());
+ boolean isSuccessful = toscaArtifactsAlignment.alignToscaArtifacts();
+ if (isSuccessful) {
+ log.debug("Tosca Artifacts alignment was finished successfull");
+ System.exit(0);
+ } else {
+ log.debug("Tosca Artifacts alignment has failed");
+ System.exit(2);
+ }
+ break;
+ default:
+ usageAndExit();
+ }
+ } catch (Throwable t) {
+ t.printStackTrace();
+ System.exit(3);
+ } finally {
+ context.close();
+ }
+ }
+
+ private static AnnotationConfigApplicationContext initContext(String appConfigDir) {
+ ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
+ ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
+ AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(AppConfig.class);
+ return context;
+ }
+
+ private static void usageAndExit() {
+ MigrationUsage();
+ System.exit(1);
+ }
+
+ private static void MigrationUsage() {
+ System.out.println("Usage: migrate-1602-1604 <configuration dir>");
+ System.out.println("Usage: migrate-1604-1607 <configuration dir>");
+ System.out.println("Usage: align-vfc-names-1604 <configuration dir>");
+ System.out.println("Usage: align-derived-from-1604 <configuration dir> <data_input_file dir>");
+ System.out.println("Usage: align-groups <configuration dir>");
+ System.out.println("Usage: fix-properties <configuration dir>");
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/RemoveUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/RemoveUtils.java
new file mode 100644
index 0000000000..ddece759c0
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/RemoveUtils.java
@@ -0,0 +1,78 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.main;
+
+import org.openecomp.sdc.asdctool.impl.ProductLogic;
+
+/**
+ * Created by mlando on 2/23/2016.
+ */
+/**
+ * Command line utility that deletes all products from the Titan graph via the
+ * backend REST API.
+ *
+ * Exit codes: 0 - success, 1 - bad usage, 2 - delete failed.
+ */
+public class RemoveUtils {
+
+	public static void main(String[] args) throws Exception {
+
+		if (args == null || args.length < 1) {
+			// BUGFIX: exit after printing usage; previously execution fell
+			// through and args[0] below threw a NullPointerException.
+			removeUsage();
+			System.exit(1);
+		}
+
+		String operation = args[0];
+
+		switch (operation.toLowerCase()) {
+
+		case "remove-products":
+
+			// Requires <titan.properties> <BE host> <BE port> <admin user>.
+			if (!verifyParamsLength(args, 5)) {
+				removeUsage();
+				System.exit(1);
+			}
+
+			ProductLogic productLogic = new ProductLogic();
+			boolean result = productLogic.deleteAllProducts(args[1], args[2], args[3], args[4]);
+
+			if (!result) {
+				System.exit(2);
+			}
+			break;
+		default:
+			// BUGFIX: an unknown operation used to print usage yet still exit 0.
+			removeUsage();
+			System.exit(1);
+		}
+
+	}
+
+	/** Prints the single supported command. */
+	private static void removeUsage() {
+		System.out.println("Usage: remove-products <titan.properties> <BE host> <BE port> <admin user>");
+	}
+
+	/** @return true when {@code args} contains at least {@code expected} entries (a null array counts as empty). */
+	private static boolean verifyParamsLength(String[] args, int expected) {
+		if (args == null) {
+			return expected <= 0;
+		}
+		return args.length >= expected;
+	}
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java
new file mode 100644
index 0000000000..e620510916
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java
@@ -0,0 +1,122 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.main;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.asdctool.impl.UpdatePropertyOnVertex;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Command line utility that sets the IS_VNF graph property to {@code true} on
+ * the given services (identified by system name) once they are at least certified.
+ *
+ * Exit codes: 0 - services updated, 1 - bad usage, 2 - update failed.
+ */
+public class UpdateIsVnfMenu {
+
+	private static Logger log = LoggerFactory.getLogger(UpdateIsVnfMenu.class.getName());
+
+	/** Prints the supported command and terminates with exit code 1. */
+	private static void usageAndExit() {
+		updateIsVnfTrueUsage();
+		System.exit(1);
+	}
+
+	private static void updateIsVnfTrueUsage() {
+		System.out.println(
+				"Usage: updateIsVnfTrue <titan.properties> <systemServiceName1,systemServiceName2,...,systemServiceNameN>");
+	}
+
+	public static void main(String[] args) throws Exception {
+
+		if (args == null || args.length < 1) {
+			usageAndExit();
+		}
+
+		UpdatePropertyOnVertex updatePropertyOnVertex = new UpdatePropertyOnVertex();
+		String operation = args[0];
+
+		switch (operation.toLowerCase()) {
+
+		case "updateisvnftrue":
+			if (!verifyParamsLength(args, 3)) {
+				updateIsVnfTrueUsage();
+				System.exit(1);
+			}
+
+			Map<String, Object> keyValueToSet = new HashMap<>();
+			keyValueToSet.put(GraphPropertiesDictionary.IS_VNF.getProperty(), true);
+
+			List<Map<String, Object>> orCriteria = buildCriteriaFromSystemServiceNames(args[2]);
+			// null means the update itself failed; a non-negative count is success.
+			Integer updatedCount = updatePropertyOnVertex
+					.updatePropertyOnServiceAtLeastCertified(args[1], keyValueToSet, orCriteria);
+
+			if (updatedCount == null) {
+				System.exit(2);
+			} else if (updatedCount.intValue() >= 0) {
+				log.debug("Number of updated services is {}", updatedCount.intValue());
+				System.exit(0);
+			}
+
+			break;
+		default:
+			usageAndExit();
+		}
+
+	}
+
+	/**
+	 * Builds one OR-criterion per comma-separated system service name, each
+	 * matching a Service node with that (trimmed) system name.
+	 */
+	private static List<Map<String, Object>> buildCriteriaFromSystemServiceNames(String systemList) {
+
+		List<Map<String, Object>> criteria = new ArrayList<>();
+
+		// String.split never returns null, so no null check is needed (the
+		// original guarded against it); was also a raw HashMap - now typed.
+		for (String systemName : systemList.split(",")) {
+			Map<String, Object> criterion = new HashMap<>();
+			criterion.put(GraphPropertiesDictionary.SYSTEM_NAME.getProperty(), systemName.trim());
+			criterion.put(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.Service.getName());
+			criteria.add(criterion);
+		}
+
+		return criteria;
+	}
+
+	/** @return true when {@code args} contains at least {@code expected} entries (a null array counts as empty). */
+	private static boolean verifyParamsLength(String[] args, int expected) {
+		if (args == null) {
+			return expected <= 0;
+		}
+		return args.length >= expected;
+	}
+
+}