Diffstat (limited to 'asdctool/src/main')
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java                              | 45
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java | 11
2 files changed, 29 insertions(+), 27 deletions(-)
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
index d6aad24a24..1765bd5b1f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
@@ -19,17 +19,17 @@
*/
package org.openecomp.sdc.asdctool.impl;
-import static java.nio.charset.StandardCharsets.UTF_8;
-
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import fj.data.Either;
import java.io.BufferedWriter;
+import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumMap;
@@ -44,9 +44,9 @@ import org.openecomp.sdc.asdctool.impl.validator.utils.VfModuleArtifactPayloadEx
import org.openecomp.sdc.be.components.distribution.engine.VfModuleArtifactPayload;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
@@ -85,9 +85,10 @@ import org.openecomp.sdc.be.tosca.ToscaRepresentation;
import org.openecomp.sdc.common.api.ArtifactGroupTypeEnum;
import org.openecomp.sdc.common.api.ArtifactTypeEnum;
import org.openecomp.sdc.common.api.Constants;
-import org.openecomp.sdc.common.log.wrappers.Logger;
import org.openecomp.sdc.common.util.GeneralUtility;
import org.openecomp.sdc.exception.ResponseFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@org.springframework.stereotype.Component("artifactUuidFix")
@@ -95,7 +96,7 @@ public class ArtifactUuidFix {
private static final String MIGRATION1707_ARTIFACT_UUID_FIX = "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ";
private static final String FAILED_TO_FETCH_VF_RESOURCES = "Failed to fetch vf resources ";
- private static Logger log = Logger.getLogger(ArtifactUuidFix.class.getName());
+ private static final Logger log = LoggerFactory.getLogger(ArtifactUuidFix.class);
private JanusGraphDao janusGraphDao;
private ToscaOperationFacade toscaOperationFacade;
private ToscaExportHandler toscaExportUtils;
@@ -150,8 +151,8 @@ public class ArtifactUuidFix {
private boolean fetchFaultVf(List<Resource> vfLst, long time) {
log.info("Find fault VF ");
- String fileName = "fault_" + time + ".csv";
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+ try (Writer writer = new BufferedWriter(
+ new OutputStreamWriter(new FileOutputStream(File.createTempFile("fault_", "" + time)), StandardCharsets.UTF_8))) {
writer.write("vf name, vf id, state, version\n");
Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
@@ -213,8 +214,8 @@ public class ArtifactUuidFix {
log.info("No services as input");
return true;
}
- String fileName = "problemVf_" + time + ".csv";
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+ try (Writer writer = new BufferedWriter(
+ new OutputStreamWriter(new FileOutputStream(File.createTempFile("problemVf_", "" + time)), StandardCharsets.UTF_8))) {
writer.write("vf name, vf id, state, version, example service name\n");
Set<String> vfIds = new HashSet<>();
for (Service service : serviceList) {
@@ -242,7 +243,6 @@ public class ArtifactUuidFix {
}
}
}
- log.info("output file with list of Vf : {}", fileName);
} catch (Exception e) {
log.info("Failed to fetch services ", e);
return false;
@@ -254,8 +254,8 @@ public class ArtifactUuidFix {
private boolean fetchServices(String fixServices, List<Service> serviceList, long time) {
log.info("Find problem Services {}", fixServices);
- String fileName = "problemService_" + time + ".csv";
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+ try (Writer writer = new BufferedWriter(
+ new OutputStreamWriter(new FileOutputStream(File.createTempFile("problemService_", "" + time)), StandardCharsets.UTF_8))) {
writer.write("service name, service id, state, version\n");
Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
@@ -293,7 +293,6 @@ public class ArtifactUuidFix {
}
janusGraphDao.commit();
}
- log.info("output file with list of services : {}", fileName);
} catch (Exception e) {
log.info("Failed to fetch services ", e);
return false;
@@ -529,6 +528,7 @@ public class ArtifactUuidFix {
return false;
}
+
private boolean fix(List<Resource> vfLst, List<Service> serviceList, Map<String, List<Component>> nodesToFixTosca,
Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
boolean res = true;
@@ -541,8 +541,8 @@ public class ArtifactUuidFix {
}
Set<String> fixedIds = new HashSet<>();
long time = System.currentTimeMillis();
- String fileName = "FailedGenerateTosca" + "_" + time + ".csv";
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+ try (Writer writer = new BufferedWriter(
+ new OutputStreamWriter(new FileOutputStream(File.createTempFile("FailedGenerateTosca", "" + time)), StandardCharsets.UTF_8))) {
writer.write("componentType, name, version, UID, UUID, invariantUUID, state\n");
List<Component> failedList = new ArrayList<>();
if (res && nodesToFixTosca != null && !nodesToFixTosca.isEmpty()) {
@@ -574,6 +574,7 @@ public class ArtifactUuidFix {
if (servicesToFixTosca != null && !servicesToFixTosca.isEmpty()) {
generateAndSaveToscaArtifacts(servicesToFixTosca, fixedIds, serviceList, failedList);
}
+
for (Component component : serviceList) {
res = generateToscaPerComponent(fixedIds, component);
if (res) {
@@ -721,7 +722,7 @@ public class ArtifactUuidFix {
}
}
}
- log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(), res);
+ log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(), edgeLabelEnum, res);
return res;
}
@@ -857,10 +858,10 @@ public class ArtifactUuidFix {
if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && group.getArtifacts() != null) {
fixVfGroup(resource, artifactsMap, group);
}
- if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && (group.getArtifacts() == null || group.getArtifacts()
- .isEmpty())) {
+ if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) &&
+ (group.getArtifacts() == null || group.getArtifacts().isEmpty())) {
log.debug("Migration1707ArtifactUuidFix add group to delete list fix resource: id {}, name {} ", resource.getUniqueId(),
- resource.getName(), group.getName());
+ resource.getName());
groupsToDelete.add(group);
}
}
@@ -1008,8 +1009,8 @@ public class ArtifactUuidFix {
public boolean validateTosca(Map<String, List<Component>> vertices, Map<String, List<Component>> compToFix, String name) {
boolean result = true;
long time = System.currentTimeMillis();
- String fileName = name + "_" + time + ".csv";
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+ try (Writer writer = new BufferedWriter(
+ new OutputStreamWriter(new FileOutputStream(File.createTempFile(name, "" + time)), StandardCharsets.UTF_8))) {
writer.write("name, UUID, invariantUUID, state, version\n");
for (Map.Entry<String, List<Component>> entry : vertices.entrySet()) {
List<Component> compList = entry.getValue();
@@ -1128,7 +1129,7 @@ public class ArtifactUuidFix {
private void fillVfModuleInstHeatEnvPayload(Component parent, ComponentInstance instance, List<GroupInstance> groupsForCurrVF,
ArtifactDefinition vfModuleArtifact) {
- log.debug("generate new vf module for component. name {}, id {}, Version {}", instance.getName(), instance.getUniqueId());
+ log.debug("generate new vf module for component. name {}, id {}", instance.getName(), instance.getUniqueId());
String uniqueId = UniqueIdBuilder
.buildInstanceArtifactUniqueId(parent.getUniqueId(), instance.getUniqueId(), vfModuleArtifact.getArtifactLabel());
vfModuleArtifact.setUniqueId(uniqueId);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java
index 531d54a466..07f7ec0fd9 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java
@@ -23,6 +23,7 @@ import static java.nio.charset.StandardCharsets.UTF_8;
import fj.data.Either;
import java.io.BufferedWriter;
+import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
@@ -36,9 +37,9 @@ import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.ArtifactDefinition;
@@ -55,7 +56,7 @@ public abstract class ArtifactValidatorExecutor {
private final ToscaOperationFacade toscaOperationFacade;
private final String name;
- public ArtifactValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade, String name) {
+ protected ArtifactValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade, String name) {
this.janusGraphDao = janusGraphDao;
this.toscaOperationFacade = toscaOperationFacade;
this.name = name;
@@ -72,7 +73,7 @@ public abstract class ArtifactValidatorExecutor {
log.error("getVerticesToValidate failed " + resultsEither.right().value());
return result;
}
- System.out.println("getVerticesToValidate: " + resultsEither.left().value().size() + " vertices to scan");
+ log.info("getVerticesToValidate: {} vertices to scan", resultsEither.left().value().size());
List<GraphVertex> componentsList = resultsEither.left().value();
componentsList.forEach(vertex -> {
String ivariantUuid = (String) vertex.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID);
@@ -96,8 +97,8 @@ public abstract class ArtifactValidatorExecutor {
public boolean validate(Map<String, List<Component>> vertices, String outputFilePath) {
boolean result = true;
long time = System.currentTimeMillis();
- String fileName = outputFilePath + this.getName() + "_" + time + ".csv";
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+ try (Writer writer = new BufferedWriter(
+ new OutputStreamWriter(new FileOutputStream(File.createTempFile(outputFilePath + this.getName(), "" + time)), UTF_8))) {
writer.write("name, UUID, invariantUUID, state, version\n");
Collection<List<Component>> collection = vertices.values();
for (List<Component> compList : collection) {