From d06b3dd32d8d1bc43a0c956aff323173cafe0c59 Mon Sep 17 00:00:00 2001
From: vasraz
Date: Mon, 15 Nov 2021 15:45:28 +0000
Subject: Move CSV's generation folder to target

Signed-off-by: Vasyl Razinkov
Change-Id: I379f20499eeb81f2dc82a943eaef4133ec6f6408
Issue-ID: SDC-3771
---
 .../sdc/asdctool/impl/ArtifactUuidFix.java    | 45 +++++++++++-----------
 .../executor/ArtifactValidatorExecutor.java   | 11 +++---
 .../ArtifactValidatorExecutorContract.java    |  8 +---
 .../IArtifactValidatorExecutorContract.java   |  5 +--
 .../migration/task/MigrationTasksTest.java    | 41 ++++++++++----------
 5 files changed, 54 insertions(+), 56 deletions(-)

diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
index d6aad24a24..1765bd5b1f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
@@ -19,17 +19,17 @@
  */
 package org.openecomp.sdc.asdctool.impl;
 
-import static java.nio.charset.StandardCharsets.UTF_8;
-
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import com.google.gson.JsonArray;
 import fj.data.Either;
 import java.io.BufferedWriter;
+import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.EnumMap;
@@ -44,9 +44,9 @@ import org.openecomp.sdc.asdctool.impl.validator.utils.VfModuleArtifactPayloadEx
 import org.openecomp.sdc.be.components.distribution.engine.VfModuleArtifactPayload;
 import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
 import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
@@ -85,9 +85,10 @@ import org.openecomp.sdc.be.tosca.ToscaRepresentation;
 import org.openecomp.sdc.common.api.ArtifactGroupTypeEnum;
 import org.openecomp.sdc.common.api.ArtifactTypeEnum;
 import org.openecomp.sdc.common.api.Constants;
-import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.openecomp.sdc.common.util.GeneralUtility;
 import org.openecomp.sdc.exception.ResponseFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 
 @org.springframework.stereotype.Component("artifactUuidFix")
@@ -95,7 +96,7 @@ public class ArtifactUuidFix {
 
     private static final String MIGRATION1707_ARTIFACT_UUID_FIX = "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ";
     private static final String FAILED_TO_FETCH_VF_RESOURCES = "Failed to fetch vf resources ";
-    private static Logger log = Logger.getLogger(ArtifactUuidFix.class.getName());
+    private static final Logger log = LoggerFactory.getLogger(ArtifactUuidFix.class);
     private JanusGraphDao janusGraphDao;
     private ToscaOperationFacade toscaOperationFacade;
     private ToscaExportHandler toscaExportUtils;
@@ -150,8 +151,8 @@ public class ArtifactUuidFix {
 
     private boolean fetchFaultVf(List<Resource> vfLst, long time) {
         log.info("Find fault VF ");
-        String fileName = "fault_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile("fault_", "" + time)), StandardCharsets.UTF_8))) {
             writer.write("vf name, vf id, state, version\n");
             Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
             hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
@@ -213,8 +214,8 @@ public class ArtifactUuidFix {
             log.info("No services as input");
             return true;
         }
-        String fileName = "problemVf_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile("problemVf_", "" + time)), StandardCharsets.UTF_8))) {
             writer.write("vf name, vf id, state, version, example service name\n");
             Set<String> vfIds = new HashSet<>();
             for (Service service : serviceList) {
@@ -242,7 +243,6 @@ public class ArtifactUuidFix {
                     }
                 }
             }
-            log.info("output file with list of Vf : {}", fileName);
         } catch (Exception e) {
             log.info("Failed to fetch services ", e);
             return false;
@@ -254,8 +254,8 @@ public class ArtifactUuidFix {
 
     private boolean fetchServices(String fixServices, List<Service> serviceList, long time) {
         log.info("Find problem Services {}", fixServices);
-        String fileName = "problemService_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile("problemService_", "" + time)), StandardCharsets.UTF_8))) {
             writer.write("service name, service id, state, version\n");
             Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
             hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
@@ -293,7 +293,6 @@ public class ArtifactUuidFix {
                 }
                 janusGraphDao.commit();
             }
-            log.info("output file with list of services : {}", fileName);
         } catch (Exception e) {
             log.info("Failed to fetch services ", e);
             return false;
@@ -529,6 +528,7 @@ public class ArtifactUuidFix {
         return false;
     }
 
+
     private boolean fix(List<Resource> vfLst, List<Service> serviceList, Map<String, List<Component>> nodesToFixTosca,
                         Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
         boolean res = true;
@@ -541,8 +541,8 @@ public class ArtifactUuidFix {
         }
         Set<String> fixedIds = new HashSet<>();
         long time = System.currentTimeMillis();
-        String fileName = "FailedGenerateTosca" + "_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile("FailedGenerateTosca", "" + time)), StandardCharsets.UTF_8))) {
             writer.write("componentType, name, version, UID, UUID, invariantUUID, state\n");
             List<Component> failedList = new ArrayList<>();
             if (res && nodesToFixTosca != null && !nodesToFixTosca.isEmpty()) {
@@ -574,6 +574,7 @@ public class ArtifactUuidFix {
             if (servicesToFixTosca != null && !servicesToFixTosca.isEmpty()) {
                 generateAndSaveToscaArtifacts(servicesToFixTosca, fixedIds, serviceList, failedList);
             }
+
             for (Component component : serviceList) {
                 res = generateToscaPerComponent(fixedIds, component);
                 if (res) {
@@ -721,7 +722,7 @@ public class ArtifactUuidFix {
                 }
             }
         }
-        log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(), res);
+        log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(), edgeLabelEnum, res);
         return res;
     }
 
@@ -857,10 +858,10 @@ public class ArtifactUuidFix {
                 if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && group.getArtifacts() != null) {
                     fixVfGroup(resource, artifactsMap, group);
                 }
-                if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && (group.getArtifacts() == null || group.getArtifacts()
-                    .isEmpty())) {
+                if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) &&
+                    (group.getArtifacts() == null || group.getArtifacts().isEmpty())) {
                     log.debug("Migration1707ArtifactUuidFix add group to delete list fix resource: id {}, name {} ", resource.getUniqueId(),
-                        resource.getName(), group.getName());
+                        resource.getName());
                     groupsToDelete.add(group);
                 }
             }
@@ -1008,8 +1009,8 @@ public class ArtifactUuidFix {
     public boolean validateTosca(Map<String, List<Component>> vertices, Map<String, List<Component>> compToFix, String name) {
         boolean result = true;
         long time = System.currentTimeMillis();
-        String fileName = name + "_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile(name, "" + time)), StandardCharsets.UTF_8))) {
             writer.write("name, UUID, invariantUUID, state, version\n");
             for (Map.Entry<String, List<Component>> entry : vertices.entrySet()) {
                 List<Component> compList = entry.getValue();
@@ -1128,7 +1129,7 @@ private void fillVfModuleInstHeatEnvPayload(Component parent, ComponentInstance instance, List<GroupInstance> groupsForCurrVF,
                                                 ArtifactDefinition vfModuleArtifact) {
-        log.debug("generate new vf module for component. name {}, id {}, Version {}", instance.getName(), instance.getUniqueId());
name {}, id {}", instance.getName(), instance.getUniqueId()); String uniqueId = UniqueIdBuilder .buildInstanceArtifactUniqueId(parent.getUniqueId(), instance.getUniqueId(), vfModuleArtifact.getArtifactLabel()); vfModuleArtifact.setUniqueId(uniqueId); diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java index 531d54a466..07f7ec0fd9 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java @@ -23,6 +23,7 @@ import static java.nio.charset.StandardCharsets.UTF_8; import fj.data.Either; import java.io.BufferedWriter; +import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; @@ -36,9 +37,9 @@ import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao; import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus; import org.openecomp.sdc.be.dao.jsongraph.GraphVertex; -import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao; import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum; import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum; import org.openecomp.sdc.be.model.ArtifactDefinition; @@ -55,7 +56,7 @@ public abstract class ArtifactValidatorExecutor { private final ToscaOperationFacade toscaOperationFacade; private final String name; - public ArtifactValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade, String name) { + protected ArtifactValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade, String name) { this.janusGraphDao = janusGraphDao; this.toscaOperationFacade = toscaOperationFacade; this.name = name; @@ -72,7 +73,7 @@ public abstract class ArtifactValidatorExecutor { log.error("getVerticesToValidate failed " + resultsEither.right().value()); return result; } - System.out.println("getVerticesToValidate: " + resultsEither.left().value().size() + " vertices to scan"); + log.info("getVerticesToValidate: {} vertices to scan", resultsEither.left().value().size()); List componentsList = resultsEither.left().value(); componentsList.forEach(vertex -> { String ivariantUuid = (String) vertex.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID); @@ -96,8 +97,8 @@ public abstract class ArtifactValidatorExecutor { public boolean validate(Map> vertices, String outputFilePath) { boolean result = true; long time = System.currentTimeMillis(); - String fileName = outputFilePath + this.getName() + "_" + time + ".csv"; - try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) { + try (Writer writer = new BufferedWriter( + new OutputStreamWriter(new FileOutputStream(File.createTempFile(outputFilePath + this.getName(), "" + time)), UTF_8))) { writer.write("name, UUID, invariantUUID, state, version\n"); Collection> collection = vertices.values(); for (List compList : collection) { diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutorContract.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutorContract.java index 621b832898..a940407d19 100644 --- 
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutorContract.java
@@ -20,7 +20,6 @@
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
-import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.mockito.Mockito.mock;
 
 import java.util.HashMap;
@@ -51,8 +50,7 @@
         VertexTypeEnum type = null;
         Map<GraphPropertyEnum, Object> hasProps = null;
 
-        Assertions.assertThrows(NullPointerException.class, () ->
-            testSubject.getVerticesToValidate(type, hasProps)
+        Assertions.assertThrows(NullPointerException.class, () -> testSubject.getVerticesToValidate(type, hasProps)
         );
     }
 
@@ -68,8 +66,6 @@
         Map<String, List<Component>> vertices = new HashMap<>();
         vertices.put("stam", linkedList);
 
-        // Initially no outputFilePath was passed to this function (hence it is set to null)
-        // TODO: Fix this null and see if the argument is used by this function
-        assertFalse(testSubject.validate(vertices, null));
+        Assertions.assertFalse(testSubject.validate(vertices, "target/"));
     }
 }
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executor/IArtifactValidatorExecutorContract.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executor/IArtifactValidatorExecutorContract.java
index 8af265e715..a21afb85fe 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executor/IArtifactValidatorExecutorContract.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executor/IArtifactValidatorExecutorContract.java
@@ -20,13 +20,13 @@
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
+import static org.mockito.Mockito.mock;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 
-import static org.mockito.Mockito.mock;
-
 public abstract class IArtifactValidatorExecutorContract {
 
     protected abstract IArtifactValidatorExecutor createTestSubject(
@@ -47,4 +47,3 @@
         );
     }
 }
-
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java
index 9a6db29ae7..5efb23f3e3 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -20,23 +20,20 @@
 package org.openecomp.sdc.asdctool.migration.task;
 
-import static org.junit.jupiter.api.Assertions.fail;
-
-import org.apache.commons.lang3.StringUtils;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.openecomp.sdc.asdctool.migration.core.DBVersion;
-import org.openecomp.sdc.asdctool.migration.core.task.Migration;
-import org.openecomp.sdc.asdctool.migration.scanner.ClassScanner;
-
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
+import org.apache.commons.lang.StringUtils;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.scanner.ClassScanner;
 
-
-public class MigrationTasksTest {
+public class MigrationTasksTest {
 
     public static final String MIGRATIONS_BASE_PACKAGE = "org.openecomp.sdc.asdctool.migration.tasks";
     private List<Migration> migrations;
@@ -52,21 +49,25 @@
         Map<DBVersion, List<Migration>> migrationsByVersion = migrations.stream().collect(Collectors.groupingBy(Migration::getVersion));
         migrationsByVersion.forEach((version, migrations) -> {
             if (migrations.size() > 1) {
-                System.out.println(String.format("the following migration tasks have the same version %s. versions must be unique", version.toString()));
-                fail(String.format("migration tasks %s has same version %s. migration tasks versions must be unique.", getMigrationsNameAsString(migrations), version.toString()));
+                System.out.println(
+                    String.format("the following migration tasks have the same version %s. versions must be unique", version.toString()));
+                Assertions.fail(String.format("migration tasks %s has same version %s. migration tasks versions must be unique.",
+                    getMigrationsNameAsString(migrations), version.toString()));
             }
         });
     }
 
     @Test
     public void testNoTaskWithVersionGreaterThanCurrentVersion() throws Exception {
-        Set<Migration> migrationsWithVersionsGreaterThanCurrent = migrations.stream().filter(mig -> mig.getVersion().compareTo(DBVersion.DEFAULT_VERSION) > 0)
-            .collect(Collectors.toSet());
+        Set<Migration> migrationsWithVersionsGreaterThanCurrent = migrations.stream()
+            .filter(mig -> mig.getVersion().compareTo(DBVersion.DEFAULT_VERSION) > 0)
+            .collect(Collectors.toSet());
         if (!migrationsWithVersionsGreaterThanCurrent.isEmpty()) {
-            fail(String.format("migrations tasks %s have version which is greater than DBVersion.DEFAULT_VERSION %s. did you forget to update current version?",
-                getMigrationsNameAsString(migrationsWithVersionsGreaterThanCurrent),
-                DBVersion.DEFAULT_VERSION.toString()));
+            Assertions.fail(String.format(
+                "migrations tasks %s have version which is greater than DBVersion.DEFAULT_VERSION %s. did you forget to update current version?",
+                getMigrationsNameAsString(migrationsWithVersionsGreaterThanCurrent),
+                DBVersion.DEFAULT_VERSION.toString()));
         }
     }
-- 
cgit 1.2.3-korg
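
Note on the pattern applied throughout this change: each report that used to be written to a hard-coded
"<prefix>_<timestamp>.csv" in the process working directory is now created through File.createTempFile.
The sketch below is an illustrative, self-contained example of that idiom, not code from the SDC tree; the
class name, the report directory and the sample row are assumptions. It uses the three-argument
File.createTempFile(prefix, suffix, directory) so the CSV lands under target/, whereas the two-argument
form used in the diff writes to the JVM default temporary directory (java.io.tmpdir).

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

// Hypothetical helper mirroring the try-with-resources + UTF-8 writer idiom used in the patch.
public class CsvReportSketch {

    public static File writeReport(String prefix) throws IOException {
        File reportDir = new File("target");   // assumed output folder, as in the commit subject
        reportDir.mkdirs();                    // make sure the folder exists before creating the file
        // Produces a unique name such as target/fault_8734918349.csv
        File report = File.createTempFile(prefix, ".csv", reportDir);
        try (Writer writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(report), StandardCharsets.UTF_8))) {
            writer.write("vf name, vf id, state, version\n");     // header row as in fetchFaultVf
            writer.write("exampleVf, vf-0001, CERTIFIED, 1.0\n"); // sample data row (made up)
        }
        return report;
    }

    public static void main(String[] args) throws IOException {
        System.out.println("CSV written to " + writeReport("fault_"));
    }
}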