From 51d50f0ef642e0f996a1c8b8d2ef4838bdfec892 Mon Sep 17 00:00:00 2001
From: Tal Gitelman
Date: Sun, 10 Dec 2017 18:55:03 +0200
Subject: Final commit to master merge from

Change-Id: Ib464f9a8828437c86fe6def8af238aaf83473507
Issue-ID: SDC-714
Signed-off-by: Tal Gitelman
---
 .../ArtifactUUIDFixConfiguration.java | 327 ++++++
 .../configuration/ConfigurationUploader.java | 28 +
 .../sdc/asdctool/impl/ArtifactUuidFix.java | 1202 ++++++++++++++++++++
 .../asdctool/impl/validator/ArtifactToolBL.java | 38 +
 .../validator/config/ValidationConfigManager.java | 12 +-
 .../config/ValidationToolConfiguration.java | 18 +
 .../executers/ArtifactValidatorExecuter.java | 145 +++
 .../executers/IArtifactValidatorExecuter.java | 9 +
 .../NodeToscaArtifactsValidatorExecuter.java | 41 +
 .../ServiceToscaArtifactsValidatorExecutor.java | 39 +
 .../VFToscaArtifactValidatorExecutor.java | 43 +
 .../impl/validator/utils/ReportManager.java | 2 +-
 .../sdc/asdctool/main/ArtifactUUIDFixMenu.java | 45 +
 .../sdc/asdctool/main/ArtifactValidatorTool.java | 46 +
 .../sdc/asdctool/main/UpdateIsVnfMenu.java | 1 -
 .../sdc/asdctool/main/ValidationTool.java | 2 +-
 .../migration/config/MigrationSpringConfig.java | 309 ++++-
 .../asdctool/migration/core/SdcMigrationTool.java | 10 +-
 .../core/execution/MigrationExecutionResult.java | 4 +-
 .../core/execution/MigrationExecutor.java | 4 +-
 .../core/execution/MigrationExecutorImpl.java | 8 +-
 .../migration/core/task/IMigrationStage.java | 20 +
 .../asdctool/migration/core/task/Migration.java | 16 +-
 .../migration/core/task/PostMigration.java | 19 +
 .../asdctool/migration/dao/MigrationTasksDao.java | 20 +-
 .../sdc/asdctool/migration/main/MigrationMenu.java | 13 +-
 .../migration/resolver/MigrationResolver.java | 6 +-
 .../resolver/SpringBeansMigrationResolver.java | 29 +-
 .../asdctool/migration/service/SdcRepoService.java | 4 +-
 .../migration/tasks/handlers/OutputHandler.java | 9 +
 .../migration/tasks/handlers/XlsOutputHandler.java | 66 ++
 .../tasks/mig1710/UpgradeMigration1710.java | 739 ++++++++++++
 .../servlets/ExportImportTitanServlet.java | 4 +-
 .../resources/config/Artifact-Generator.properties | 264 +++++
 .../src/main/resources/config/configuration.yaml | 195 ++--
 .../src/main/resources/config/elasticsearch.yml | 7 +-
 .../src/main/resources/config/titan.properties | 9 +-
 asdctool/src/main/resources/elasticsearch.yml | 399 +++++++
 asdctool/src/main/resources/scripts/UUIDFix1707.sh | 4 +-
 .../resources/scripts/artifactsIdValidation.sh | 32 +
 .../src/main/resources/scripts/sdc-migration.sh | 3 +
 .../resources/scripts/upgradePostMigration1710.sh | 33 +
 42 files changed, 4078 insertions(+), 146 deletions(-)
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/IArtifactValidatorExecuter.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/IMigrationStage.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
 create mode 100644 asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
 create mode 100644 asdctool/src/main/resources/config/Artifact-Generator.properties
 create mode 100644 asdctool/src/main/resources/elasticsearch.yml
 create mode 100644 asdctool/src/main/resources/scripts/artifactsIdValidation.sh
 create mode 100644 asdctool/src/main/resources/scripts/upgradePostMigration1710.sh

(limited to 'asdctool/src/main')

diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
new file mode 100644
index 0000000000..f14196681e
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
@@ -0,0 +1,327 @@
+package org.openecomp.sdc.asdctool.configuration;
+
+import org.openecomp.sdc.asdctool.impl.ArtifactUuidFix;
+import org.openecomp.sdc.be.auditing.impl.AuditingManager;
+import org.openecomp.sdc.be.components.ArtifactsResolver;
+import org.openecomp.sdc.be.components.distribution.engine.DistributionEngine;
+import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
+import org.openecomp.sdc.be.components.impl.AdditionalInformationBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ArtifactResolverImpl;
+import org.openecomp.sdc.be.components.impl.ArtifactsBusinessLogic;
+import org.openecomp.sdc.be.components.impl.CompositionBusinessLogic;
+import org.openecomp.sdc.be.components.impl.GroupBusinessLogic;
+import org.openecomp.sdc.be.components.impl.InputsBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ProductBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ResourceImportManager;
+import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ServiceComponentInstanceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.VFComponentInstanceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.generic.GenericTypeBusinessLogic;
+import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
+import org.openecomp.sdc.be.components.merge.heat.HeatEnvArtifactsMergeBusinessLogic;
+import org.openecomp.sdc.be.components.merge.input.InputsValuesMergingBusinessLogic;
+import org.openecomp.sdc.be.components.merge.instance.ComponentInstanceMergeDataBusinessLogic;
+import org.openecomp.sdc.be.components.merge.property.DataDefinitionsValuesMergingBusinessLogic;
+import
org.openecomp.sdc.be.dao.DAOTitanStrategy; +import org.openecomp.sdc.be.dao.TitanClientStrategy; +import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao; +import org.openecomp.sdc.be.dao.cassandra.CassandraClient; +import org.openecomp.sdc.be.dao.config.DAOSpringConfig; +import org.openecomp.sdc.be.dao.es.ElasticSearchClient; +import org.openecomp.sdc.be.dao.impl.AuditingDao; +import org.openecomp.sdc.be.dao.jsongraph.TitanDao; +import org.openecomp.sdc.be.dao.titan.TitanGenericDao; +import org.openecomp.sdc.be.dao.titan.TitanGraphClient; +import org.openecomp.sdc.be.impl.ComponentsUtils; +import org.openecomp.sdc.be.model.cache.ComponentCache; +import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade; +import org.openecomp.sdc.be.model.operations.impl.CapabilityTypeOperation; +import org.openecomp.sdc.be.model.operations.impl.ComponentInstanceOperation; +import org.openecomp.sdc.be.model.operations.impl.CsarOperation; +import org.openecomp.sdc.be.model.operations.impl.ElementOperation; +import org.openecomp.sdc.be.model.operations.impl.GraphLockOperation; +import org.openecomp.sdc.be.model.operations.impl.GroupInstanceOperation; +import org.openecomp.sdc.be.model.operations.impl.GroupOperation; +import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation; +import org.openecomp.sdc.be.model.operations.impl.PropertyOperation; +import org.openecomp.sdc.be.tosca.CsarUtils; +import org.openecomp.sdc.be.tosca.ToscaExportHandler; +import org.openecomp.sdc.be.user.UserBusinessLogic; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.config.PropertiesFactoryBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; +import org.springframework.core.io.FileSystemResource; + +@Configuration +@Import(DAOSpringConfig.class) +@ComponentScan({ + "org.openecomp.sdc.be.model.operations.impl", + "org.openecomp.sdc.be.model.cache", + "org.openecomp.sdc.be.dao.titan", + "org.openecomp.sdc.be.dao.cassandra", + "org.openecomp.sdc.be.model.jsontitan.operations", + "org.openecomp.sdc.be.dao.jsongraph", + "org.openecomp.sdc.be.tosca", + "org.openecomp.sdc.be.components.merge", + }) +public class ArtifactUUIDFixConfiguration { + + @Bean + public ArtifactUuidFix artifactUuidFix() { + return new ArtifactUuidFix(); + } + + @Bean(name = "cassandra-client") + public CassandraClient cassandraClient() { + return new CassandraClient(); + } + + + @Bean(name = "dao-titan-strategy") + public TitanClientStrategy daoStrategy() { + return new DAOTitanStrategy(); + } + + @Bean(name = "titan-dao") + public TitanDao titanDao(@Qualifier("titan-client") TitanGraphClient titanGraphClient) { + return new TitanDao(titanGraphClient); + } + + @Bean(name = "titan-client", initMethod = "createGraph") + public TitanGraphClient titanClient(@Qualifier("dao-titan-strategy") TitanClientStrategy titanClientStrategy) { + return new TitanGraphClient(titanClientStrategy); + } + + @Bean(name = "resource-business-logic") + public ResourceBusinessLogic resourceBusinessLogic() { + return new ResourceBusinessLogic(); + } + +// @Bean(name = "healthCheckBusinessLogic") +// public HealthCheckBusinessLogic healthCheckBusinessLogic() { +// return new HealthCheckBusinessLogic(); +// } +// +// @Bean(name = "distribution-engine-cluster-health") +// public DistributionEngineClusterHealth 
distributionEngineClusterHealth() { +// return new DistributionEngineClusterHealth(); +// } +// +// @Bean(name = "cassandra-health-check") +// public CassandraHealthCheck cassandraHealthCheck() { +// return new CassandraHealthCheck(); +// } + +// @Bean(name = "switchover-detector") +// public SwitchoverDetector switchoverDetector() { +// return new SwitchoverDetector(); +// } + + @Bean(name = "service-business-logic") + public ServiceBusinessLogic serviceBusinessLogic() { + return new ServiceBusinessLogic(); + } + + @Bean(name = "capability-type-operation") + public CapabilityTypeOperation CapabilityTypeOperation() { + return new CapabilityTypeOperation(); + } + + @Bean(name = "lifecycle-business-logic") + public LifecycleBusinessLogic lifecycleBusinessLogic() { + return new LifecycleBusinessLogic(); + } + + @Bean(name = "property-operation") + public PropertyOperation propertyOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) { + return new PropertyOperation(titanGenericDao); + } + + @Bean(name = "csar-operation") + public CsarOperation csarOperation() { + return new CsarOperation(); + } + + @Bean(name = "vf-component-instance-business-logic") + public VFComponentInstanceBusinessLogic vFComponentInstanceBusinessLogic() { + return new VFComponentInstanceBusinessLogic(); + } + + @Bean(name = "resource-import-manager") + public ResourceImportManager resourceImportManager() { + return new ResourceImportManager(); + } + + @Bean(name = "group-business-logic") + public GroupBusinessLogic groupBusinessLogic() { + return new GroupBusinessLogic(); + } + + @Bean(name = "inputs-business-logic") + public InputsBusinessLogic inputsBusinessLogic() { + return new InputsBusinessLogic(); + } + + @Bean(name = "composition-business-logic") + public CompositionBusinessLogic compositionBusinessLogic() { + return new CompositionBusinessLogic(); + } + + @Bean(name = "artifacts-business-logic") + public ArtifactsBusinessLogic artifactsBusinessLogic() { + return new ArtifactsBusinessLogic(); + } + + @Bean(name = "component-cache") + public ComponentCache componentCache() { + return new ComponentCache(); + } + + @Bean(name = "componentUtils") + public ComponentsUtils componentsUtils() { + return new ComponentsUtils(); + } + + @Bean(name = "user-business-logic") + public UserBusinessLogic userBusinessLogic() { + return new UserBusinessLogic(); + } + + @Bean(name = "graph-lock-operation") + public GraphLockOperation graphLockOperation() { + return new GraphLockOperation(); + } + + @Bean(name = "titan-generic-dao") + public TitanGenericDao titanGenericDao(@Qualifier("titan-client") TitanGraphClient titanGraphClient) { + return new TitanGenericDao(titanGraphClient); + } + + @Bean(name = "element-operation") + public ElementOperation elementOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) { + return new ElementOperation(titanGenericDao); + } + + @Bean(name = "group-operation") + public GroupOperation groupOperation() { + return new GroupOperation(); + } + + @Bean(name = "group-instance-operation") + public GroupInstanceOperation groupInstanceOperation() { + return new GroupInstanceOperation(); + } + + @Bean(name = "group-type-operation") + public GroupTypeOperation groupTypeOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao, @Qualifier("property-operation") PropertyOperation propertyOperation) { + return new GroupTypeOperation(titanGenericDao, propertyOperation); + } + + @Bean(name = "tosca-operation-facade") + public ToscaOperationFacade 
toscaOperationFacade() { + return new ToscaOperationFacade(); + } + + @Bean(name = "distribution-engine") + public DistributionEngine distributionEngine() { + return null; + } + + @Bean(name = "audit-cassandra-dao") + public AuditCassandraDao auditCassandraDao() { + return new AuditCassandraDao(); + } + + @Bean(name = "service-component-instance-business-logic") + public ServiceComponentInstanceBusinessLogic serviceComponentInstanceBusinessLogic() { + return new ServiceComponentInstanceBusinessLogic(); + } + + @Bean("tosca-export-handler") + public ToscaExportHandler toscaExportHandler() { + return new ToscaExportHandler(); + } + + @Bean(name = "component-instance-operation") + public ComponentInstanceOperation componentInstanceOperation() { + return new ComponentInstanceOperation(); + } + + @Bean(name = "additional-information-business-logic") + public AdditionalInformationBusinessLogic additionalInformationBusinessLogic() { + return new AdditionalInformationBusinessLogic(); + } + + @Bean(name = "auditing-manager") + public AuditingManager auditingManager() { + return new AuditingManager(); + } + + @Bean(name = "auditing-dao") + public AuditingDao auditingDao() { + return new AuditingDao(); + } + + @Bean(name = "elasticsearch-client", initMethod = "initialize") + public ElasticSearchClient elasticSearchClient() { + return new ElasticSearchClient(); + } + + @Bean(name = "csar-utils") + public CsarUtils csarUtils() { + return new CsarUtils(); + } + + @Bean(name = "service-distribution-artifacts-builder") + public ServiceDistributionArtifactsBuilder serviceDistributionArtifactsBuilder() { + return new ServiceDistributionArtifactsBuilder(); + } + + @Bean(name = "product-business-logic") + public ProductBusinessLogic productBusinessLogic() { + return null; + } + + @Bean(name = "dataDefinitionsValuesMergingBusinessLogic") + public DataDefinitionsValuesMergingBusinessLogic dataDefinitionsValuesMergingBusinessLogic() { + return new DataDefinitionsValuesMergingBusinessLogic(); + } + + @Bean(name = "artifacts-resolver") + public ArtifactsResolver artifactsResolver() { + return new ArtifactResolverImpl(); + } + + @Bean(name = "InputsValuesMergingBusinessLogic") + public InputsValuesMergingBusinessLogic InputsValuesMergingBusinessLogic(){ + return new InputsValuesMergingBusinessLogic(); + } + + @Bean(name = "GenericTypeBusinessLogic") + public GenericTypeBusinessLogic genericTypeBusinessLogic(){ + return new GenericTypeBusinessLogic(); + } + + @Bean(name ="componentInstanceMergeDataBusinessLogic") + public ComponentInstanceMergeDataBusinessLogic componentInstanceMergeDataBusinessLogic(){ + return new ComponentInstanceMergeDataBusinessLogic(); + } + + @Bean(name ="heatEnvArtifactsMergeBusinessLogic") + public HeatEnvArtifactsMergeBusinessLogic heatEnvArtifactsMergeBusinessLogic(){ + return new HeatEnvArtifactsMergeBusinessLogic(); + } + + @Bean(name = "elasticsearchConfig") + public PropertiesFactoryBean mapper() { + String configHome = System.getProperty("config.home"); + PropertiesFactoryBean bean = new PropertiesFactoryBean(); + bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml")); + return bean; + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java new file mode 100644 index 0000000000..267f20904c --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java @@ -0,0 +1,28 @@ 
+package org.openecomp.sdc.asdctool.configuration; + +import java.io.File; + +import org.openecomp.sdc.be.config.ConfigurationManager; +import org.openecomp.sdc.common.api.ConfigurationSource; +import org.openecomp.sdc.common.impl.ExternalConfiguration; +import org.openecomp.sdc.common.impl.FSConfigurationSource; + +public class ConfigurationUploader { + + public static void uploadConfigurationFiles(String appConfigDir) { + ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir); + new ConfigurationManager(configurationSource); + ExternalConfiguration.setAppVersion(ConfigurationManager.getConfigurationManager().getConfiguration().getAppVersion()); + System.setProperty("config.home", appConfigDir); + System.setProperty("artifactgenerator.config", buildArtifactGeneratorPath(appConfigDir)); + } + + private static String buildArtifactGeneratorPath(String appConfigDir) { + StringBuilder artifactGeneratorPath = new StringBuilder(appConfigDir); + if(!appConfigDir.endsWith(File.separator)){ + artifactGeneratorPath.append(File.separator); + } + artifactGeneratorPath.append(ConfigurationManager.getConfigurationManager().getConfiguration().getArtifactGeneratorConfig()); + return artifactGeneratorPath.toString(); + } +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java new file mode 100644 index 0000000000..1328f3f51b --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java @@ -0,0 +1,1202 @@ +package org.openecomp.sdc.asdctool.impl; + +import java.io.BufferedWriter; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +import org.openecomp.sdc.be.components.distribution.engine.VfModuleArtifactPayload; +import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao; +import org.openecomp.sdc.be.dao.jsongraph.GraphVertex; +import org.openecomp.sdc.be.dao.jsongraph.TitanDao; +import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum; +import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum; +import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum; +import org.openecomp.sdc.be.dao.titan.TitanOperationStatus; +import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition; +import org.openecomp.sdc.be.datatypes.elements.GroupDataDefinition; +import org.openecomp.sdc.be.datatypes.elements.MapArtifactDataDefinition; +import org.openecomp.sdc.be.datatypes.elements.MapGroupsDataDefinition; +import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum; +import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum; +import org.openecomp.sdc.be.datatypes.tosca.ToscaDataDefinition; +import org.openecomp.sdc.be.model.ArtifactDefinition; +import org.openecomp.sdc.be.model.Component; +import org.openecomp.sdc.be.model.ComponentInstance; +import org.openecomp.sdc.be.model.ComponentParametersView; +import org.openecomp.sdc.be.model.DistributionStatusEnum; +import org.openecomp.sdc.be.model.GroupDefinition; +import 
org.openecomp.sdc.be.model.GroupInstance; +import org.openecomp.sdc.be.model.LifecycleStateEnum; +import org.openecomp.sdc.be.model.Resource; +import org.openecomp.sdc.be.model.Service; +import org.openecomp.sdc.be.model.jsontitan.datamodel.TopologyTemplate; +import org.openecomp.sdc.be.model.jsontitan.datamodel.ToscaElement; +import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade; +import org.openecomp.sdc.be.model.jsontitan.utils.ModelConverter; +import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus; +import org.openecomp.sdc.be.resources.data.ESArtifactData; +import org.openecomp.sdc.be.tosca.CsarUtils; +import org.openecomp.sdc.be.tosca.ToscaError; +import org.openecomp.sdc.be.tosca.ToscaExportHandler; +import org.openecomp.sdc.be.tosca.ToscaRepresentation; +import org.openecomp.sdc.common.api.ArtifactTypeEnum; +import org.openecomp.sdc.common.api.Constants; +import org.openecomp.sdc.common.util.GeneralUtility; +import org.openecomp.sdc.exception.ResponseFormat; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +import fj.data.Either; + +@org.springframework.stereotype.Component("artifactUuidFix") +public class ArtifactUuidFix { + + @Autowired + private TitanDao titanDao; + + @Autowired + private ToscaOperationFacade toscaOperationFacade; + @Autowired + private ToscaExportHandler toscaExportUtils; + @Autowired + private ArtifactCassandraDao artifactCassandraDao; + + @Autowired + private CsarUtils csarUtils; + + private static Logger log = LoggerFactory.getLogger(ArtifactUuidFix.class.getName()); + + public boolean doFix(String fixComponent, String runMode) { + List vfLst = new ArrayList<>(); + List serviceList = new ArrayList<>(); + Map> nodeToFixTosca = new HashMap<>(); + Map> vfToFixTosca = new HashMap<>(); + Map> serviceToFixTosca = new HashMap<>(); + + long time = System.currentTimeMillis(); + + doFixTosca(nodeToFixTosca, vfToFixTosca, serviceToFixTosca); + + if (fixComponent.equals("vf_only")) { + if (fetchFaultVf(fixComponent, vfLst, time) == false) { + return false; + } + } else { + if (fetchServices(fixComponent, serviceList, time) == false) { + return false; + } + } + if (runMode.equals("service_vf") || runMode.equals("fix")) { + log.info("Mode {}. Find problem VFs", runMode); + if (fetchVf(serviceList, vfLst, time) == false) { + log.info("Mode {}. Find problem VFs finished with failure", runMode); + return false; + } + log.info("Mode {}. Find problem VFs finished with success", runMode); + } + if (runMode.equals("fix") || runMode.equals("fix_only_services")) { + log.info("Mode {}. Start fix", runMode); + if (fix(vfLst, serviceList, nodeToFixTosca, vfToFixTosca, serviceToFixTosca) == false) { + log.info("Mode {}. Fix finished with failure", runMode); + return false; + } + log.info("Mode {}. 
Fix finished with success", runMode); + } + + return true; + } + + private boolean fetchFaultVf(String fixComponent, List vfLst, long time) { + log.info("Find fault VF "); + Writer writer = null; + try { + String fileName = "fault_" + time + ".csv"; + writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8")); + writer.write("vf name, vf id, state, version\n"); + + Map hasProps = new HashMap<>(); + hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name()); + hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF.name()); + + Map hasNotProps = new HashMap<>(); + hasNotProps.put(GraphPropertyEnum.IS_DELETED, true); + log.info("Try to fetch resources with properties {} and not {}", hasProps, hasNotProps); + + Either, TitanOperationStatus> servicesByCriteria = titanDao + .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll); + if (servicesByCriteria.isRight()) { + log.info("Failed to fetch resources {}", servicesByCriteria.right().value()); + return false; + } + List resources = servicesByCriteria.left().value(); + for (GraphVertex gv : resources) { + ComponentParametersView filter = new ComponentParametersView(true); + filter.setIgnoreComponentInstances(false); + filter.setIgnoreArtifacts(false); + filter.setIgnoreGroups(false); + + Either toscaElement = toscaOperationFacade + .getToscaElement(gv.getUniqueId()); + if (toscaElement.isRight()) { + log.info("Failed to fetch resources {} {}", gv.getUniqueId(), toscaElement.right().value()); + return false; + } + + Resource resource = toscaElement.left().value(); + String resourceName = resource.getName(); + Map deploymentArtifacts = resource.getDeploymentArtifacts(); + List groups = resource.getGroups(); + if (groups == null || groups.isEmpty()) { + log.info("No groups for resource {} id {} ", resourceName, gv.getUniqueId()); + continue; + } + boolean isProblematic = false; + for (GroupDefinition gr : groups) { + if (gr.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) { + if (isProblematicGroup(gr, resourceName, deploymentArtifacts)) { + isProblematic = true; + break; + } + } + } + if (isProblematic) { + vfLst.add(resource); + writeModuleResultToFile(writer, resource, null); + writer.flush(); + } + titanDao.commit(); + } + + } catch (Exception e) { + log.info("Failed to fetch vf resources ", e); + return false; + } finally { + titanDao.commit(); + try { + writer.flush(); + writer.close(); + } catch (Exception ex) { + /* ignore */ + } + } + return true; + } + + private boolean fetchVf(List serviceList, List vfLst, long time) { + log.info("Find problem VF "); + if (serviceList.isEmpty()) { + log.info("No services as input"); + return true; + } + Writer writer = null; + try { + String fileName = "problemVf_" + time + ".csv"; + writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8")); + writer.write("vf name, vf id, state, version, example service name\n"); + Set vfIds = new HashSet<>(); + for (Service service : serviceList) { + List componentInstances = service.getComponentInstances().stream() + .filter(ci -> ci.getOriginType().equals(OriginTypeEnum.VF)).collect(Collectors.toList()); + for (ComponentInstance ci : componentInstances) { + if (!vfIds.contains(ci.getComponentUid())) { + vfIds.add(ci.getComponentUid()); + Either toscaElement = toscaOperationFacade + .getToscaElement(ci.getComponentUid()); + if (toscaElement.isRight()) { + log.info("Failed to fetch resource {} {}", ci.getComponentUid(), 
+ toscaElement.right().value()); + return false; + } + Resource resource = toscaElement.left().value(); + if (resource.getResourceType().equals(ResourceTypeEnum.VF)) { + vfLst.add(resource); + writeModuleResultToFile(writer, resource, service); + writer.flush(); + titanDao.commit(); + } + } + } + } + log.info("output file with list of Vf : {}", fileName); + } catch (Exception e) { + log.info("Failed to fetch services ", e); + return false; + } finally { + titanDao.commit(); + try { + writer.flush(); + writer.close(); + } catch (Exception ex) { + /* ignore */ + } + } + return true; + } + + private boolean fetchServices(String fixServices, List serviceList, long time) { + log.info("Find problem Services {}", fixServices); + Writer writer = null; + + try { + String fileName = "problemService_" + time + ".csv"; + writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8")); + writer.write("service name, service id, state, version\n"); + + Map hasProps = new HashMap<>(); + hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name()); + if (fixServices.equals("distributed_only")) { + hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name()); + hasProps.put(GraphPropertyEnum.DISTRIBUTION_STATUS, DistributionStatusEnum.DISTRIBUTED.name()); + } + + Map hasNotProps = new HashMap<>(); + hasNotProps.put(GraphPropertyEnum.IS_DELETED, true); + log.info("Try to fetch services with properties {} and not {}", hasProps, hasNotProps); + + Either, TitanOperationStatus> servicesByCriteria = titanDao + .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll); + if (servicesByCriteria.isRight()) { + log.info("Failed to fetch services {}", servicesByCriteria.right().value()); + return false; + } + List services = servicesByCriteria.left().value(); + for (GraphVertex gv : services) { + ComponentParametersView filter = new ComponentParametersView(true); + filter.setIgnoreComponentInstances(false); + filter.setIgnoreArtifacts(false); + filter.setIgnoreGroups(false); + + Either toscaElement = toscaOperationFacade + .getToscaElement(gv.getUniqueId()); + if (toscaElement.isRight()) { + log.info("Failed to fetch service {} {}", gv.getUniqueId(), toscaElement.right().value()); + continue; + } + Service service = toscaElement.left().value(); + List componentInstances = service.getComponentInstances(); + boolean isProblematic = false; + if (componentInstances == null) { + log.info("No instances for service {} ", gv.getUniqueId()); + continue; + } + String serviceName = (String) gv.getMetadataProperty(GraphPropertyEnum.NAME); + + for (ComponentInstance ci : componentInstances) { + Map deploymentArtifacts = ci.getDeploymentArtifacts(); + List groupInstances = ci.getGroupInstances(); + if (groupInstances == null || groupInstances.isEmpty()) { + log.info("No instance groups for instance {} in service {} id {} ", ci.getName(), serviceName, + gv.getUniqueId()); + continue; + } + + for (GroupInstance gi : groupInstances) { + if (gi.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) { + if (isProblematicGroupInstance(gi, ci.getName(), serviceName, deploymentArtifacts)) { + isProblematic = true; + break; + } + } + } + if (isProblematic) { + serviceList.add(service); + writeModuleResultToFile(writer, service, null); + writer.flush(); + break; + } + } + titanDao.commit(); + } + log.info("output file with list of services : {}", fileName); + } catch (Exception e) { + log.info("Failed to fetch services ", e); + return 
false; + } finally { + titanDao.commit(); + try { + writer.flush(); + writer.close(); + } catch (Exception ex) { + /* ignore */ + } + } + return true; + } + + private boolean isProblematicGroup(GroupDefinition gr, String resourceName, + Map deploymentArtifacts) { + List artifacts = gr.getArtifacts(); + List artifactsUuid = gr.getArtifactsUuid(); + Set artifactsSet = new HashSet<>(); + + if ((artifactsUuid == null || artifactsUuid.isEmpty()) && (artifacts == null || artifacts.isEmpty())) { + log.info("No groups in resource {} ", resourceName); + return true; + } + artifactsSet.addAll(artifacts); + if (artifactsSet.size() < artifacts.size()) { + log.info(" artifactsSet.size() < artifacts.size() group {} in resource {} ", gr.getName(), resourceName); + return true; + } + + if (artifacts.size() < artifactsUuid.size()) { + log.info(" artifacts.size() < artifactsUuid.size() group {} in resource {} ", gr.getName(), resourceName); + return true; + } + if (artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty())) { + log.info( + " artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty() group {} in resource {} ", + gr.getName(), resourceName); + return true; + } + if (artifactsUuid.contains(null)) { + log.info(" artifactsUuid.contains(null) group {} in resource {} ", gr.getName(), resourceName); + return true; + } + + for (String artifactId : artifacts) { + String artifactlabel = findArtifactLabelFromArtifactId(artifactId); + ArtifactDefinition artifactDefinition = deploymentArtifacts.get(artifactlabel); + if (artifactDefinition == null) { + log.info(" artifactDefinition == null label {} group {} in resource {} ", artifactlabel, gr.getName(), + resourceName); + return true; + } + ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifactDefinition.getArtifactType()); + if (artifactType != ArtifactTypeEnum.HEAT_ENV) { + if (!artifactId.equals(artifactDefinition.getUniqueId())) { + log.info( + " !artifactId.equals(artifactDefinition.getUniqueId() artifact {} artId {} group {} in resource {} ", + artifactlabel, artifactId, gr.getName(), resourceName); + return true; + } + if (!artifactsUuid.contains(artifactDefinition.getArtifactUUID())) { + log.info( + " artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} group {} in resource {} ", + artifactlabel, gr.getName(), resourceName); + return true; + } + } + } + for (String artifactUUID : artifactsUuid) { + String label = findArtifactLabelFromArtifactId(artifactUUID); + if (label != null && !label.isEmpty() && !label.equals("")) { + return true; + } + } + + return false; + } + + private boolean isProblematicGroupInstance(GroupInstance gi, String instName, String servicename, + Map deploymentArtifacts) { + List artifacts = gi.getArtifacts(); + List artifactsUuid = gi.getArtifactsUuid(); + List instArtifactsUuid = gi.getGroupInstanceArtifactsUuid(); + List instArtifactsId = gi.getGroupInstanceArtifacts(); + Set instArtifatIdSet = new HashSet<>(); + Set artifactsSet = new HashSet<>(); + + if ((artifactsUuid == null || artifactsUuid.isEmpty()) && (artifacts == null || artifacts.isEmpty())) { + log.info("No instance groups for instance {} in service {} ", instName, servicename); + return true; + } + artifactsSet.addAll(artifacts); + if (artifactsSet.size() < artifacts.size()) { + log.info(" artifactsSet.size() < artifacts.size() group {} in resource {} ", instName, servicename); + return true; + } + + if (instArtifactsId != null && !instArtifactsId.isEmpty()) { + 
instArtifatIdSet.addAll(instArtifactsId); + } + + if (artifacts.size() < artifactsUuid.size()) { + log.info(" artifacts.size() < artifactsUuid.size() inst {} in service {} ", instName, servicename); + return true; + } + if (!artifacts.isEmpty() && (artifactsUuid == null || artifactsUuid.isEmpty())) { + log.info( + " artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty() inst {} in service {} ", + instName, servicename); + return true; + } + if (artifactsUuid.contains(null)) { + log.info(" artifactsUuid.contains(null) inst {} in service {} ", instName, servicename); + return true; + } + if (instArtifactsId != null && instArtifatIdSet.size() < instArtifactsId.size()) { + log.info(" instArtifatIdSet.size() < instArtifactsId.size() inst {} in service {} ", instName, servicename); + return true; + } + + if ((instArtifactsId != null && instArtifactsUuid != null) + && instArtifactsId.size() != instArtifactsUuid.size()) { + log.info(" instArtifactsId.size() != instArtifactsUuid.size() inst {} in service {} ", instName, + servicename); + return true; + } + + for (String artifactId : artifacts) { + String artifactlabel = findArtifactLabelFromArtifactId(artifactId); + ArtifactDefinition artifactDefinition = deploymentArtifacts.get(artifactlabel); + if (artifactDefinition == null) { + log.info(" artifactDefinition == null label {} inst {} in service {} ", artifactlabel, instName, + servicename); + return true; + } + ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifactDefinition.getArtifactType()); + if (artifactType != ArtifactTypeEnum.HEAT_ENV) { + if (!artifactId.equals(artifactDefinition.getUniqueId())) { + log.info( + " !artifactId.equals(artifactDefinition.getUniqueId() artifact {} artId {} inst {} in service {} ", + artifactlabel, artifactId, instName, servicename); + return true; + } + if (!artifactsUuid.contains(artifactDefinition.getArtifactUUID())) { + log.info( + " artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ", + artifactlabel, instName, servicename); + return true; + } + } else { + if (instArtifactsUuid == null || instArtifactsUuid.isEmpty()) { + log.info(" instArtifactsUuid empty. 
label {} inst {} in service {} ", artifactlabel, instName, + servicename); + return true; + } + if (!instArtifactsUuid.contains(artifactDefinition.getArtifactUUID())) { + log.info( + " instArtifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ", + artifactlabel, instName, servicename); + return true; + } + } + } + for (String artifactUUID : artifactsUuid) { + String label = findArtifactLabelFromArtifactId(artifactUUID); + if (label != null && !label.isEmpty() && !label.equals("")) { + return true; + } + } + return false; + } + + private boolean fix(List vfLst, List serviceList, Map> nodesToFixTosca, + Map> vfToFixTosca, Map> servicesToFixTosca) { + boolean res = true; + log.info(" Fix started ***** "); + if (vfLst != null && !vfLst.isEmpty()) { + res = fixVf(vfLst); + + } + + if (res && serviceList != null && !serviceList.isEmpty()) { + res = fixServices(serviceList); + + } + + Set fixedIds = new HashSet<>(); + if (res && nodesToFixTosca != null && !nodesToFixTosca.isEmpty()) { + + generateAndSaveToscaArtifacts(nodesToFixTosca, fixedIds, null); + + for (Map.Entry> entry : nodesToFixTosca.entrySet()) { + List components = entry.getValue(); + for (Component c : components) { + + ToscaElement topologyTemplate = ModelConverter.convertToToscaElement(c); + Map arifacts = topologyTemplate.getToscaArtifacts(); + res = fixDataOnGraph(c.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS, + arifacts); + titanDao.commit(); + } + } + + } + if (res && vfToFixTosca != null && !vfToFixTosca.isEmpty()) { + + generateAndSaveToscaArtifacts(vfToFixTosca, fixedIds, vfLst); + + for (Map.Entry> entry : vfToFixTosca.entrySet()) { + List components = entry.getValue(); + for (Component c : components) { + TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(c); + Map arifacts = topologyTemplate.getToscaArtifacts(); + res = fixDataOnGraph(c.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS, + arifacts); + titanDao.commit(); + } + } + + } + + if (res && servicesToFixTosca != null && !servicesToFixTosca.isEmpty()) { + generateAndSaveToscaArtifacts(servicesToFixTosca, fixedIds, serviceList); + + for (Map.Entry> entry : servicesToFixTosca.entrySet()) { + List components = entry.getValue(); + for (Component c : components) { + TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(c); + Map arifacts = topologyTemplate.getToscaArtifacts(); + res = fixDataOnGraph(c.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS, + arifacts); + titanDao.commit(); + } + } + + } + + if (res) { + + for (Component component : vfLst) { + generateToscaPerComponent(fixedIds, component); + + TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(component); + Map groups = topologyTemplate.getGroups(); + res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.GROUPS, EdgeLabelEnum.GROUPS, groups); + if (res) { + Map arifacts = topologyTemplate.getDeploymentArtifacts(); + res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.DEPLOYMENT_ARTIFACTS, + EdgeLabelEnum.DEPLOYMENT_ARTIFACTS, arifacts); + } + if (res) { + Map arifacts = topologyTemplate.getToscaArtifacts(); + res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, + EdgeLabelEnum.TOSCA_ARTIFACTS, arifacts); + } + titanDao.commit(); + } + } + + if (res) { + + for (Component component : serviceList) { + generateToscaPerComponent(fixedIds, component); + + TopologyTemplate topologyTemplate 
= ModelConverter.convertToToscaElement(component); + Map groups = topologyTemplate.getInstGroups(); + res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.INST_GROUPS, EdgeLabelEnum.INST_GROUPS, + groups); + + if (res) { + Map artifacts = topologyTemplate.getInstDeploymentArtifacts(); + res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.INST_DEPLOYMENT_ARTIFACTS, + EdgeLabelEnum.INST_DEPLOYMENT_ARTIFACTS, artifacts); + } + if (res) { + Map arifacts = topologyTemplate.getToscaArtifacts(); + res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, + EdgeLabelEnum.TOSCA_ARTIFACTS, arifacts); + } + titanDao.commit(); + } + + } + log.info(" Fix finished with res {} ***** ", res); + return res; + } + + private boolean generateAndSaveToscaArtifacts(Map> nodesToFixTosca, Set fixedIds, + List componentsWithFailedGroups) { + boolean res = true; + log.debug("Migration1707ArtifactUuidFix generateAndSaveToscaArtifacts started "); + for (Map.Entry> entry : nodesToFixTosca.entrySet()) { + + List component = entry.getValue(); + for (Component c : component) { + log.debug("Migration1707ArtifactUuidFix fix tosca on component : id {}, name {} ", c.getUniqueId(), + c.getName()); + if (componentsWithFailedGroups != null) { + Optional op = (Optional) componentsWithFailedGroups.stream() + .filter(cg -> cg.getUniqueId().equals(c.getUniqueId())).findAny(); + if (!op.isPresent()) + res = generateToscaPerComponent(fixedIds, c); + } else + res = generateToscaPerComponent(fixedIds, c); + } + } + log.debug("Migration1707ArtifactUuidFix generateAndSaveToscaArtifacts finished with res {} ", res); + return res; + } + + private boolean generateToscaPerComponent(Set fixedIds, Component c) { + boolean res = true; + log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent started component name {} id {}", + c.getName(), c.getUniqueId()); + try { + Either toscaElement = toscaOperationFacade + .getToscaFullElement(c.getUniqueId()); + if (toscaElement.isRight()) { + log.info("Failed to fetch resources {} {}", c.getUniqueId(), toscaElement.right().value()); + return false; + } + Component toscaElementFull = toscaElement.left().value(); + toscaElementFull.setGroups(c.getGroups()); + List ciListFull = toscaElementFull.getComponentInstances(); + List ciList = c.getComponentInstances(); + if (ciListFull != null && !ciListFull.isEmpty()) { + ciListFull.forEach(ciFull -> { + ComponentInstance compInst = ciList.stream() + .filter(ci -> ci.getUniqueId().equals(ciFull.getUniqueId())).findAny().get(); + ciFull.setGroupInstances(compInst.getGroupInstances()); + }); + } + + Map toscaArtifacts = c.getToscaArtifacts(); + log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent tocsa artifacts size {}", + toscaArtifacts.size()); + + Either either = Either.right(ToscaError.GENERAL_ERROR); + ArtifactDefinition toscaArtifact = null; + Optional op = toscaArtifacts.values().stream() + .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.TOSCA_TEMPLATE.getType())).findAny(); + + if (op.isPresent()) { + toscaArtifact = op.get(); + } + + if (toscaArtifact != null) { + log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent artifact name {} id {} esId {}", + toscaArtifact.getArtifactName(), toscaArtifact.getUniqueId(), toscaArtifact.getEsId()); + either = generateToscaArtifact(toscaElementFull, toscaArtifact); + if (either.isRight()) { + log.error("Couldn't generate and save tosca template component unique id {}, name {} error: {}", + toscaElementFull.getUniqueId(), 
toscaElementFull.getName(), either.right().value()); + res = false; + + } + } + if (res) { + + ArtifactDefinition csarArtifact = null; + op = toscaArtifacts.values().stream() + .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.TOSCA_CSAR.getType())).findAny(); + + if (op.isPresent()) { + csarArtifact = op.get(); + } + + if (csarArtifact != null) { + log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent artifact name {} id {} esId {}", + csarArtifact.getArtifactName(), csarArtifact.getUniqueId(), csarArtifact.getEsId()); + either = generateToscaArtifact(toscaElementFull, csarArtifact); + if (either.isRight()) { + log.error("Couldn't generate and save tosca csar for component uuid {}, id {}, name {}. error: {}", + toscaElementFull.getUUID(), toscaElementFull.getUniqueId(), toscaElementFull.getName(), either.right().value()); + res = false; + + } + } + } + c.setToscaArtifacts(toscaArtifacts); + + if (res) { + fixedIds.add(toscaElementFull.getUniqueId()); + } + } finally { + titanDao.commit(); + } + log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent finished component name {} id {} res {}", + c.getName(), c.getUniqueId(), res); + return res; + } + + private boolean fixDataOnGraph(String componentId, VertexTypeEnum vertexTypeEnum, + EdgeLabelEnum edgeLabelEnum, Map groups) { + log.debug("amount groups to update: VertexTypeEnum {} EdgeLabelEnum {} data size {}", vertexTypeEnum.getName(), + edgeLabelEnum, groups.size()); + boolean res = true; + Either getResponse = titanDao.getVertexById(componentId, + JsonParseFlagEnum.NoParse); + if (getResponse.isRight()) { + log.debug("Couldn't fetch component unique id {}, error: {}", componentId, getResponse.right().value()); + res = false; + + } + if (res) { + GraphVertex componentVertex = getResponse.left().value(); + + GraphVertex toscaDataVertex = null; + Either groupVertexEither = titanDao.getChildVertex(componentVertex, + edgeLabelEnum, JsonParseFlagEnum.ParseJson); + if (groupVertexEither.isRight() && groupVertexEither.right().value() == TitanOperationStatus.NOT_FOUND) { + log.debug("no child {} vertex for component unique id {}, error: {}", edgeLabelEnum, componentId, + groupVertexEither.right().value()); + return true; + } + if (groupVertexEither.isRight()) { + res = false; + log.debug("failed to get child {} vertex for component unique id {}, error: {}", edgeLabelEnum, + componentId, groupVertexEither.right().value()); + } + if (res) { + toscaDataVertex = groupVertexEither.left().value(); + toscaDataVertex.setJson(groups); + Either updatevertexEither = titanDao.updateVertex(toscaDataVertex); + if (updatevertexEither.isRight()) { + log.debug("failed to update vertex for component unique id {}, error: {}", componentId, + updatevertexEither.right().value()); + titanDao.rollback(); + return false; + } + } + } + log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(), + res); + return res; + } + + private boolean fixServices(List serviceList) { + for (Service service : serviceList) { + log.debug("Migration1707ArtifactUuidFix fix service: id {}, name {} ", service.getUniqueId(), + service.getName()); + List instances = service.getComponentInstances(); + for (ComponentInstance instance : instances) { + fixComponentInstances(service, instance); + } + + } + return true; + + } + + private void fixComponentInstances(Service service, ComponentInstance instance) { + Map artifactsMap = instance.getDeploymentArtifacts(); + List groupsList = instance.getGroupInstances(); + if 
(groupsList != null && artifactsMap != null) { + List groupsToDelete = new ArrayList<>(); + for (GroupInstance group : groupsList) { + fixGroupInstances(service, artifactsMap, groupsToDelete, group); + + } + + if (!groupsToDelete.isEmpty()) { + log.debug("Migration1707ArtifactUuidFix delete group: resource id {}, group instance to delete {} ", + service.getUniqueId(), groupsToDelete); + groupsList.removeAll(groupsToDelete); + + } + + Optional optionalVfModuleArtifact = artifactsMap.values().stream() + .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.VF_MODULES_METADATA.name())).findAny(); + if (optionalVfModuleArtifact.isPresent()) { + ArtifactDefinition vfModuleAertifact = optionalVfModuleArtifact.get(); + fillVfModuleInstHeatEnvPayload(groupsList, vfModuleAertifact); + } + } + } + + private void fixGroupInstances(Service service, Map artifactsMap, + List groupsToDelete, GroupInstance group) { + if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) { + log.debug("Migration1707ArtifactUuidFix fix group: resource id {}, group name {} ", service.getUniqueId(), + group.getName()); + List groupArtifacts = new ArrayList(group.getArtifacts()); + + group.getArtifacts().clear(); + group.getArtifactsUuid().clear(); + group.getGroupInstanceArtifacts().clear(); + group.getGroupInstanceArtifactsUuid().clear(); + + for (String artifactId : groupArtifacts) { + fixArtifactUndergroupInstances(artifactsMap, group, groupArtifacts, artifactId); + } + if (group.getArtifacts() == null || group.getArtifacts().isEmpty()) { + log.debug( + "Migration1707ArtifactUuidFix fix groupInstance add to delete list: resource id {} name {} , group name {} ", + service.getUniqueId(), service.getName(), group.getName()); + groupsToDelete.add(group); + } + } + } + + private void fixArtifactUndergroupInstances(Map artifactsMap, GroupInstance group, + List groupArtifacts, String artifactId) { + String artifactlabel = findArtifactLabelFromArtifactId(artifactId); + log.debug("Migration1707ArtifactUuidFix fix group: group name {} artifactId for fix {} artifactlabel {} ", + group.getName(), artifactId, artifactlabel); + if (!artifactlabel.isEmpty() && artifactsMap.containsKey(artifactlabel)) { + ArtifactDefinition artifact = artifactsMap.get(artifactlabel); + ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifact.getArtifactType()); + String correctArtifactId = artifact.getUniqueId(); + String correctArtifactUUID = artifact.getArtifactUUID(); + if (artifactType != ArtifactTypeEnum.HEAT_ENV) { + boolean isAddToGroup = true; + if (groupArtifacts.size() == 1) { + + if (artifactType == ArtifactTypeEnum.HEAT_ARTIFACT) { + isAddToGroup = false; + artifact.setArtifactType(ArtifactTypeEnum.OTHER.getType()); + } + } + if (isAddToGroup) { + log.debug( + "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ", + group.getName(), correctArtifactId, correctArtifactUUID); + group.getArtifacts().add(correctArtifactId); + if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) { + group.getArtifactsUuid().add(correctArtifactUUID); + } + } + } else { + log.debug( + "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ", + group.getName(), correctArtifactId, correctArtifactUUID); + group.getGroupInstanceArtifacts().add(correctArtifactId); + if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) { + group.getGroupInstanceArtifactsUuid().add(correctArtifactUUID); + } + } + } + } + + private boolean fixVf(List vfLst) { + for 
(Resource resource : vfLst) { + log.debug("Migration1707ArtifactUuidFix fix resource: id {}, name {} ", resource.getUniqueId(), + resource.getName()); + Map artifactsMap = resource.getDeploymentArtifacts(); + List groupsList = resource.getGroups(); + List groupsToDelete = new ArrayList<>(); + if (groupsList != null && artifactsMap != null) { + for (GroupDefinition group : groupsList) { + if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && group.getArtifacts() != null) { + fixVfGroup(resource, artifactsMap, group); + } + if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) + && (group.getArtifacts() == null || group.getArtifacts().isEmpty())) { + log.debug( + "Migration1707ArtifactUuidFix add group to delete list fix resource: id {}, name {} ", + resource.getUniqueId(), resource.getName(), group.getName()); + groupsToDelete.add(group); + } + } + + if (!groupsToDelete.isEmpty()) { + groupsList.removeAll(groupsToDelete); + + } + } + + } + + return true; + } + + private void fixVfGroup(Resource resource, Map artifactsMap, GroupDefinition group) { + log.debug("Migration1707ArtifactUuidFix fix group: resource id {}, group name {} ", resource.getUniqueId(), + group.getName()); + List groupArtifacts = new ArrayList<>(group.getArtifacts()); + + for (String artifactId : groupArtifacts) { + fixArtifactUnderGroup(artifactsMap, group, groupArtifacts, artifactId); + } + } + + private void fixArtifactUnderGroup(Map artifactsMap, GroupDefinition group, + List groupArtifacts, String artifactId) { + group.getArtifacts().clear(); + group.getArtifactsUuid().clear(); + String artifactlabel = findArtifactLabelFromArtifactId(artifactId); + log.debug("Migration1707ArtifactUuidFix fix group: group name {} artifactId for fix {} artifactlabel {} ", + group.getName(), artifactId, artifactlabel); + if (!artifactlabel.isEmpty() && artifactsMap.containsKey(artifactlabel)) { + ArtifactDefinition artifact = artifactsMap.get(artifactlabel); + String correctArtifactId = artifact.getUniqueId(); + String correctArtifactUUID = artifact.getArtifactUUID(); + boolean isAddToGroup = true; + if (groupArtifacts.size() == 1) { + ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifact.getArtifactType()); + if (artifactType == ArtifactTypeEnum.HEAT_ARTIFACT) { + isAddToGroup = false; + artifact.setArtifactType(ArtifactTypeEnum.OTHER.getType()); + } + } + if (isAddToGroup) { + log.debug( + "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ", + group.getName(), correctArtifactId, correctArtifactUUID); + group.getArtifacts().add(correctArtifactId); + if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) { + group.getArtifactsUuid().add(correctArtifactUUID); + } + } + + } + } + + private String findArtifactLabelFromArtifactId(String artifactId) { + String artifactLabel = ""; + + int index = artifactId.lastIndexOf('.'); + if (index > 0 && index + 1 < artifactId.length()) + artifactLabel = artifactId.substring(index + 1); + return artifactLabel; + } + + private void writeModuleResultToFile(Writer writer, org.openecomp.sdc.be.model.Component component, + Service service) { + try { + // "service name, service id, state, version + StringBuilder sb = new StringBuilder(component.getName()); + sb.append(",").append(component.getUniqueId()).append(",").append(component.getLifecycleState()).append(",") + .append(component.getVersion()); + if (service != null) { + sb.append(",").append(service.getName()); + } + sb.append("\n"); + writer.write(sb.toString()); + } 
catch (IOException e) { + log.error(e.getMessage()); + } + } + + private void writeModuleResultToFile(Writer writer, List components) { + try { + // "service name, service id, state, version + for (Component component : components) { + StringBuilder sb = new StringBuilder(component.getName()); + sb.append(",").append(component.getUniqueId()).append(",").append(component.getInvariantUUID()) + .append(",").append(component.getLifecycleState()).append(",").append(component.getVersion()); + + sb.append("\n"); + writer.write(sb.toString()); + } + } catch (IOException e) { + + log.error(e.getMessage()); + } + } + + public boolean doFixTosca(Map> nodeToFix, Map> vfToFix, + Map> serviceToFix) { + + Map hasProps = new HashMap<>(); + hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name()); + hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name()); + + Map> vertices = getVerticesToValidate(VertexTypeEnum.NODE_TYPE, hasProps); + boolean result = validateTosca(vertices, nodeToFix, "RESOURCE_TOSCA_ARTIFACTS");// + + hasProps.clear(); + hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name()); + hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF); + hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name()); + + vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps); + result = validateTosca(vertices, vfToFix, "VF_TOSCA_ARTIFACTS"); + + hasProps.clear(); + hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name()); + hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name()); + + vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps); + result = validateTosca(vertices, serviceToFix, "SERVICE_TOSCA_ARTIFACTS"); + + return result; + } + + public Map> getVerticesToValidate(VertexTypeEnum type, + Map hasProps) { + + Map> result = new HashMap<>(); + try { + + Either, TitanOperationStatus> resultsEither = titanDao.getByCriteria(type, hasProps); + if (resultsEither.isRight()) { + System.out.println("getVerticesToValidate failed " + resultsEither.right().value()); + return result; + } + System.out.println("getVerticesToValidate: " + resultsEither.left().value().size() + " vertices to scan"); + List componentsList = resultsEither.left().value(); + componentsList.forEach(vertex -> { + String ivariantUuid = (String) vertex.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID); + if (!result.containsKey(ivariantUuid)) { + List compList = new ArrayList(); + result.put(ivariantUuid, compList); + } + List compList = result.get(ivariantUuid); + + ComponentParametersView filter = new ComponentParametersView(true); + filter.setIgnoreArtifacts(false); + + Either toscaElement = toscaOperationFacade + .getToscaElement(vertex.getUniqueId(), filter); + if (toscaElement.isRight()) { + System.out.println("getVerticesToValidate: failed to find element" + vertex.getUniqueId() + + " staus is" + toscaElement.right().value()); + } else { + compList.add(toscaElement.left().value()); + } + titanDao.commit(); + + }); + + } catch (Exception e) { + log.info("Failed to fetch vf resources ", e); + + } finally { + titanDao.commit(); + + } + return result; + } + + public boolean validateTosca(Map> vertices, Map> compToFix, + String name) { + boolean result = true; + long time = System.currentTimeMillis(); + String fileName = name + "_" + time + ".csv"; + Writer writer = null; + try { + writer = new BufferedWriter(new OutputStreamWriter(new 
FileOutputStream(fileName), "utf-8")); + writer.write("name, UUID, invariantUUID, state, version\n"); + for (Map.Entry> entry : vertices.entrySet()) { + List compList = entry.getValue(); + Set artifactEsId = new HashSet<>(); + for (Component component : compList) { + Map toscaArtifacts = component.getToscaArtifacts(); + Optional op = toscaArtifacts.values().stream() + .filter(a -> artifactEsId.contains(a.getEsId()) && a.getEsId() != null).findAny(); + if (op.isPresent()) { + result = false; + writeModuleResultToFile(writer, compList); + writer.flush(); + break; + } else { + artifactEsId.addAll(toscaArtifacts.values().stream().map(ArtifactDefinition::getEsId) + .collect(Collectors.toList())); + } + } + if (!result) { + List compListfull = new ArrayList<>(); + for (Component c : compList) { + ComponentParametersView filter = new ComponentParametersView(true); + filter.setIgnoreComponentInstances(false); + filter.setIgnoreArtifacts(false); + filter.setIgnoreGroups(false); + + Either toscaElement = toscaOperationFacade + .getToscaElement(c.getUniqueId(), filter); + if (toscaElement.isRight()) { + System.out.println("getVerticesToValidate: failed to find element" + c.getUniqueId() + + " staus is" + toscaElement.right().value()); + } else { + compListfull.add(toscaElement.left().value()); + } + this.titanDao.commit(); + } + + compToFix.put(entry.getKey(), compListfull); + result = true; + } + + } + + } catch (Exception e) { + log.info("Failed to fetch vf resources ", e); + return false; + } finally { + titanDao.commit(); + try { + writer.flush(); + writer.close(); + } catch (Exception ex) { + /* ignore */} + } + return result; + } + + private Either generateToscaArtifact(Component parent, + ArtifactDefinition artifactInfo) { + log.debug("tosca artifact generation"); + try { + if (artifactInfo.getArtifactType().equals(ArtifactTypeEnum.TOSCA_CSAR.getType())) { + Either generated = csarUtils.createCsar(parent, true, true); + + if (generated.isRight()) { + log.debug("Failed to export tosca csar for component {} error {}", parent.getUniqueId(), + generated.right().value()); + + return Either.right(ToscaError.GENERAL_ERROR); + } + byte[] value = generated.left().value(); + artifactInfo.setPayload(value); + + } else { + Either exportComponent = toscaExportUtils.exportComponent(parent); + if (exportComponent.isRight()) { + log.debug("Failed export tosca yaml for component {} error {}", parent.getUniqueId(), + exportComponent.right().value()); + + return Either.right(exportComponent.right().value()); + } + log.debug("Tosca yaml exported for component {} ", parent.getUniqueId()); + String payload = exportComponent.left().value().getMainYaml(); + + artifactInfo.setPayloadData(payload); + } + + byte[] decodedPayload = artifactInfo.getPayloadData(); + artifactInfo.setEsId(artifactInfo.getUniqueId()); + artifactInfo.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload)); + ESArtifactData artifactData = new ESArtifactData(artifactInfo.getEsId(), decodedPayload); + artifactCassandraDao.saveArtifact(artifactData); + log.debug("Tosca yaml artifact esId ", artifactInfo.getEsId()); + } catch (Exception ex) { + log.error("Failed to generate tosca atifact id {} component id {} component name {} error {}",artifactInfo.getUniqueId(), + parent.getUniqueId(), parent.getName(), ex.getMessage() ); + + return Either.right(ToscaError.GENERAL_ERROR); + } + + return Either.left(artifactInfo); + } + + private void fillVfModuleInstHeatEnvPayload(List groupsForCurrVF, + ArtifactDefinition 
vfModuleArtifact) { + + List vfModulePayloadForCurrVF = new ArrayList(); + if (groupsForCurrVF != null) { + for (GroupInstance groupInstance : groupsForCurrVF) { + VfModuleArtifactPayload modulePayload = new VfModuleArtifactPayload(groupInstance); + vfModulePayloadForCurrVF.add(modulePayload); + } + Collections.sort(vfModulePayloadForCurrVF, + (art1, art2) -> VfModuleArtifactPayload.compareByGroupName(art1, art2)); + + final Gson gson = new GsonBuilder().setPrettyPrinting().create(); + + String vfModulePayloadString = gson.toJson(vfModulePayloadForCurrVF); + if (vfModulePayloadString != null) { + String newCheckSum = GeneralUtility + .calculateMD5Base64EncodedByByteArray(vfModulePayloadString.getBytes()); + vfModuleArtifact.setArtifactChecksum(newCheckSum); + + ESArtifactData artifactData = new ESArtifactData(vfModuleArtifact.getEsId(), + vfModulePayloadString.getBytes()); + artifactCassandraDao.saveArtifact(artifactData); + + } + + } + + } +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java new file mode 100644 index 0000000000..556d7e0eee --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java @@ -0,0 +1,38 @@ +package org.openecomp.sdc.asdctool.impl.validator; + +import java.util.List; + +import org.openecomp.sdc.asdctool.impl.validator.executers.IArtifactValidatorExecuter; +import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +public class ArtifactToolBL { + + private static Logger log = LoggerFactory.getLogger(ValidationToolBL.class.getName()); + + @Autowired + protected List validators; + + @Autowired + protected ReportManager reportManager; + + private boolean allValid = true; + + + public boolean validateAll() { + for (IArtifactValidatorExecuter validatorExec: validators) { + System.out.println("ValidatorExecuter "+validatorExec.getName()+" started"); + if (!validatorExec.executeValidations()) { + allValid = false; + System.out.println("ValidatorExecuter "+validatorExec.getName()+" finished with warnings"); + } + else { + System.out.println("ValidatorExecuter "+validatorExec.getName()+" finished successfully"); + } + } + return allValid; + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java index 79b36f0004..90d850686b 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java @@ -11,16 +11,22 @@ import java.util.Properties; public class ValidationConfigManager { private static Properties prop = new Properties(); + private static String outputFullFilePath; + private static String outputFilePath; + public static String getOutputFullFilePath() { + return outputFullFilePath; + } public static String getOutputFilePath() { return outputFilePath; } - public static void setOutputFilePath(String outputPath) { - ValidationConfigManager.outputFilePath = outputPath+ "/reportOutput.txt"; + public static void setOutputFullFilePath(String outputPath) { + ValidationConfigManager.outputFilePath = outputPath; + 
ValidationConfigManager.outputFullFilePath = outputPath+ "/reportOutput.txt"; } - private static String outputFilePath; + public static String getCsvReportFilePath() { return csvReportFilePath; diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java index 3b81ba81e4..63e95d568b 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java @@ -1,7 +1,11 @@ package org.openecomp.sdc.asdctool.impl.validator.config; +import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL; import org.openecomp.sdc.asdctool.impl.validator.ValidationToolBL; +import org.openecomp.sdc.asdctool.impl.validator.executers.NodeToscaArtifactsValidatorExecuter; +import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceToscaArtifactsValidatorExecutor; import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceValidatorExecuter; +import org.openecomp.sdc.asdctool.impl.validator.executers.VFToscaArtifactValidatorExecutor; import org.openecomp.sdc.asdctool.impl.validator.executers.VfValidatorExecuter; import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ArtifactValidationUtils; import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ServiceArtifactValidationTask; @@ -37,6 +41,15 @@ public class ValidationToolConfiguration { @Bean public ServiceValidatorExecuter basicServiceValidator() { return new ServiceValidatorExecuter();} + + @Bean + public NodeToscaArtifactsValidatorExecuter NodeToscaArtifactsValidatorValidator() { return new NodeToscaArtifactsValidatorExecuter();} + + @Bean + public ServiceToscaArtifactsValidatorExecutor ServiceToscaArtifactsValidator() { return new ServiceToscaArtifactsValidatorExecutor();} + + @Bean + public VFToscaArtifactValidatorExecutor VFToscaArtifactValidator() { return new VFToscaArtifactValidatorExecutor();} @Bean public VfArtifactValidationTask vfArtifactValidationTask() { return new VfArtifactValidationTask(); } @@ -51,6 +64,11 @@ public class ValidationToolConfiguration { public ValidationToolBL validationToolBL() { return new ValidationToolBL(); } + + @Bean + public ArtifactToolBL artifactToolBL() { + return new ArtifactToolBL(); + } @Bean public VfValidatorExecuter basicVfValidator() { return new VfValidatorExecuter();} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java new file mode 100644 index 0000000000..4b9764d2d5 --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java @@ -0,0 +1,145 @@ +package org.openecomp.sdc.asdctool.impl.validator.executers; + +import java.io.BufferedWriter; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager; +import 
org.openecomp.sdc.be.dao.jsongraph.GraphVertex; +import org.openecomp.sdc.be.dao.jsongraph.TitanDao; +import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum; +import org.openecomp.sdc.be.dao.titan.TitanOperationStatus; +import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum; +import org.openecomp.sdc.be.model.ArtifactDefinition; +import org.openecomp.sdc.be.model.Component; +import org.openecomp.sdc.be.model.ComponentParametersView; +import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade; +import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +import fj.data.Either; + +public class ArtifactValidatorExecuter{ + + @Autowired + protected TitanDao titanDao; + + @Autowired + private ToscaOperationFacade toscaOperationFacade; + private static Logger log = LoggerFactory.getLogger(ArtifactValidatorExecuter.class.getName()); + + protected String name; + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + + + public Map> getVerticesToValidate(VertexTypeEnum type, Map hasProps){ + Map> result = new HashMap<>(); + Either, TitanOperationStatus> resultsEither = titanDao.getByCriteria(type, hasProps); + if (resultsEither.isRight()) { + System.out.println("getVerticesToValidate failed "+ resultsEither.right().value()); + return result; + } + System.out.println("getVerticesToValidate: "+resultsEither.left().value().size()+" vertices to scan"); + List componentsList = resultsEither.left().value(); + componentsList.forEach(vertex -> { + String ivariantUuid = (String)vertex.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID); + if(!result.containsKey(ivariantUuid)){ + List compList = new ArrayList(); + result.put(ivariantUuid, compList); + } + List compList = result.get(ivariantUuid); + + ComponentParametersView filter = new ComponentParametersView(true); + filter.setIgnoreArtifacts(false); + + Either toscaElement = toscaOperationFacade.getToscaElement(vertex.getUniqueId(), filter); + if (toscaElement.isRight()) { + System.out.println("getVerticesToValidate: failed to find element"+ vertex.getUniqueId()+" staus is" + toscaElement.right().value()); + }else{ + compList.add(toscaElement.left().value()); + } + + }); + + return result; + } + + public boolean validate( Map> vertices) { + boolean result = true; + long time = System.currentTimeMillis(); + String fileName = ValidationConfigManager.getOutputFilePath() + this.getName() + "_"+ time + ".csv"; + Writer writer = null; + try { + writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8")); + writer.write("name, UUID, invariantUUID, state, version\n"); + Collection> collection = vertices.values(); + for(List compList: collection ){ + Set artifactEsId = new HashSet<>(); + for(Component component: compList ){ + Map toscaArtifacts = component.getToscaArtifacts(); + Optional op = toscaArtifacts.values(). 
+ stream().filter(a -> artifactEsId.contains(a.getEsId())).findAny(); + if(op.isPresent()){ + result = false; + writeModuleResultToFile(writer, compList); + writer.flush(); + break; + }else{ + artifactEsId.addAll(toscaArtifacts.values().stream().map(ArtifactDefinition::getEsId).collect(Collectors.toList())) ; + } + } + + } + + } catch (Exception e) { + log.info("Failed to fetch vf resources ", e); + return false; + } finally { + titanDao.commit(); + try { + writer.flush(); + writer.close(); + } catch (Exception ex) { + /* ignore */} + } + return result; + } + + private void writeModuleResultToFile(Writer writer, List components) { + try { + // "service name, service id, state, version + for(Component component: components ){ + StringBuffer sb = new StringBuffer(component.getName()); + sb.append(",").append(component.getUniqueId()).append(",").append(component.getInvariantUUID()).append(",").append(component.getLifecycleState()).append(",").append(component.getVersion()); + + sb.append("\n"); + writer.write(sb.toString()); + } + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/IArtifactValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/IArtifactValidatorExecuter.java new file mode 100644 index 0000000000..6f9405f992 --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/IArtifactValidatorExecuter.java @@ -0,0 +1,9 @@ +package org.openecomp.sdc.asdctool.impl.validator.executers; + +public interface IArtifactValidatorExecuter { + boolean executeValidations(); + String getName(); + + + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java new file mode 100644 index 0000000000..6715c8a955 --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java @@ -0,0 +1,41 @@ +package org.openecomp.sdc.asdctool.impl.validator.executers; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum; +import org.openecomp.sdc.be.model.Component; +import org.openecomp.sdc.be.model.LifecycleStateEnum; + +public class NodeToscaArtifactsValidatorExecuter extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{ + protected String name; + + public NodeToscaArtifactsValidatorExecuter() { + setName("RESOURCE_TOSCA_ARTIFACTS"); + } + @Override + public boolean executeValidations() { + + Map hasProps = new HashMap<>(); + hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name()); + hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name()); + + Map> vertices = getVerticesToValidate(VertexTypeEnum.NODE_TYPE, hasProps); + return validate(vertices); + + } + + @Override + public String getName() { + return name; + } + + + public void setName(String name) { + this.name = name; + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java 
b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java new file mode 100644 index 0000000000..2fe5abef3c --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java @@ -0,0 +1,39 @@ +package org.openecomp.sdc.asdctool.impl.validator.executers; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum; +import org.openecomp.sdc.be.model.Component; +import org.openecomp.sdc.be.model.LifecycleStateEnum; + +public class ServiceToscaArtifactsValidatorExecutor extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{ + + + public ServiceToscaArtifactsValidatorExecutor() { + setName("SERVICE_TOSCA_ARTIFACTS"); + } + @Override + public boolean executeValidations() { + Map hasProps = new HashMap<>(); + hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name()); + hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name()); + + Map> vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps); + return validate(vertices); + } + + @Override + public String getName() { + return name; + } + + + public void setName(String name) { + this.name = name; + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java new file mode 100644 index 0000000000..bc22f2e424 --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java @@ -0,0 +1,43 @@ +package org.openecomp.sdc.asdctool.impl.validator.executers; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum; +import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum; +import org.openecomp.sdc.be.model.Component; +import org.openecomp.sdc.be.model.LifecycleStateEnum; + +public class VFToscaArtifactValidatorExecutor extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{ + + public VFToscaArtifactValidatorExecutor() { + setName("VF_TOSCA_ARTIFACTS"); + } + @Override + public boolean executeValidations() { + + Map hasProps = new HashMap<>(); + hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name()); + hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF); + hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name()); + + Map> vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps); + return validate( vertices); + + } + + @Override + public String getName() { + return name; + } + + + public void setName(String name) { + this.name = name; + } + + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java index 4a4af15fdf..a19a98ed27 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java +++ 
b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java @@ -34,7 +34,7 @@ public class ReportManager { } private void initReportFile() throws IOException { - reportOutputFilePath = ValidationConfigManager.getOutputFilePath(); + reportOutputFilePath = ValidationConfigManager.getOutputFullFilePath(); StrBuilder sb = new StrBuilder(); sb.appendln("-----------------------Validation Tool Results:-------------------------"); Files.write(Paths.get(reportOutputFilePath), sb.toString().getBytes()); diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java new file mode 100644 index 0000000000..5ca8e55412 --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java @@ -0,0 +1,45 @@ +package org.openecomp.sdc.asdctool.main; + +import org.openecomp.sdc.asdctool.configuration.ArtifactUUIDFixConfiguration; +import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader; +import org.openecomp.sdc.asdctool.impl.ArtifactUuidFix; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; + +public class ArtifactUUIDFixMenu { + + private static Logger log = LoggerFactory.getLogger(ArtifactUUIDFixMenu.class); + + public static void main(String[] args) { + if (args == null || args.length < 3) { + System.out.println("Usage: "); + System.exit(1); + } + String fixServices = args[1]; + String runMode = args[2]; + // String fixTosca = args[3]; + log.info("Start fixing artifact UUID after 1707 migration with arguments run with configuration [{}] , for [{}] services", runMode, fixServices); + String appConfigDir = args[0]; + ConfigurationUploader.uploadConfigurationFiles(appConfigDir); + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ArtifactUUIDFixConfiguration.class); + ArtifactUuidFix artifactUuidFix = context.getBean(ArtifactUuidFix.class); + boolean isSuccessful = artifactUuidFix.doFix(fixServices, runMode); + if (isSuccessful) { + log.info("Fixing artifacts UUID for 1707 was finished successfully"); + /* isSuccessful = artifactUuidFix.doFixTosca(fixTosca, fixServices, runMode); + if (isSuccessful) { + log.info("Fixing tosca artifacts was finished successfully"); + isSuccessful = artifactUuidFix.doFixTosca(fixTosca, fixServices, runMode); + } else{ + log.info("Fixing tosca artifacts has failed"); + System.exit(2); + }*/ + } else{ + log.info("Fixing artifacts UUID for 1707 has failed"); + System.exit(2); + } + System.exit(0); + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java new file mode 100644 index 0000000000..f7e8f1fa8b --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java @@ -0,0 +1,46 @@ +package org.openecomp.sdc.asdctool.main; + +import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL; +import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager; +import org.openecomp.sdc.asdctool.impl.validator.config.ValidationToolConfiguration; +import org.openecomp.sdc.be.config.ConfigurationManager; +import org.openecomp.sdc.common.api.ConfigurationSource; +import org.openecomp.sdc.common.impl.ExternalConfiguration; +import org.openecomp.sdc.common.impl.FSConfigurationSource; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; + +public class ArtifactValidatorTool { + private static Logger log = LoggerFactory.getLogger(ValidationTool.class.getName()); + + public static void main(String[] args) throws Exception { + + String outputPath = args[0]; + ValidationConfigManager.setOutputFullFilePath(outputPath); + ValidationConfigManager.setCsvReportFilePath(outputPath); + + String appConfigDir = args[1]; + AnnotationConfigApplicationContext context = initContext(appConfigDir); + ArtifactToolBL validationToolBL = context.getBean(ArtifactToolBL.class); + + System.out.println("Start ArtifactValidation Tool"); + Boolean result = validationToolBL.validateAll(); + // ReportManager.reportEndOfToolRun(); + if (result) { + System.out.println("ArtifactValidation finished successfully"); + System.exit(0); + } else { + System.out.println("ArtifactValidation finished with warnings"); + System.exit(2); + } + } + + private static AnnotationConfigApplicationContext initContext(String appConfigDir) { + ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir); + ConfigurationManager configurationManager = new ConfigurationManager(configurationSource); + AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ValidationToolConfiguration.class); + return context; + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java index 73b7306de4..dce9f9c072 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java @@ -25,7 +25,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.lang3.tuple.ImmutablePair; import org.openecomp.sdc.asdctool.impl.UpdatePropertyOnVertex; import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary; import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum; diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java index 1ee006a49f..51e2d500da 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java @@ -22,7 +22,7 @@ public class ValidationTool { public static void main(String[] args) throws Exception { String outputPath = args[0]; - ValidationConfigManager.setOutputFilePath(outputPath); + ValidationConfigManager.setOutputFullFilePath(outputPath); ValidationConfigManager.setCsvReportFilePath(outputPath); String appConfigDir = args[1]; diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java index 4d52347788..19651ec45a 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java @@ -1,21 +1,70 @@ package org.openecomp.sdc.asdctool.migration.config; +import java.util.ArrayList; +import java.util.List; + import org.openecomp.sdc.asdctool.migration.core.SdcMigrationTool; import 
org.openecomp.sdc.asdctool.migration.core.task.Migration; +import org.openecomp.sdc.asdctool.migration.core.task.PostMigration; +import org.openecomp.sdc.asdctool.migration.dao.MigrationTasksDao; import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver; import org.openecomp.sdc.asdctool.migration.resolver.SpringBeansMigrationResolver; import org.openecomp.sdc.asdctool.migration.service.SdcRepoService; +import org.openecomp.sdc.be.auditing.impl.AuditingManager; +import org.openecomp.sdc.be.components.ArtifactsResolver; +import org.openecomp.sdc.be.components.distribution.engine.DistributionEngine; +import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder; +import org.openecomp.sdc.be.components.impl.AdditionalInformationBusinessLogic; +import org.openecomp.sdc.be.components.impl.ArtifactResolverImpl; +import org.openecomp.sdc.be.components.impl.ArtifactsBusinessLogic; +import org.openecomp.sdc.be.components.impl.CompositionBusinessLogic; +import org.openecomp.sdc.be.components.impl.GroupBusinessLogic; +import org.openecomp.sdc.be.components.impl.InputsBusinessLogic; +import org.openecomp.sdc.be.components.impl.ProductBusinessLogic; +import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic; +import org.openecomp.sdc.be.components.impl.ResourceImportManager; +import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic; +import org.openecomp.sdc.be.components.impl.ServiceComponentInstanceBusinessLogic; +import org.openecomp.sdc.be.components.impl.VFComponentInstanceBusinessLogic; +import org.openecomp.sdc.be.components.impl.generic.GenericTypeBusinessLogic; +import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic; +import org.openecomp.sdc.be.components.merge.heat.HeatEnvArtifactsMergeBusinessLogic; +import org.openecomp.sdc.be.components.merge.input.InputsValuesMergingBusinessLogic; +import org.openecomp.sdc.be.components.merge.instance.ComponentInstanceMergeDataBusinessLogic; +import org.openecomp.sdc.be.components.merge.property.DataDefinitionsValuesMergingBusinessLogic; +import org.openecomp.sdc.be.dao.DAOTitanStrategy; +import org.openecomp.sdc.be.dao.TitanClientStrategy; +import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao; import org.openecomp.sdc.be.dao.cassandra.CassandraClient; -import org.openecomp.sdc.asdctool.migration.dao.MigrationTasksDao; import org.openecomp.sdc.be.dao.config.DAOSpringConfig; +import org.openecomp.sdc.be.dao.es.ElasticSearchClient; +import org.openecomp.sdc.be.dao.impl.AuditingDao; +import org.openecomp.sdc.be.dao.jsongraph.TitanDao; +import org.openecomp.sdc.be.dao.titan.TitanGenericDao; +import org.openecomp.sdc.be.dao.titan.TitanGraphClient; +import org.openecomp.sdc.be.impl.ComponentsUtils; +import org.openecomp.sdc.be.model.cache.ComponentCache; +import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade; +import org.openecomp.sdc.be.model.operations.impl.CapabilityTypeOperation; +import org.openecomp.sdc.be.model.operations.impl.ComponentInstanceOperation; +import org.openecomp.sdc.be.model.operations.impl.CsarOperation; +import org.openecomp.sdc.be.model.operations.impl.ElementOperation; +import org.openecomp.sdc.be.model.operations.impl.GraphLockOperation; +import org.openecomp.sdc.be.model.operations.impl.GroupInstanceOperation; +import org.openecomp.sdc.be.model.operations.impl.GroupOperation; +import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation; +import org.openecomp.sdc.be.model.operations.impl.PropertyOperation; +import 
org.openecomp.sdc.be.tosca.CsarUtils; +import org.openecomp.sdc.be.tosca.ToscaExportHandler; +import org.openecomp.sdc.be.user.UserBusinessLogic; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.config.PropertiesFactoryBean; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - -import java.util.ArrayList; -import java.util.List; +import org.springframework.core.io.FileSystemResource; @Configuration @Import(DAOSpringConfig.class) @@ -23,13 +72,19 @@ import java.util.List; "org.openecomp.sdc.be.model.operations.impl", "org.openecomp.sdc.be.model.cache", "org.openecomp.sdc.be.dao.titan", + "org.openecomp.sdc.be.components.validation", "org.openecomp.sdc.be.dao.cassandra", "org.openecomp.sdc.be.model.jsontitan.operations", - "org.openecomp.sdc.be.dao.jsongraph"}) + "org.openecomp.sdc.be.dao.jsongraph", + "org.openecomp.sdc.be.components.merge", + "org.openecomp.sdc.be.impl"}) public class MigrationSpringConfig { @Autowired(required=false) private List migrations = new ArrayList<>(); + + @Autowired(required=false) + private List postMigrations = new ArrayList<>(); @Bean(name = "sdc-migration-tool") public SdcMigrationTool sdcMigrationTool(MigrationResolver migrationResolver, SdcRepoService sdcRepoService) { @@ -38,7 +93,7 @@ public class MigrationSpringConfig { @Bean(name = "spring-migrations-resolver") public SpringBeansMigrationResolver migrationResolver(SdcRepoService sdcRepoService) { - return new SpringBeansMigrationResolver(migrations, sdcRepoService); + return new SpringBeansMigrationResolver(migrations, postMigrations, sdcRepoService); } @Bean(name = "sdc-repo-service") @@ -57,5 +112,247 @@ public class MigrationSpringConfig { } + @Bean(name = "dao-titan-strategy") + public TitanClientStrategy daoStrategy() { + return new DAOTitanStrategy(); + } + + @Bean(name = "titan-dao") + public TitanDao titanDao(@Qualifier("titan-client") TitanGraphClient titanGraphClient) { + return new TitanDao(titanGraphClient); + } + + @Bean(name = "titan-client", initMethod = "createGraph") + public TitanGraphClient titanClient(@Qualifier("dao-titan-strategy") TitanClientStrategy titanClientStrategy) { + return new TitanGraphClient(titanClientStrategy); + } + + @Bean(name = "resource-business-logic") + public ResourceBusinessLogic resourceBusinessLogic() { + return new ResourceBusinessLogic(); + } + +// @Bean(name = "healthCheckBusinessLogic") +// public HealthCheckBusinessLogic healthCheckBusinessLogic() { +// return new HealthCheckBusinessLogic(); +// } +// +// @Bean(name = "distribution-engine-cluster-health") +// public DistributionEngineClusterHealth distributionEngineClusterHealth() { +// return new DistributionEngineClusterHealth(); +// } +// +// @Bean(name = "cassandra-health-check") +// public CassandraHealthCheck cassandraHealthCheck() { +// return new CassandraHealthCheck(); +// } + +// @Bean(name = "switchover-detector") +// public SwitchoverDetector switchoverDetector() { +// return new SwitchoverDetector(); +// } + + @Bean(name = "service-business-logic") + public ServiceBusinessLogic serviceBusinessLogic() { + return new ServiceBusinessLogic(); + } + + @Bean(name = "capability-type-operation") + public CapabilityTypeOperation CapabilityTypeOperation() { + return new CapabilityTypeOperation(); + } + + 
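    // A minimal usage sketch (illustrative only, not part of this commit): the hand-wired beans in
    // this configuration class are consumed when the migration entry point boots an annotation-based
    // Spring context, mirroring the bootstrap shown later in this patch (MigrationMenu / the
    // "sdc-migration-tool" bean declared above). Bean name and migrate(boolean) signature are taken
    // from this patch; the snippet itself is an assumed example, not the project's code.
    //
    //     AnnotationConfigApplicationContext context =
    //             new AnnotationConfigApplicationContext(MigrationSpringConfig.class);
    //     SdcMigrationTool tool = context.getBean("sdc-migration-tool", SdcMigrationTool.class);
    //     boolean success = tool.migrate(false);   // false = do not enforce re-running completed tasks
    //     context.close();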
@Bean(name = "lifecycle-business-logic") + public LifecycleBusinessLogic lifecycleBusinessLogic() { + return new LifecycleBusinessLogic(); + } + + @Bean(name = "property-operation") + public PropertyOperation propertyOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) { + return new PropertyOperation(titanGenericDao); + } + + @Bean(name = "csar-operation") + public CsarOperation csarOperation() { + return new CsarOperation(); + } + + @Bean(name = "vf-component-instance-business-logic") + public VFComponentInstanceBusinessLogic vFComponentInstanceBusinessLogic() { + return new VFComponentInstanceBusinessLogic(); + } + + @Bean(name = "resource-import-manager") + public ResourceImportManager resourceImportManager() { + return new ResourceImportManager(); + } + + @Bean(name = "group-business-logic") + public GroupBusinessLogic groupBusinessLogic() { + return new GroupBusinessLogic(); + } + + @Bean(name = "inputs-business-logic") + public InputsBusinessLogic inputsBusinessLogic() { + return new InputsBusinessLogic(); + } + + @Bean(name = "composition-business-logic") + public CompositionBusinessLogic compositionBusinessLogic() { + return new CompositionBusinessLogic(); + } + + @Bean(name = "artifacts-business-logic") + public ArtifactsBusinessLogic artifactsBusinessLogic() { + return new ArtifactsBusinessLogic(); + } + + @Bean(name = "component-cache") + public ComponentCache componentCache() { + return new ComponentCache(); + } + + @Bean(name = "componentUtils") + public ComponentsUtils componentsUtils() { + return new ComponentsUtils(); + } + + @Bean(name = "user-business-logic") + public UserBusinessLogic userBusinessLogic() { + return new UserBusinessLogic(); + } + + @Bean(name = "graph-lock-operation") + public GraphLockOperation graphLockOperation() { + return new GraphLockOperation(); + } + + @Bean(name = "titan-generic-dao") + public TitanGenericDao titanGenericDao(@Qualifier("titan-client") TitanGraphClient titanGraphClient) { + return new TitanGenericDao(titanGraphClient); + } + + @Bean(name = "element-operation") + public ElementOperation elementOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) { + return new ElementOperation(titanGenericDao); + } + + @Bean(name = "group-operation") + public GroupOperation groupOperation() { + return new GroupOperation(); + } + + @Bean(name = "group-instance-operation") + public GroupInstanceOperation groupInstanceOperation() { + return new GroupInstanceOperation(); + } + + @Bean(name = "group-type-operation") + public GroupTypeOperation groupTypeOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao, @Qualifier("property-operation") PropertyOperation propertyOperation) { + return new GroupTypeOperation(titanGenericDao, propertyOperation); + } + + @Bean(name = "tosca-operation-facade") + public ToscaOperationFacade toscaOperationFacade() { + return new ToscaOperationFacade(); + } + + @Bean(name = "distribution-engine") + public DistributionEngine distributionEngine() { + return null; + } + + @Bean(name = "audit-cassandra-dao") + public AuditCassandraDao auditCassandraDao() { + return new AuditCassandraDao(); + } + + @Bean(name = "service-component-instance-business-logic") + public ServiceComponentInstanceBusinessLogic serviceComponentInstanceBusinessLogic() { + return new ServiceComponentInstanceBusinessLogic(); + } + + @Bean("tosca-export-handler") + public ToscaExportHandler toscaExportHandler() { + return new ToscaExportHandler(); + } + + @Bean(name = 
"component-instance-operation") + public ComponentInstanceOperation componentInstanceOperation() { + return new ComponentInstanceOperation(); + } + + @Bean(name = "additional-information-business-logic") + public AdditionalInformationBusinessLogic additionalInformationBusinessLogic() { + return new AdditionalInformationBusinessLogic(); + } + + @Bean(name = "auditing-manager") + public AuditingManager auditingManager() { + return new AuditingManager(); + } + + @Bean(name = "auditing-dao") + public AuditingDao auditingDao() { + return new AuditingDao(); + } + + @Bean(name = "elasticsearch-client", initMethod = "initialize") + public ElasticSearchClient elasticSearchClient() { + return new ElasticSearchClient(); + } + + @Bean(name = "csar-utils") + public CsarUtils csarUtils() { + return new CsarUtils(); + } + + @Bean(name = "service-distribution-artifacts-builder") + public ServiceDistributionArtifactsBuilder serviceDistributionArtifactsBuilder() { + return new ServiceDistributionArtifactsBuilder(); + } + + @Bean(name = "product-business-logic") + public ProductBusinessLogic productBusinessLogic() { + return null; + } + + @Bean(name = "dataDefinitionsValuesMergingBusinessLogic") + public DataDefinitionsValuesMergingBusinessLogic dataDefinitionsValuesMergingBusinessLogic() { + return new DataDefinitionsValuesMergingBusinessLogic(); + } + + @Bean(name = "artifacts-resolver") + public ArtifactsResolver artifactsResolver() { + return new ArtifactResolverImpl(); + } + + @Bean(name = "InputsValuesMergingBusinessLogic") + public InputsValuesMergingBusinessLogic InputsValuesMergingBusinessLogic(){ + return new InputsValuesMergingBusinessLogic(); + } + + @Bean(name = "GenericTypeBusinessLogic") + public GenericTypeBusinessLogic genericTypeBusinessLogic(){ + return new GenericTypeBusinessLogic(); + } + + @Bean(name ="componentInstanceMergeDataBusinessLogic") + public ComponentInstanceMergeDataBusinessLogic componentInstanceMergeDataBusinessLogic(){ + return new ComponentInstanceMergeDataBusinessLogic(); + } + + @Bean(name ="heatEnvArtifactsMergeBusinessLogic") + public HeatEnvArtifactsMergeBusinessLogic heatEnvArtifactsMergeBusinessLogic(){ + return new HeatEnvArtifactsMergeBusinessLogic(); + } + + @Bean(name = "elasticsearchConfig") + public PropertiesFactoryBean mapper() { + String configHome = System.getProperty("config.home"); + PropertiesFactoryBean bean = new PropertiesFactoryBean(); + bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml")); + return bean; + } } diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java index 21671a67a9..b8e2347970 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java @@ -4,7 +4,8 @@ import java.util.List; import org.openecomp.sdc.asdctool.migration.core.execution.MigrationExecutionResult; import org.openecomp.sdc.asdctool.migration.core.execution.MigrationExecutorImpl; -import org.openecomp.sdc.asdctool.migration.core.task.Migration; +import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage; +import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage.AspectMigrationEnum; import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult; import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver; import 
org.openecomp.sdc.asdctool.migration.service.SdcRepoService; @@ -30,16 +31,17 @@ public class SdcMigrationTool { public boolean migrate(boolean enforceAll) { LOGGER.info("starting migration process"); handleEnforceMigrationFlag(enforceAll); - List migrations = migrationsResolver.resolveMigrations(); + List migrations = migrationsResolver.resolveMigrations(); LOGGER.info("there are {} migrations task to execute", migrations.size()); - for (Migration migration : migrations) { + for (IMigrationStage migration : migrations) { try { MigrationExecutionResult executionResult = new MigrationExecutorImpl().execute(migration); if (migrationHasFailed(executionResult)) { LOGGER.error("migration {} with version {} has failed. error msg: {}", migration.getClass().getName(), migration.getVersion().toString(), executionResult.getMsg()); return false; } - sdcRepoService.createMigrationTask(executionResult.toMigrationTaskEntry()); + if(migration.getAspectMigration() == AspectMigrationEnum.MIGRATION) + sdcRepoService.createMigrationTask(executionResult.toMigrationTaskEntry()); } catch (RuntimeException e) { LOGGER.error("migration {} with version {} has failed. error msg: {}", migration.getClass().getName(), migration.getVersion().toString(), e); return false; diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java index accd9eca53..aa871914d6 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java @@ -1,11 +1,11 @@ package org.openecomp.sdc.asdctool.migration.core.execution; +import java.util.Date; + import org.openecomp.sdc.asdctool.migration.core.DBVersion; import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult; import org.openecomp.sdc.be.resources.data.MigrationTaskEntry; -import java.util.Date; - public class MigrationExecutionResult { private MigrationResult.MigrationStatus migrationStatus; diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutor.java index aba5056b48..cf3e50a312 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutor.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutor.java @@ -1,7 +1,7 @@ package org.openecomp.sdc.asdctool.migration.core.execution; import org.openecomp.sdc.asdctool.migration.core.MigrationException; -import org.openecomp.sdc.asdctool.migration.core.task.Migration; +import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage; public interface MigrationExecutor { @@ -10,6 +10,6 @@ public interface MigrationExecutor { * @return a {@link MigrationExecutionResult} with the relevant data on the current migration execution; * @throws MigrationException in case there was an unexpected exception during migration */ - MigrationExecutionResult execute(Migration migration) throws MigrationException; + MigrationExecutionResult execute(IMigrationStage migration) throws MigrationException; } diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java 
b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java
index 39219aee23..2b3b28a737 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java
@@ -1,7 +1,7 @@
 package org.openecomp.sdc.asdctool.migration.core.execution;
 import org.openecomp.sdc.asdctool.migration.core.MigrationException;
-import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
 import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -13,7 +13,7 @@ public class MigrationExecutorImpl implements MigrationExecutor {
     private static final Logger LOGGER = LoggerFactory.getLogger(MigrationExecutorImpl.class);
     @Override
-    public MigrationExecutionResult execute(Migration migration) throws MigrationException {
+    public MigrationExecutionResult execute(IMigrationStage migration) throws MigrationException {
         try {
             LOGGER.info("starting migration {}. description: {}. version {}", migration.getClass().getName(), migration.description(), migration.getVersion().toString());
             StopWatch stopWatch = new StopWatch();
@@ -29,12 +29,12 @@ public class MigrationExecutorImpl implements MigrationExecutor {
         }
     }
-    private MigrationExecutionResult logAndCreateExecutionResult(Migration migration, MigrationResult migrationResult, double executionTime) {
+    private MigrationExecutionResult logAndCreateExecutionResult(IMigrationStage migration, MigrationResult migrationResult, double executionTime) {
         LOGGER.info("finished migration {}. with version {}. migration status: {}, migration message: {}, execution time: {}", migration.getClass().getName(), migration.getVersion().toString(), migrationResult.getMigrationStatus().name(), migrationResult.getMsg(), executionTime);
         return createMigrationTask(migration, migrationResult, executionTime);
     }
-    private MigrationExecutionResult createMigrationTask(Migration migration, MigrationResult migrationResult, double totalTimeSeconds) {
+    private MigrationExecutionResult createMigrationTask(IMigrationStage migration, MigrationResult migrationResult, double totalTimeSeconds) {
         MigrationExecutionResult migrationExecutionResult = new MigrationExecutionResult();
         migrationExecutionResult.setExecutionTime(totalTimeSeconds);
         migrationExecutionResult.setMigrationStatus(migrationResult.getMigrationStatus());
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/IMigrationStage.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/IMigrationStage.java
new file mode 100644
index 0000000000..d58da7d784
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/IMigrationStage.java
@@ -0,0 +1,20 @@
+package org.openecomp.sdc.asdctool.migration.core.task;
+
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+
+public interface IMigrationStage {
+
+    String description();
+
+    DBVersion getVersion();
+
+    MigrationResult migrate();
+
+    AspectMigrationEnum getAspectMigration();
+
+    public enum AspectMigrationEnum {
+        BEFORE_MIGRATION,
+        MIGRATION,
+        AFTER_MIGRATION;
+    }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/Migration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/Migration.java
index 58f201acf5..6ec95dce8b
100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/Migration.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/Migration.java @@ -1,14 +1,10 @@ package org.openecomp.sdc.asdctool.migration.core.task; - -import org.openecomp.sdc.asdctool.migration.core.DBVersion; - -public interface Migration { - - String description(); - - DBVersion getVersion(); - - MigrationResult migrate(); +public interface Migration extends IMigrationStage{ + @Override + default + AspectMigrationEnum getAspectMigration(){ + return AspectMigrationEnum.MIGRATION; + } } diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java new file mode 100644 index 0000000000..f20c36471d --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java @@ -0,0 +1,19 @@ +package org.openecomp.sdc.asdctool.migration.core.task; + +import org.openecomp.sdc.asdctool.migration.core.DBVersion; + +public interface PostMigration extends IMigrationStage { + + @Override + default + public DBVersion getVersion() { + return DBVersion.CURRENT_VERSION; + } + + @Override + default + AspectMigrationEnum getAspectMigration(){ + return AspectMigrationEnum.AFTER_MIGRATION; + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java index 03fd8d1e7d..b0bfabb059 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java @@ -1,11 +1,9 @@ package org.openecomp.sdc.asdctool.migration.dao; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.Session; -import com.datastax.driver.mapping.Mapper; -import com.datastax.driver.mapping.MappingManager; -import fj.data.Either; +import java.math.BigInteger; + +import javax.annotation.PostConstruct; + import org.apache.commons.lang3.tuple.ImmutablePair; import org.openecomp.sdc.be.dao.cassandra.CassandraDao; import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus; @@ -13,10 +11,14 @@ import org.openecomp.sdc.be.resources.data.MigrationTaskEntry; import org.openecomp.sdc.be.resources.data.auditing.AuditingTypesConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; -import javax.annotation.PostConstruct; -import java.math.BigInteger; +import com.datastax.driver.core.ResultSet; +import com.datastax.driver.core.Row; +import com.datastax.driver.core.Session; +import com.datastax.driver.mapping.Mapper; +import com.datastax.driver.mapping.MappingManager; + +import fj.data.Either; public class MigrationTasksDao extends CassandraDao { diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java index 1e8a533624..dc2114dcd7 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java @@ -7,12 +7,9 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import 
org.apache.commons.cli.ParseException; +import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader; import org.openecomp.sdc.asdctool.migration.config.MigrationSpringConfig; import org.openecomp.sdc.asdctool.migration.core.SdcMigrationTool; -import org.openecomp.sdc.be.config.ConfigurationManager; -import org.openecomp.sdc.common.api.ConfigurationSource; -import org.openecomp.sdc.common.impl.ExternalConfiguration; -import org.openecomp.sdc.common.impl.FSConfigurationSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.AnnotationConfigApplicationContext; @@ -25,7 +22,7 @@ public class MigrationMenu { CommandLine commandLine = initCmdLineOptions(args); String appConfigDir = commandLine.getOptionValue("c"); boolean enforceAll = commandLine.hasOption("e"); - uploadConfiguration(appConfigDir); + ConfigurationUploader.uploadConfigurationFiles(appConfigDir); AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(MigrationSpringConfig.class); doMigrate(enforceAll, context); @@ -91,10 +88,4 @@ public class MigrationMenu { .build(); } - private static void uploadConfiguration(String appConfigDir) { - ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir); - ConfigurationManager configurationManager = new ConfigurationManager(configurationSource); - } - - } diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java index b272d45ff0..22add31eb4 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java @@ -1,16 +1,16 @@ package org.openecomp.sdc.asdctool.migration.resolver; -import org.openecomp.sdc.asdctool.migration.core.task.Migration; - import java.util.List; +import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage; + public interface MigrationResolver { /** * * @return a list of {@code T} */ - List resolveMigrations(); + List resolveMigrations(); } diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java index 4af5d76123..182996f5e4 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java @@ -1,38 +1,49 @@ package org.openecomp.sdc.asdctool.migration.resolver; -import org.openecomp.sdc.asdctool.migration.core.DBVersion; -import org.openecomp.sdc.asdctool.migration.core.task.Migration; -import org.openecomp.sdc.asdctool.migration.service.SdcRepoService; - import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; +import org.openecomp.sdc.asdctool.migration.core.DBVersion; +import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage; +import org.openecomp.sdc.asdctool.migration.core.task.Migration; +import org.openecomp.sdc.asdctool.migration.core.task.PostMigration; +import org.openecomp.sdc.asdctool.migration.service.SdcRepoService; + public class SpringBeansMigrationResolver implements MigrationResolver { private List migrations = new 
ArrayList<>(); - + private List postMigrations = new ArrayList<>(); + private SdcRepoService sdcRepoService; - public SpringBeansMigrationResolver(List migrations, SdcRepoService sdcRepoService) { + public SpringBeansMigrationResolver(List migrations, List postMigrations, SdcRepoService sdcRepoService) { this.migrations = migrations; + this.postMigrations = postMigrations; this.sdcRepoService = sdcRepoService; } @Override - public List resolveMigrations() { + public List resolveMigrations() { migrations.sort(Comparator.comparing(Migration::getVersion)); - return resolveNonExecutedMigrations(); + List allTasks = resolveNonExecutedMigrations(); + allTasks.addAll(postMigrations); + return allTasks; } //package private for testing void setMigrations(List migrations) { this.migrations = migrations; } + + //package private for testing + void setPostMigrations(List postMigrations) { + this.postMigrations = postMigrations; + } - private List resolveNonExecutedMigrations() { + private List resolveNonExecutedMigrations() { DBVersion latestDBVersion = sdcRepoService.getLatestDBVersion(); return migrations.stream() .filter(mig -> isMigrationVersionGreaterThanLatestVersion(latestDBVersion, mig)) diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java index 20451f2c63..cf4affb345 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java @@ -1,11 +1,11 @@ package org.openecomp.sdc.asdctool.migration.service; +import java.math.BigInteger; + import org.openecomp.sdc.asdctool.migration.core.DBVersion; import org.openecomp.sdc.asdctool.migration.dao.MigrationTasksDao; import org.openecomp.sdc.be.resources.data.MigrationTaskEntry; -import java.math.BigInteger; - public class SdcRepoService { private MigrationTasksDao migrationTasksDao; diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java new file mode 100644 index 0000000000..59259219db --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java @@ -0,0 +1,9 @@ +package org.openecomp.sdc.asdctool.migration.tasks.handlers; + +public interface OutputHandler { + + public void initiate(Object... title); + public void addRecord(Object... 
record); + public boolean writeOutput(); + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java new file mode 100644 index 0000000000..91eaed5429 --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java @@ -0,0 +1,66 @@ +package org.openecomp.sdc.asdctool.migration.tasks.handlers; + +import java.io.FileOutputStream; +import java.text.DateFormat; +import java.text.SimpleDateFormat; + +import org.apache.poi.hssf.usermodel.HSSFWorkbook; +import org.apache.poi.ss.usermodel.Cell; +import org.apache.poi.ss.usermodel.Row; +import org.apache.poi.ss.usermodel.Sheet; +import org.apache.poi.ss.usermodel.Workbook; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class XlsOutputHandler implements OutputHandler { + + private static final Logger LOGGER = LoggerFactory.getLogger(XlsOutputHandler.class); + + private Workbook workbook; + private Sheet activeSheet; + private Row currentRow; + private int rowCount = 0; + + public XlsOutputHandler(Object... title){ + initiate(title); + } + + @Override + public void initiate(Object... title) { + LOGGER.info("Starting to initiate xls output handler. "); + workbook = new HSSFWorkbook(); + activeSheet = workbook.createSheet("Upgrade Migration 1710.0 results"); + addRecord(title); + LOGGER.info("Xls output handler has been initiated. "); + } + + @Override + public void addRecord(Object... record) { + LOGGER.debug("Going to add record {} to output. ", record); + currentRow = activeSheet.createRow(rowCount++); + LOGGER.debug("A new row has been created"); + int columnCount = 0; + Cell cell; + for(Object cellValue : record){ + cell = currentRow.createCell(columnCount++); + if(cellValue != null) + cell.setCellValue(cellValue.toString()); + } + } + + @Override + public boolean writeOutput() { + try { + DateFormat df = new SimpleDateFormat("yyyyMMdd_HHmmss"); + String fileName = "UpgradeMigration1710Results_" + df.format(System.currentTimeMillis()) + ".xls"; + LOGGER.info("Going to write xls output file {}. ", fileName); + try (FileOutputStream out = new FileOutputStream(fileName)) { workbook.write(out); } + return true; + } catch (Exception e) { + LOGGER.error("Failed to write an output file upon Upgrade migration 1710. Exception {} occurred. 
", e.getMessage()); + e.printStackTrace(); + return false; + } + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java new file mode 100644 index 0000000000..166512d5b4 --- /dev/null +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java @@ -0,0 +1,739 @@ +package org.openecomp.sdc.asdctool.migration.tasks.mig1710; + +import java.util.ArrayList; +import java.util.EnumMap; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult; +import org.openecomp.sdc.asdctool.migration.core.task.PostMigration; +import org.openecomp.sdc.asdctool.migration.tasks.handlers.XlsOutputHandler; +import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic; +import org.openecomp.sdc.be.components.impl.ServiceComponentInstanceBusinessLogic; +import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic; +import org.openecomp.sdc.be.components.lifecycle.LifecycleChangeInfoWithAction; +import org.openecomp.sdc.be.components.lifecycle.LifecycleChangeInfoWithAction.LifecycleChanceActionEnum; +import org.openecomp.sdc.be.config.ConfigurationManager; +import org.openecomp.sdc.be.dao.api.ActionStatus; +import org.openecomp.sdc.be.dao.jsongraph.GraphVertex; +import org.openecomp.sdc.be.dao.jsongraph.TitanDao; +import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum; +import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum; +import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum; +import org.openecomp.sdc.be.dao.titan.TitanOperationStatus; +import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum; +import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields; +import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum; +import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum; +import org.openecomp.sdc.be.impl.ComponentsUtils; +import org.openecomp.sdc.be.model.ComponentInstance; +import org.openecomp.sdc.be.model.LifeCycleTransitionEnum; +import org.openecomp.sdc.be.model.LifecycleStateEnum; +import org.openecomp.sdc.be.model.Resource; +import org.openecomp.sdc.be.model.User; +import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade; +import org.openecomp.sdc.be.model.jsontitan.utils.ModelConverter; +import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation; +import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus; +import org.openecomp.sdc.be.model.operations.impl.CsarOperation; +import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter; +import org.openecomp.sdc.exception.ResponseFormat; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import fj.data.Either; + +@Component +public class UpgradeMigration1710 implements PostMigration { + + private static final String UNKNOWN = "UNKNOWN"; + + private static final String CHECKOUT_MESSAGE = "checkout upon upgrade migration"; + + private static final String FAILED_TO_CHANGE_STATE_OF_COMPONENT = "Failed to change 
state of component with name {}, invariantUUID {}, version {} to {}. "; + + private static final String FAILED_TO_UPGRADE_COMPONENT = "Failed to upgrade {} with name {}, invariantUUID {}, version {}. Operation {}. The reason for failure: {}. "; + + private static final String UPGRADE_COMPONENT_SUCCEEDED = "Upgrade of {} with name {}, invariantUUID {}, version {} finished successfully. "; + + private static final String UPGRADE_VFS_FAILED = "Upgrade VFs upon upgrade migration 1710 process failed. "; + + private static final Logger LOGGER = LoggerFactory.getLogger(UpgradeMigration1710.class); + + @Autowired + private TitanDao titanDao; + + @Autowired + private ToscaOperationFacade toscaOperationFacade; + + @Autowired + private LifecycleBusinessLogic lifecycleBusinessLogic; + + @Autowired + private IUserAdminOperation userAdminOperation; + + @Autowired + private ResourceBusinessLogic resourceBusinessLogic; + + @Autowired + private CsarOperation csarOperation; + + @Autowired + private ServiceComponentInstanceBusinessLogic componentInstanceBusinessLogic; + + @Autowired + private ComponentsUtils componentsUtils; + + private final XlsOutputHandler outputHandler = new XlsOutputHandler("COMPONENT TYPE", "COMPONENT NAME", "COMPONENT UUID", "COMPONENT UNIQUE_ID", "UPGRADE STATUS", "DESCRIPTION"); + + private User user = null; + + private final LifecycleChangeInfoWithAction changeInfo = new LifecycleChangeInfoWithAction(CHECKOUT_MESSAGE, LifecycleChanceActionEnum.UPGRADE_MIGRATION); + + private final Map latestGenericTypes = new HashMap<>(); + + private boolean isVfcUpgradeRequired = false; + + private boolean skipIfUpgradeVfFailed = true; + + /** below methods is defined on package level for testing + * where Spring object injection is not used **/ + void setUserAdminOperation(IUserAdminOperation userAdminOperation) { this.userAdminOperation = userAdminOperation; } + + void setTitanDao(TitanDao titanDao) { this.titanDao = titanDao; } + + void setTosckaOperationFacade(ToscaOperationFacade toscaOperationFacade) { this.toscaOperationFacade = toscaOperationFacade; } + + void setLifecycleBusinessLogic(LifecycleBusinessLogic lifecycleBusinessLogic) { this.lifecycleBusinessLogic = lifecycleBusinessLogic; } + + void setComponentsUtils(ComponentsUtils componentsUtils) { this.componentsUtils = componentsUtils; } + + + /***********************************************/ + + @Override + public String description() { + return "Upgrade migration 1710 - post migration task, which is dedicated to upgrade all latest certified (and not checked out) Node types, VFs and Services. "; + } + + private enum UpgradeStatus{ + UPGRADED, + NOT_UPGRADED + } + + @Override + public MigrationResult migrate() { + LOGGER.info("Starting upgrade migration 1710 process. "); + MigrationResult migrationResult = new MigrationResult(); + + try{ + boolean result = true; + + isVfcUpgradeRequired = !ConfigurationManager.getConfigurationManager().getConfiguration().getSkipUpgradeVSPsFlag(); + skipIfUpgradeVfFailed = ConfigurationManager.getConfigurationManager().getConfiguration().getSkipUpgradeFailedVfs(); + final String userId = ConfigurationManager.getConfigurationManager().getConfiguration().getAutoHealingOwner(); + + Either userReq = userAdminOperation.getUserData(userId, false); + if (userReq.isRight()) { + result = false; + LOGGER.error("Upgrade migration was failed. 
User {} resolve failed: {} ", userId, userReq.right().value()); + } + else { + user = userReq.left().value(); + LOGGER.info("User {} will perform upgrade operation", user.toString()); + } + + if(result){ + result = upgradeNodeTypes(); + } + if(result){ + result = upgradeVFs(); + } + if(result){ + upgradeServices(); + } + if(result){ + LOGGER.info("Upgrade migration 1710 has been successfully finished. "); + titanDao.commit(); + migrationResult.setMigrationStatus(MigrationResult.MigrationStatus.COMPLETED); + } else { + LOGGER.info("Upgrade migration 1710 was failed. "); + titanDao.rollback(); + migrationResult.setMigrationStatus(MigrationResult.MigrationStatus.FAILED); + } + } catch(Exception e){ + LOGGER.error("Upgrade migration 1710 was failed. ", e); + titanDao.rollback(); + migrationResult.setMigrationStatus(MigrationResult.MigrationStatus.FAILED); + } finally { + outputHandler.writeOutput(); + } + return migrationResult; + } + + private StorageOperationStatus upgradeServices() { + LOGGER.info("Starting upgrade services upon upgrade migration 1710 process. "); + Map latestOriginResourceVersions = new HashMap<>(); + Either, TitanOperationStatus> getServicesRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.TOPOLOGY_TEMPLATE, ComponentTypeEnum.SERVICE); + if(getServicesRes.isRight()){ + return StorageOperationStatus.GENERAL_ERROR; + } + for(String currUid : getServicesRes.left().value()){ + try{ + if(handleService(currUid, latestOriginResourceVersions)){ + titanDao.commit(); + } else { + processComponentUpgradeFailure(ComponentTypeEnum.SERVICE.name(), currUid, ""); + } + } catch(Exception e){ + processComponentUpgradeFailure(ComponentTypeEnum.SERVICE.name(), currUid, e.getMessage()); + } + } + return StorageOperationStatus.OK; + } + + private void processComponentUpgradeFailure(final String name, final String currUid, final String reason) { + LOGGER.error("Failed to upgrade {} with uniqueId {} due to a reason {}. ", name, currUid, reason); + titanDao.rollback(); + } + + private boolean handleService(String uniqueId, Map latestOriginResourceVersions) { + LOGGER.info("Starting upgrade Service with uniqueId {} upon upgrade migration 1710 process. ", uniqueId); + Either getServiceRes = toscaOperationFacade.getToscaElement(uniqueId); + if(getServiceRes.isRight()){ + LOGGER.error("Failed to upgrade service with uniqueId {} due to {}. 
", uniqueId, getServiceRes.right().value()); + outputHandler.addRecord(ComponentTypeEnum.SERVICE.name(), UNKNOWN, UNKNOWN, uniqueId, MigrationResult.MigrationStatus.FAILED.name(), getServiceRes.right().value()); + return false; + } + String derivedFromGenericType = getServiceRes.left().value().getDerivedFromGenericType(); + LOGGER.debug("derivedFromGenericType: {}", derivedFromGenericType ); + if (derivedFromGenericType == null) { + //malformed field value, upgrade required + return upgradeService(getServiceRes.left().value()); + } + if(!latestGenericTypes.containsKey(derivedFromGenericType)){ + Either, TitanOperationStatus> getDerivedRes = findDerivedResources(derivedFromGenericType); + if(getDerivedRes.isRight()){ + LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, getServiceRes.left().value().getComponentType().getValue(), getServiceRes.left().value().getName(), getServiceRes.left().value().getInvariantUUID(), getServiceRes.left().value().getVersion(), "findDerivedResources", getDerivedRes.right().value()); + outputHandler.addRecord( getServiceRes.left().value().getComponentType().name(),getServiceRes.left().value().getName(), getServiceRes.left().value().getInvariantUUID(), getServiceRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getDerivedRes.right().value()); + return false; + } + latestGenericTypes.put(derivedFromGenericType, getDerivedRes.left().value().get(0)); + } + if(latestVersionExists(latestGenericTypes.get(derivedFromGenericType), getServiceRes.left().value().getDerivedFromGenericVersion())){ + return upgradeService(getServiceRes.left().value()); + } + if(!collectLatestOriginResourceVersions(getServiceRes.left().value(), latestOriginResourceVersions)){ + return false; + } + if(shouldUpgrade(getServiceRes.left().value(), latestOriginResourceVersions)){ + return upgradeService(getServiceRes.left().value()); + } + outputHandler.addRecord(getServiceRes.left().value().getComponentType().name(), getServiceRes.left().value().getName(), getServiceRes.left().value().getInvariantUUID(), getServiceRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(), UpgradeStatus.NOT_UPGRADED); + return true; + } + + private boolean collectLatestOriginResourceVersions(org.openecomp.sdc.be.model.Component component, Map latestOriginResourceVersions) { + if(CollectionUtils.isNotEmpty(component.getComponentInstances())){ + for(ComponentInstance instance : component.getComponentInstances()){ + if(instance.getOriginType() != OriginTypeEnum.ServiceProxy && !latestOriginResourceVersions.containsKey(instance.getToscaComponentName())){ + VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name()); + Either getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata); + if(getOriginRes.isRight()){ + LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "toscaOperationFacade.getLatestCertifiedByToscaResourceName", getOriginRes.right().value()); + outputHandler.addRecord( component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getOriginRes.right().value()); + return false; + } + latestOriginResourceVersions.put(instance.getToscaComponentName(), getOriginRes.left().value().getVersion()); + } + } + } + return true; + } + + private 
boolean shouldUpgrade(org.openecomp.sdc.be.model.Component component, Map latestOriginResources) { + boolean shouldUpgrade = false; + if(CollectionUtils.isNotEmpty(component.getComponentInstances())){ + for(ComponentInstance instance : component.getComponentInstances()){ + if(instance.getOriginType() == OriginTypeEnum.ServiceProxy){ + LOGGER.info("The service with name {}, invariantUUID {}, version {}, contains Service proxy instance {}, than the service should be upgraded. ", component.getName(), component.getInvariantUUID(), component.getVersion(), instance.getName()); + shouldUpgrade = true; + } + if(isGreater(latestOriginResources.get(instance.getToscaComponentName()), instance.getComponentVersion())){ + LOGGER.info("The service with name {}, invariantUUID {}, version {}, contains instance {} from outdated version of origin {} {} , than the service should be upgraded. ", component.getName(), component.getInvariantUUID(), component.getVersion(), instance.getName(), instance.getComponentName(), instance.getComponentVersion()); + shouldUpgrade = true; + } + } + } + return shouldUpgrade; + } + + private boolean upgradeService(org.openecomp.sdc.be.model.Component service) { + String serviceName = service.getName(); + String serviceUuid = service.getUUID(); + LOGGER.info("Starting upgrade Service with name {}, invariantUUID {}, version {} upon upgrade migration 1710 process. ", serviceName, service.getInvariantUUID(), service.getVersion()); + LOGGER.info("Starting to perform check out of service {}. ", serviceName); + Either checkouRes = lifecycleBusinessLogic.changeComponentState(service.getComponentType(), service.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false); + if(checkouRes.isRight()){ + LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "lifecycleBusinessLogic.changeComponentState", checkouRes.right().value().getFormattedMessage()); + outputHandler.addRecord(service.getComponentType().name(), serviceName, serviceUuid, service.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkouRes.right().value().getFormattedMessage()); + return false; + } + Either updateCompositionRes = updateComposition(checkouRes.left().value()); + if(updateCompositionRes.isRight()){ + LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "updateComposition", updateCompositionRes.right().value().getFormattedMessage()); + outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateCompositionRes.right().value().getFormattedMessage()); + return false; + } + Either certifyRes = performFullCertification(checkouRes.left().value()); + if(certifyRes.isRight()){ + LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "performFullCertification", certifyRes.right().value().getFormattedMessage()); + outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage()); + return false; + } + 
outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), serviceUuid, checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(), UpgradeStatus.UPGRADED); + return true; + } + + private Either updateComposition(org.openecomp.sdc.be.model.Component component) { + Either upgradeInstanceRes; + for(ComponentInstance instance : component.getComponentInstances()){ + upgradeInstanceRes = upgradeInstance(component, instance); + if(upgradeInstanceRes.isRight()) { + LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "upgradeInstance", upgradeInstanceRes.right().value().getFormattedMessage()); + outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), upgradeInstanceRes.right().value().getFormattedMessage()); + return Either.right(upgradeInstanceRes.right().value()); + } + } + return Either.left(component); + } + + private Either upgradeInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) { + LOGGER.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName()); + ComponentInstance newComponentInstance = new ComponentInstance(instance); + if(instance.getOriginType() == OriginTypeEnum.ServiceProxy){ + return upgradeServiceProxyInstance(component, instance, newComponentInstance); + } + return upgradeResourceInstance(component, instance, newComponentInstance); + } + + private Either upgradeResourceInstance(org.openecomp.sdc.be.model.Component component, + ComponentInstance instance, ComponentInstance newComponentInstance) { + LOGGER.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName()); + VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name()); + Either getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata); + if(getOriginRes.isRight()){ + LOGGER.info("Upgrade of {} instance {} upon upgrade migration 1710 process failed due to a reason {}. 
", + component.getComponentType().getValue(), instance.getName(), getOriginRes.right().value()); + return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType()))); + } + newComponentInstance.setComponentName(getOriginRes.left().value().getName()); + newComponentInstance.setComponentUid(getOriginRes.left().value().getUniqueId()); + newComponentInstance.setComponentVersion(getOriginRes.left().value().getVersion()); + newComponentInstance.setToscaComponentName(((Resource)getOriginRes.left().value()).getToscaResourceName()); + if(isGreater(getOriginRes.left().value().getVersion(), instance.getComponentVersion())){ + return changeAssetVersion(component, instance, newComponentInstance); + } + + //upgrade nodes contained by CVFC + if(isVfcUpgradeRequired && newComponentInstance.getOriginType() == OriginTypeEnum.CVFC && + !upgradeVf(getOriginRes.left().value().getUniqueId())) { + return Either.right(componentsUtils.getResponseFormat(ActionStatus.GENERAL_ERROR)); + } + LOGGER.info("Upgrade of {} instance {} upon upgrade migration 1710 process finished successfully. ", + component.getComponentType().getValue(), instance.getName()); + return Either.left(instance); + } + + private Either upgradeServiceProxyInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) { + Either, TitanOperationStatus> getLatestOriginServiceRes = getLatestCertifiedService(instance.getSourceModelInvariant()); + if(getLatestOriginServiceRes.isRight()){ + return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(DaoStatusConverter.convertTitanStatusToStorageStatus(getLatestOriginServiceRes.right().value()), instance.getOriginType().getComponentType()))); + } + newComponentInstance.setComponentVersion((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.VERSION)); + newComponentInstance.setSourceModelUid((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UNIQUE_ID)); + newComponentInstance.setSourceModelName((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.NAME)); + newComponentInstance.setSourceModelUuid((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UUID)); + return changeAssetVersion(component, instance, newComponentInstance); + } + + private Either, TitanOperationStatus> getLatestCertifiedService(String invariantUUID) { + + Map propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class); + propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name()); + propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true); + propertiesToMatch.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name()); + propertiesToMatch.put(GraphPropertyEnum.INVARIANT_UUID, invariantUUID); + Map propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class); + propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true); + return titanDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseMetadata); + } + + private Either changeAssetVersion(org.openecomp.sdc.be.model.Component containerComponent, ComponentInstance instance, ComponentInstance newComponentInstance) { + return 
componentInstanceBusinessLogic.changeComponentInstanceVersion(ComponentTypeEnum.SERVICE_PARAM_NAME, containerComponent.getUniqueId(), instance.getUniqueId(), user.getUserId(), newComponentInstance); + } + + private boolean upgradeNodeTypes() { + LOGGER.info("Starting upgrade node types upon upgrade migration 1710 process. "); + String toscaConformanceLevel = ConfigurationManager.getConfigurationManager().getConfiguration().getToscaConformanceLevel(); + Map> resourcesForUpgrade = ConfigurationManager.getConfigurationManager().getConfiguration().getResourcesForUpgrade(); + Map upgradedNodeTypesMap = new HashMap<> (); + List nodeTypes; + if(resourcesForUpgrade.containsKey(toscaConformanceLevel)){ + nodeTypes = resourcesForUpgrade.get(toscaConformanceLevel); + if(nodeTypes !=null && !nodeTypes.isEmpty()){ + Either, TitanOperationStatus> getRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.NODE_TYPE, ComponentTypeEnum.RESOURCE); + if(getRes.isRight()){ + return false; + } + List allNodeTypes = getRes.left().value(); + + for(String toscaResourceName: nodeTypes){ + Either, StorageOperationStatus> status = getLatestByName(GraphPropertyEnum.TOSCA_RESOURCE_NAME, toscaResourceName); + if (status.isRight()) { + LOGGER.error("Failed to find node type {} ", toscaResourceName); + return false; + } + List vList = status.left().value(); + for (GraphVertex vertex : vList) { + StorageOperationStatus updateRes = upgradeNodeType(vertex, upgradedNodeTypesMap, allNodeTypes, nodeTypes); + if (updateRes != StorageOperationStatus.OK) { + return false; + } + } + } + } + } + return true; + } + + private boolean upgradeVFs() { + LOGGER.info("Starting upgrade VFs upon upgrade migration 1710 process. "); + Either, TitanOperationStatus> getVfsRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.TOPOLOGY_TEMPLATE, ComponentTypeEnum.RESOURCE); + if(getVfsRes.isRight()){ + LOGGER.info(UPGRADE_VFS_FAILED); + return false; + } + for (String currUid : getVfsRes.left().value()) { + try { + if (!upgradeVf(currUid)) { + processComponentUpgradeFailure(ComponentTypeEnum.RESOURCE.name(), currUid, ""); + if (!skipIfUpgradeVfFailed) { + LOGGER.info(UPGRADE_VFS_FAILED); + return false; + } + } + titanDao.commit(); + } catch (Exception e) { + processComponentUpgradeFailure(ComponentTypeEnum.RESOURCE.name(), currUid, e.getMessage()); + if (!skipIfUpgradeVfFailed) { + LOGGER.info(UPGRADE_VFS_FAILED); + return false; + } + } + } + LOGGER.info("Upgrade VFs upon upgrade migration 1710 process finished successfully. "); + return true; + } + + private boolean upgradeVf(String uniqueId) { + LOGGER.info("Starting upgrade VF with uniqueId {} upon upgrade migration 1710 process. ", uniqueId); + Either latestVersionRes; + Either getRes = toscaOperationFacade.getToscaElement(uniqueId); + if(getRes.isRight()){ + LOGGER.debug("Failed to fetch VF with uniqueId {} upon upgrade migration 1710 process. ", uniqueId); + outputHandler.addRecord(ComponentTypeEnum.RESOURCE.name(), UNKNOWN, UNKNOWN, uniqueId, MigrationResult.MigrationStatus.FAILED.name(), getRes.right().value()); + return false; + } + if(StringUtils.isNotEmpty(getRes.left().value().getCsarUUID())){ + LOGGER.info("Going to fetch the latest version of VSP with csarUUID {} upon upgrade migration 1710 process. 
", getRes.left().value().getCsarUUID()); + latestVersionRes = csarOperation.getCsarLatestVersion(getRes.left().value().getCsarUUID(), user); + if(latestVersionRes.isRight()){ + LOGGER.debug("Failed to fetch the latest version of VSP with csarUUID {} upon upgrade migration 1710 process. ", getRes.left().value().getCsarUUID()); + outputHandler.addRecord(getRes.left().value().getComponentType().name(), getRes.left().value().getName(), getRes.left().value().getUUID(), getRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(),latestVersionRes.right().value()); + return false; + } + if(isGreater(latestVersionRes.left().value(), getRes.left().value().getCsarVersion())){ + return upgradeVfWithLatestVsp(getRes.left().value(), latestVersionRes); + } + if (!isVfcUpgradeRequired){ + LOGGER.warn("Warning: No need to upgrade VF with name {}, invariantUUID {}, version {} and VSP version {}. No new version of VSP. ", getRes.left().value().getName(), getRes.left().value().getInvariantUUID(), getRes.left().value().getVersion(), getRes.left().value().getCsarVersion()); + } + } + return upgradeComponentWithLatestGeneric(getRes.left().value()); + } + + private boolean upgradeVfWithLatestVsp(org.openecomp.sdc.be.model.Component vf, Either latestVersionRes) { + LOGGER.info("Starting upgrade vf with name {}, invariantUUID {}, version {} and latest VSP version {} upon upgrade migration 1710 process. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value()); + LOGGER.info("Starting to perform check out of vf with name {}, invariantUUID {}, version {}. ", vf.getName(),vf.getInvariantUUID(), vf.getVersion()); + Either checkouRes = lifecycleBusinessLogic.changeComponentState(vf.getComponentType(), vf.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false); + if(checkouRes.isRight()){ + outputHandler.addRecord(vf.getComponentType().name(), vf.getName(), vf.getUUID(), vf.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkouRes.right().value().getFormattedMessage()); + return false; + } + LOGGER.info("Starting update vf with name {}, invariantUUID {}, version {} and latest VSP {}. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value()); + Resource resourceToUpdate = new Resource(((Resource) checkouRes.left().value()).getComponentMetadataDefinition()); + resourceToUpdate.setDerivedFromGenericType(((Resource) checkouRes.left().value()).getDerivedFromGenericType()); + resourceToUpdate.setDerivedFromGenericVersion(((Resource) checkouRes.left().value()).getDerivedFromGenericVersion()); + resourceToUpdate.setCsarVersion(Double.toString(Double.parseDouble(latestVersionRes.left().value()))); + Either updateResourceFromCsarRes = resourceBusinessLogic.validateAndUpdateResourceFromCsar(resourceToUpdate, user, null, null, resourceToUpdate.getUniqueId()); + if(updateResourceFromCsarRes.isRight()){ + outputHandler.addRecord(resourceToUpdate.getComponentType().name(), resourceToUpdate.getName(), resourceToUpdate.getUUID(), resourceToUpdate.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateResourceFromCsarRes.right().value().getFormattedMessage()); + LOGGER.info("Failed to update vf with name {}, invariantUUID {}, version {} and latest VSP {}. 
", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value()); + return false; + } + Either certifyRes = performFullCertification(checkouRes.left().value()); + if(certifyRes.isRight()){ + LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getVersion(), LifeCycleTransitionEnum.CERTIFY); + outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage()); + return false; + } + LOGGER.info("Full certification of vf with name {}, invariantUUID {}, version {} finished . ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value()); + outputHandler.addRecord(certifyRes.left().value().getComponentType().name(), certifyRes.left().value().getName(), certifyRes.left().value().getUUID(), certifyRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(), UpgradeStatus.UPGRADED); + return true; + } + + private boolean upgradeComponentWithLatestGeneric(org.openecomp.sdc.be.model.Component component) { + String derivedFromGenericType = component.getDerivedFromGenericType(); + String derivedFromGenericVersion = component.getDerivedFromGenericVersion(); + org.openecomp.sdc.be.model.Component updatedComponent = component; + if(StringUtils.isNotEmpty(derivedFromGenericType) && !latestGenericTypes.containsKey(derivedFromGenericType)){ + LOGGER.info("Starting upgrade vf with name {}, invariantUUID {}, version {}, latest derived from generic type {}, latest derived from generic version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion(), derivedFromGenericType, derivedFromGenericVersion); + LOGGER.info("Starting to fetch latest generic node type {}. ", derivedFromGenericType); + Either, TitanOperationStatus> getDerivedRes = findDerivedResources(derivedFromGenericType); + if(getDerivedRes.isRight()){ + outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getDerivedRes.right().value()); + LOGGER.info("Failed to upgrade component with name {}, invariantUUID {}, version {} and latest generic. Status is {}. ", component.getName(), component.getInvariantUUID(), component.getVersion(), derivedFromGenericType); + return false; + } + latestGenericTypes.put(derivedFromGenericType, getDerivedRes.left().value().get(0)); + } + if(StringUtils.isEmpty(derivedFromGenericType) || + latestVersionExists(latestGenericTypes.get(derivedFromGenericType), derivedFromGenericVersion) || + isVfcUpgradeRequired){ + if(StringUtils.isNotEmpty(derivedFromGenericType)) + LOGGER.info("Newer version {} of derived from generic type {} exists. ", latestGenericTypes.get(derivedFromGenericType).getJsonMetadataField(JsonPresentationFields.VERSION), derivedFromGenericType); + else + LOGGER.info("The vf resource with name {}, invariantUUID {}, version {}, has an empty derivedFromGenericType field. ", component.getName(), component.getInvariantUUID(), component.getVersion()); + + LOGGER.info("Starting to perform check out of vf with name {}, invariantUUID {}, version {}. 
", component.getName(), component.getInvariantUUID(), component.getVersion()); + Either checkouRes = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false); + if(checkouRes.isRight()){ + LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CHECKOUT); + outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkouRes.right().value().getFormattedMessage()); + return false; + } + //update included VFCs, if it is required as per configuration + if(isVfcUpgradeRequired && CollectionUtils.isNotEmpty(checkouRes.left().value().getComponentInstances())){ + LOGGER.info("VFC upgrade is required: updating components of vf with name {}, invariantUUID {}, version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion()); + Either updateCompositionRes = + updateComposition(checkouRes.left().value()); + if(updateCompositionRes.isRight()){ + LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getVersion(), "updateComposition", updateCompositionRes.right().value().getFormattedMessage()); + outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateCompositionRes.right().value().getFormattedMessage()); + return false; + } + } + Either certifyRes = performFullCertification(checkouRes.left().value()); + if(certifyRes.isRight()){ + LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFY); + outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage()); + return false; + } + updatedComponent = certifyRes.left().value(); + } else { + LOGGER.info("The version {} of derived from generic type {} is up to date. No need to upgrade component with name {}, invariantUUID {} and version {}. ", latestGenericTypes.get(derivedFromGenericType), derivedFromGenericType,component.getName(), component.getInvariantUUID(), component.getVersion()); + } + LOGGER.info(UPGRADE_COMPONENT_SUCCEEDED, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion()); + outputHandler.addRecord(updatedComponent.getComponentType().name(), updatedComponent.getName(), updatedComponent.getUUID(), updatedComponent.getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(), updatedComponent.equals(component) ? UpgradeStatus.NOT_UPGRADED : UpgradeStatus.UPGRADED); + return true; + } + + private StorageOperationStatus upgradeNodeType(GraphVertex nodeTypeV, Map upgradedNodeTypesMap, List allCertifiedUids, List nodeTypes) { + StorageOperationStatus result = StorageOperationStatus.OK; + LOGGER.info("Starting upgrade node type with name {}, invariantUUID {}, version{}. 
", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION)); + LOGGER.info("Starting to find derived to for node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION)); + Either, TitanOperationStatus> parentResourceRes = titanDao.getParentVertecies(nodeTypeV, EdgeLabelEnum.DERIVED_FROM, JsonParseFlagEnum.ParseMetadata); + if(parentResourceRes.isRight() && parentResourceRes.right().value() != TitanOperationStatus.NOT_FOUND ){ + return DaoStatusConverter.convertTitanStatusToStorageStatus(parentResourceRes.right().value()); + + } + List derivedResourcesUid = new ArrayList<>(); + if(parentResourceRes.isLeft()){ + for(GraphVertex chV: parentResourceRes.left().value()){ + Optional op = allCertifiedUids.stream().filter(id -> id.equals((String)chV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID))).findAny(); + if(op.isPresent()){ + derivedResourcesUid.add(chV); + } + } + } + String uniqueId = (String)nodeTypeV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID); + Either getRes = toscaOperationFacade.getToscaElement(uniqueId); + if(getRes.isRight()){ + LOGGER.info("failed to fetch element with uniqueId {} ", uniqueId); + return getRes.right().value(); + } + + org.openecomp.sdc.be.model.Resource nt = (Resource)getRes.left().value(); + boolean isNeedToUpgrade = true; + if(upgradedNodeTypesMap.containsKey(nt.getToscaResourceName()) || nodeTypes.stream().filter( p -> p.equals(nt.getToscaResourceName())).findAny().isPresent()){ + isNeedToUpgrade = false; + } + if(isNeedToUpgrade){ + LOGGER.info("Starting to perform check out of node type with name {}, invariantUUID {}, version {}. ", nt.getName(), nt.getInvariantUUID(), nt.getVersion()); + Either checkouRes = lifecycleBusinessLogic.changeComponentState(nt.getComponentType(), nt.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false); + if(checkouRes.isRight()){ + return StorageOperationStatus.GENERAL_ERROR; + } + org.openecomp.sdc.be.model.Component upgradetComp = checkouRes.left().value(); + boolean res = performFullCertification(upgradetComp).isLeft(); + if(!res){ + return StorageOperationStatus.GENERAL_ERROR; + } + upgradedNodeTypesMap.put(nt.getToscaResourceName(), upgradetComp); + titanDao.commit(); + } + for(GraphVertex chV: derivedResourcesUid){ + result = upgradeNodeType(chV, upgradedNodeTypesMap, allCertifiedUids, nodeTypes); + LOGGER.info("Upgrade node type with name {}, invariantUUID {}, version {} has been finished with the status {}", chV.getMetadataProperty(GraphPropertyEnum.NAME), chV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), chV.getMetadataProperty(GraphPropertyEnum.VERSION), result); + } + return result; + } + + private Either performFullCertification(org.openecomp.sdc.be.model.Component component) { + LOGGER.info("Starting to perform full certification of {} with name {}, invariantUUID {}, version {}. 
", + component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion()); + + Either changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CERTIFICATION_REQUEST, changeInfo, true, false); + if(changeStateEither.isRight()){ + LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFICATION_REQUEST); + return changeStateEither; + } + changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.START_CERTIFICATION, changeInfo, true, false); + if(changeStateEither.isRight()){ + LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.START_CERTIFICATION); + return changeStateEither; + } + changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.CERTIFY, changeInfo, true, false); + if(changeStateEither.isRight()){ + LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFY); + } + else { + LOGGER.info("Full certification of {} with name {}, invariantUUID {}, version {} finished successfully", + changeStateEither.left().value().getComponentType().getValue(), changeStateEither.left().value().getName(), + changeStateEither.left().value().getInvariantUUID(), changeStateEither.left().value().getVersion()); + } + return changeStateEither; + } + + private Either, TitanOperationStatus> findDerivedResources(String parentResource) { + Map propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class); + propertiesToMatch.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name()); + + propertiesToMatch.put(GraphPropertyEnum.TOSCA_RESOURCE_NAME, parentResource); + propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true); + + return titanDao.getByCriteria(VertexTypeEnum.NODE_TYPE, propertiesToMatch, JsonParseFlagEnum.ParseMetadata); + } + + private boolean latestVersionExists(GraphVertex latestDerivedFrom, String currentVersion) { + return isGreater((String)latestDerivedFrom.getJsonMetadataField(JsonPresentationFields.VERSION), currentVersion); + } + + private boolean isGreater(String latestVersion, String currentVersion) { + if(latestVersion != null && currentVersion == null) + return true; + if(latestVersion == null) + return false; + return Double.parseDouble(latestVersion) > Double.parseDouble(currentVersion); + } + + private Either, TitanOperationStatus> getAllLatestCertifiedComponentUids(VertexTypeEnum vertexType, ComponentTypeEnum componentType) { + LOGGER.info("Starting to fetch all latest certified not checked out components with type {} upon upgrade migration 1710 process", componentType); + Either, TitanOperationStatus> result = null; + Map latestCertifiedMap = new HashMap<>(); + Map latestNotCertifiedMap = new HashMap<>(); + + Either, TitanOperationStatus> getComponentsRes = getAllLatestCertifiedComponents(vertexType, componentType); + if(getComponentsRes.isRight() && getComponentsRes.right().value() != TitanOperationStatus.NOT_FOUND){ + LOGGER.error("Failed to fetch all latest certified not checked out components with type {}. Status is {}. 
", componentType, getComponentsRes.right().value()); + result = Either.right(getComponentsRes.right().value()); + } + if(getComponentsRes.isRight()){ + result = Either.left(new ArrayList<>()); + } + if(result == null){ + for(GraphVertex component : getComponentsRes.left().value()){ + String invariantUUID = (String)component.getJsonMetadataField(JsonPresentationFields.INVARIANT_UUID); + if(((String)component.getJsonMetadataField(JsonPresentationFields.LIFECYCLE_STATE)).equals(LifecycleStateEnum.CERTIFIED.name())){ + latestCertifiedMap.put(invariantUUID, (String)component.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID)); + } else { + latestNotCertifiedMap.put(invariantUUID, (String)component.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID)); + } + } + result = Either.left(latestCertifiedMap.entrySet().stream().filter(e->!latestNotCertifiedMap.containsKey(e.getKey())).map(e->e.getValue()).collect(Collectors.toList())); + } + return result; + } + + private Either, TitanOperationStatus> getAllLatestCertifiedComponents(VertexTypeEnum vertexType, ComponentTypeEnum componentType){ + + Map propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class); + propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, componentType.name()); + propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true); + + Map propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class); + propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true); + if(vertexType == VertexTypeEnum.TOPOLOGY_TEMPLATE && componentType == ComponentTypeEnum.RESOURCE) + propertiesNotToMatch.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.CVFC.name()); + return titanDao.getByCriteria(vertexType, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseMetadata); + } + + protected Either, TitanOperationStatus> findResourcesPathRecursively(GraphVertex nodeTypeV, List allCertifiedUids) { + Either, TitanOperationStatus> parentResourceRes = titanDao.getParentVertecies(nodeTypeV, EdgeLabelEnum.DERIVED_FROM, JsonParseFlagEnum.ParseMetadata); + if(parentResourceRes.isRight()){ + return Either.right(parentResourceRes.right().value()); + } + List derivedResourcesUid = new ArrayList<>(); + for(GraphVertex chV: parentResourceRes.left().value()){ + Optional op = allCertifiedUids.stream().filter(id -> id.equals((String)chV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID))).findAny(); + if(op.isPresent()){ + derivedResourcesUid.add(chV); + } + } + return null; + } + + private Either, StorageOperationStatus> getLatestByName(GraphPropertyEnum property, String nodeName){ + + Map propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class); + Map propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class); + + propertiesToMatch.put(property, nodeName); + propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true); + + Either, TitanOperationStatus> highestResources = titanDao.getByCriteria(null, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseMetadata); + if (highestResources.isRight()) { + TitanOperationStatus status = highestResources.right().value(); + LOGGER.debug("Failed to fetch resource with name {}. 
Status is {} ", nodeName, status); + return Either.right(DaoStatusConverter.convertTitanStatusToStorageStatus(status)); + } + List resources = highestResources.left().value(); + List result = new ArrayList<>(); + for(GraphVertex component:resources){ + if(((String)component.getJsonMetadataField(JsonPresentationFields.LIFECYCLE_STATE)).equals(LifecycleStateEnum.CERTIFIED.name())){ + result.add(component); + } + } + return Either.left(result); + } + +} diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java index 0ab863aa3e..898bd96177 100644 --- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java +++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java @@ -24,13 +24,11 @@ import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.OutputStream; -import java.util.Properties; import java.util.Map.Entry; +import java.util.Properties; import javax.ws.rs.Consumes; import javax.ws.rs.GET; diff --git a/asdctool/src/main/resources/config/Artifact-Generator.properties b/asdctool/src/main/resources/config/Artifact-Generator.properties new file mode 100644 index 0000000000..1d7e5fa01a --- /dev/null +++ b/asdctool/src/main/resources/config/Artifact-Generator.properties @@ -0,0 +1,264 @@ +#action widget details +AAI.model-version-id.action=fd7fb09e-d930-41b9-b83f-cfde9df48640 +AAI.model-invariant-id.action=af593b4b-490e-4665-ad74-2f6351c0a7ce +#action-data widget details +AAI.model-invariant-id.action-data=9551346c-7d8b-4daf-9926-b93e96e2344a +AAI.model-version-id.action-data=2f80c596-27e5-4ca9-b5bb-e03a7fd4c0fd +#allotted-resource widget details +AAI.model-invariant-id.allotted-resource=f6d6a23d-a1a9-48ff-8419-b6530da2d381 +AAI.model-version-id.allotted-resource=7ad0915f-25c0-4a70-b9bc-185a75f87564 +#availability-zone widget details +AAI.model-version-id.availability-zone=6c092fb1-21b2-456b-9e01-67fb4de1896e +AAI.model-invariant-id.availability-zone=61b88c01-d819-41c0-8e21-7fd7ba47148e +#az-and-dvs-switches widget details +AAI.model-version-id.az-and-dvs-switches=b2dea88d-78a0-49bf-95c9-5819df08e966 +AAI.model-invariant-id.az-and-dvs-switches=53dc00d4-e6d9-48ec-b6cc-3d3797e9b896 +#class-of-service widget details +AAI.model-version-id.class-of-service=d2fb27cc-15eb-4c4e-828e-71d41aaecc5b +AAI.model-invariant-id.class-of-service=18094b19-d16d-4822-8acf-e92c6aefa178 +#cloud-region widget details +AAI.model-version-id.cloud-region=2a160989-b202-47dd-874b-4a0f275998f7 +AAI.model-invariant-id.cloud-region=425b2158-e51d-4509-9945-dad4556474a3 +#complex widget details +AAI.model-invariant-id.complex=af91c2f7-35fc-43cf-a13d-443f385b2353 +AAI.model-version-id.complex=3a8ab1ee-9220-4fe8-b89c-9251d160ddc2 +#connector widget details +AAI.model-version-id.connector=22104c9f-29fd-462f-be07-96cd6b46dd33 +AAI.model-invariant-id.connector=4c01c948-7607-4d66-8a6c-99c2c2717936 +#constrained-element-set widget details +AAI.model-invariant-id.constrained-element-set=c0292b4f-ee97-40cc-8c2e-f967c48f5701 +AAI.model-version-id.constrained-element-set=01102126-9c04-4a89-945b-b131e61e95d7 +#ctag-assignment widget details +AAI.model-version-id.ctag-assignment=44e5cb1f-0938-41aa-b766-d4595109fe89 
+AAI.model-invariant-id.ctag-assignment=fcb8d46b-b656-4ad6-8fa4-22cef74b443f +#ctag-pool widget details +AAI.model-invariant-id.ctag-pool=46c51d4e-d67e-4a9c-b1f5-49b1e9c6fcaa +AAI.model-version-id.ctag-pool=2056c41f-23b9-4de7-9f50-819adad37d76 +#customer widget details +AAI.model-invariant-id.customer=c1d4305f-cdbd-4bbe-9069-a2f4978fd89e +AAI.model-version-id.customer=d4df5c27-98a1-4812-a8aa-c17f055b7a3f +#cvlan-tag-entry widget details +AAI.model-version-id.cvlan-tag-entry=c3878ffb-8d85-4114-bee6-e4074a9db10b +AAI.model-invariant-id.cvlan-tag-entry=245cf4b0-7cc5-4eea-bbd9-753e939adcab +#dvs-switch widget details +AAI.model-invariant-id.dvs-switch=98fbb471-1f86-428e-bd8a-c8a25de6fa23 +AAI.model-version-id.dvs-switch=4cb44ae8-e3ab-452a-9f95-bcc8a44c55ea +#edge-prop-names widget details +AAI.model-invariant-id.edge-prop-names=7a08cad4-8759-46a5-8245-095d1ba57ac6 +AAI.model-version-id.edge-prop-names=f0442326-8201-4d0e-857c-74b4ddcbfc9f +#element-choice-set widget details +AAI.model-invariant-id.element-choice-set=9a011958-7165-47a3-b872-00951d1f09ae +AAI.model-version-id.element-choice-set=af27fbfd-598d-44da-aeae-0f9d3a5fcd6a +#entitlement widget details +AAI.model-version-id.entitlement=7e27ba2e-b7db-4e13-9fae-d142152ef98a +AAI.model-invariant-id.entitlement=ae75b5a0-d5e1-4f3a-b8fb-37626a753da3 +#flavor widget details +AAI.model-invariant-id.flavor=bace8d1c-a261-4041-9e37-823117415d0f +AAI.model-version-id.flavor=36200fb5-f251-4f5d-a520-7c5ad5c2cd4b +#generic-vnf widget details +AAI.model-version-id.generic-vnf=93a6166f-b3d5-4f06-b4ba-aed48d009ad9 +AAI.model-invariant-id.generic-vnf=acc6edd8-a8d4-4b93-afaa-0994068be14c +#group-assignment widget details +AAI.model-invariant-id.group-assignment=7cc05f25-7ba2-42b7-a237-c5662a1689e1 +AAI.model-version-id.group-assignment=fe578080-ce19-4604-8760-fc264fbb2565 +#image widget details +AAI.model-version-id.image=f6a038c2-820c-42ba-8c2b-375e24e8f932 +AAI.model-invariant-id.image=3f4c7204-739b-4bbb-87a7-8a6856439c90 +#include-node-filter widget details +AAI.model-invariant-id.include-node-filter=2a2d8ad2-af0a-4e1f-9982-0c899e7dc827 +AAI.model-version-id.include-node-filter=f05f804d-7057-4ffe-bdc5-39f2f0c9c9fd +#instance-group widget details +AAI.model-version-id.instance-group=8e6ee9dc-9017-444a-83b3-219edb018128 +AAI.model-invariant-id.instance-group=3bf1e610-45f7-4ad6-b833-ca4c5ee6a3fd +#inventory-item widget details +AAI.model-invariant-id.inventory-item=cd57d844-9017-4078-aa19-926935a3d77c +AAI.model-version-id.inventory-item=69957f4a-2155-4b95-8d72-d6dd9b88b27b +#inventory-item-data widget details +AAI.model-version-id.inventory-item-data=0e54bb87-bd6e-4a2b-ad1c-6d935b87ae51 +AAI.model-invariant-id.inventory-item-data=87a383ae-cf03-432e-a9de-04e6a622d0fd +#ipsec-configuration widget details +AAI.model-invariant-id.ipsec-configuration=aca4c310-cb45-42bd-9f88-73e40ba7b962 +AAI.model-version-id.ipsec-configuration=d949fd10-36bf-408a-ac7a-cad5004d2e0d +#key-data widget details +AAI.model-version-id.key-data=c23ea04d-1a3b-453d-bc49-a6c783a5e92b +AAI.model-invariant-id.key-data=f5faa464-c2f2-4cc3-89d2-a90452dc3a07 +#l3-interface-ipv4-address-list widget details +AAI.model-version-id.l3-interface-ipv4-address-list=41e76b6f-1e06-4fd4-82cd-81c50fc4574b +AAI.model-invariant-id.l3-interface-ipv4-address-list=aad85df2-09be-40fa-b867-16415e4e10e2 +#l3-interface-ipv6-address-list widget details +AAI.model-invariant-id.l3-interface-ipv6-address-list=82966045-43ee-4982-8307-7e9610866140 
+AAI.model-version-id.l3-interface-ipv6-address-list=d040621d-541a-477b-bb1b-a2b61b14e295 +#l3-network widget details +AAI.model-version-id.l3-network=9111f20f-e680-4001-b83f-19a2fc23bfc1 +AAI.model-invariant-id.l3-network=3d560d81-57d0-438b-a2a1-5334dba0651a +#lag-interface widget details +AAI.model-version-id.lag-interface=ce95f7c3-b61b-4758-ae9e-7e943b1c103d +AAI.model-invariant-id.lag-interface=e0ee9bde-c1fc-4651-a95d-8e0597bf7d70 +#lag-link widget details +AAI.model-version-id.lag-link=d29a087a-af59-4053-a3f8-0f95a92faa75 +AAI.model-invariant-id.lag-link=86ffe6e5-4d0e-4cec-80b5-5c38aa3eff98 +#license widget details +AAI.model-invariant-id.license=b9a9b337-1f86-42d3-b9f9-f987a089507c +AAI.model-version-id.license=6889274b-a1dc-40ab-9090-93677e13e2e6 +#license-key-resource widget details +AAI.model-invariant-id.license-key-resource=9022ebfe-b54f-4911-a6b2-8c3f5ec189b7 +AAI.model-version-id.license-key-resource=24b25f8c-b8bd-4c62-9421-87c12667aac9 +#l-interface widget details +AAI.model-version-id.l-interface=a32613fd-18b9-459e-aab8-fffb3912966a +AAI.model-invariant-id.l-interface=cea0a982-8d55-4093-921e-418fbccf7060 +#logical-link widget details +AAI.model-version-id.logical-link=a1481a38-f8ba-4ae4-bdf1-06c2c6af4c54 +AAI.model-invariant-id.logical-link=fe012535-2c31-4a39-a739-612374c638a0 +#metadatum widget details +AAI.model-invariant-id.metadatum=86dbb63a-265e-4614-993f-6771c30b56a5 +AAI.model-version-id.metadatum=6bae950e-8939-41d3-a6a7-251b03e4c1fc +#model widget details +AAI.model-invariant-id.model=06d1418a-5faa-452d-a94b-a2829df5f67b +AAI.model-version-id.model=1f51c05c-b164-4c27-9c03-5cbb239fd6be +#model-constraint widget details +AAI.model-invariant-id.model-constraint=c28966f3-e758-4483-b37b-a90b05d3dd33 +AAI.model-version-id.model-constraint=ad70dd19-f156-4fb5-a865-97b5563b0d37 +#model-element widget details +AAI.model-invariant-id.model-element=2076e726-3577-477a-a300-7fa65cd4df11 +AAI.model-version-id.model-element=753e813a-ba9e-4a1d-ab34-b2f6dc6eec0c +#multicast-configuration widget details +AAI.model-invariant-id.multicast-configuration=ea78c9e3-514d-4a0a-9162-13837fa54c35 +AAI.model-version-id.multicast-configuration=666a06ee-4b57-46df-bacf-908da8f10c3f +#named-query widget details +AAI.model-version-id.named-query=5c3b7c33-afa3-4be5-8da7-1a5ac6f99896 +AAI.model-invariant-id.named-query=80b712fd-0ad3-4180-a99c-8c995cf1cc32 +#named-query-element widget details +AAI.model-version-id.named-query-element=204c641a-3494-48c8-979a-86856f5fd32a +AAI.model-invariant-id.named-query-element=3c504d40-b847-424c-9d25-4fb7e0a3e994 +#network-policy widget details +AAI.model-invariant-id.network-policy=6aa05779-94d7-4d8b-9bee-59ef2ab0c246 +AAI.model-version-id.network-policy=a0ccd9dc-7062-4940-9bcc-e91dd28af510 +#network-profile widget details +AAI.model-version-id.network-profile=01f45471-4240-498c-a9e1-235dc0b8b4a6 +AAI.model-invariant-id.network-profile=2734b44a-b8a2-40f6-957d-6256589e5d00 +#newvce widget details +AAI.model-version-id.newvce=7c79e11f-a408-4593-aa86-ba948a1236af +AAI.model-invariant-id.newvce=4b05ec9c-c55d-4987-83ff-e08d6ddb694f +#oam-network widget details +AAI.model-invariant-id.oam-network=2851cf01-9c40-4064-87d4-6184a6fcff35 +AAI.model-version-id.oam-network=f4fb34f3-fd6e-4a8f-a3fb-4ab61a343b79 +#physical-link widget details +AAI.model-invariant-id.physical-link=c822d81f-822f-4304-9623-1025b53da568 +AAI.model-version-id.physical-link=9c523936-95b4-4d7f-9f53-6bdfe0cf2c05 +#p-interface widget details 
+AAI.model-invariant-id.p-interface=94043c37-4e73-439c-a790-0fdd697924cd +AAI.model-version-id.p-interface=d2cdb2d0-fc1f-4a57-a89e-591b1c4e3754 +#pnf widget details +AAI.model-version-id.pnf=e9f1fa7d-c839-418a-9601-03dc0d2ad687 +AAI.model-invariant-id.pnf=862b25a1-262a-4961-bdaa-cdc55d69785a +#port-group widget details +AAI.model-version-id.port-group=03e8bb6b-b48a-46ae-b5d4-e5af577e6844 +AAI.model-invariant-id.port-group=8ce940fb-55d7-4230-9e7f-a56cc2741f77 +#property-constraint widget details +AAI.model-version-id.property-constraint=81706bbd-981e-4362-ae20-995cbcb2d995 +AAI.model-invariant-id.property-constraint=f4a863c3-6886-470a-a6ae-05723837ea45 +#pserver widget details +AAI.model-invariant-id.pserver=6d932c8f-463b-4e76-83fb-87acfbaa2e2d +AAI.model-version-id.pserver=72f0d495-bc27-4653-9e1a-eef76bd34bc9 +#related-lookup widget details +AAI.model-invariant-id.related-lookup=468f6f5b-2996-41bb-b2a3-7cf9613ebb9b +AAI.model-version-id.related-lookup=0988bab5-bf4f-4938-a419-ab249867d12a +#reserved-prop-names widget details +AAI.model-invariant-id.reserved-prop-names=0c3e0ba3-618c-498d-9127-c8d42b00170f +AAI.model-version-id.reserved-prop-names=ac49d26d-9163-430e-934a-13b738a04f5c +#result-data widget details +AAI.model-version-id.result-data=4e9b50aa-5227-4f6f-b489-62e6bbc03c79 +AAI.model-invariant-id.result-data=ff656f23-6185-406f-9006-4b26834f3e1c +#route-table-reference widget details +AAI.model-version-id.route-table-reference=fed7e326-03a7-45ff-a3f2-471470d268c4 +AAI.model-invariant-id.route-table-reference=a8614b63-2636-4c4f-98df-fd448c4241db +#routing-instance widget details +AAI.model-invariant-id.routing-instance=1c2ded4f-8b01-4193-829c-966847dfec3e +AAI.model-version-id.routing-instance=3ccbcbc7-d19e-44d5-a52f-7e18aa8d69fa +#secondary-filter widget details +AAI.model-version-id.secondary-filter=1380619d-dd1a-4cec-b755-c6407833e065 +AAI.model-invariant-id.secondary-filter=738ff299-6290-4c00-8998-bd0e96a07b93 +#segmentation-assignment widget details +AAI.model-invariant-id.segmentation-assignment=6e814aee-46e1-4583-a9d4-0049bfd2b59b +AAI.model-version-id.segmentation-assignment=c5171ae0-44fb-4c04-b482-d56702241a44 +#service widget details +AAI.model-version-id.service=ecce2c42-3957-4ae0-9442-54bc6afe27b6 +AAI.model-invariant-id.service=07a3a60b-1b6c-4367-8173-8014386f89e3 +#service-capability widget details +AAI.model-invariant-id.service-capability=b1a7cc05-d19d-443b-a5d1-733e325c4232 +AAI.model-version-id.service-capability=f9cfec1b-18da-4bba-bd83-4b26cca115cd +#service-instance widget details +AAI.model-invariant-id.service-instance=82194af1-3c2c-485a-8f44-420e22a9eaa4 +AAI.model-version-id.service-instance=46b92144-923a-4d20-b85a-3cbd847668a9 +#service-subscription widget details +AAI.model-invariant-id.service-subscription=2e1a602a-acd8-4f78-94ff-618b802a303b +AAI.model-version-id.service-subscription=5e68299a-79f2-4bfb-8fbc-2bae877a2459 +#site-pair widget details +AAI.model-version-id.site-pair=7106bc02-6552-4fc3-8a56-4f3df9034531 +AAI.model-invariant-id.site-pair=db63f3e6-f8d1-484e-8d5e-191600b7914b +#site-pair-set widget details +AAI.model-invariant-id.site-pair-set=5d4dae3e-b402-4bfd-909e-ece12ff75d26 +AAI.model-version-id.site-pair-set=a5c6c1bc-dc38-468e-9459-bb08f87247df +#snapshot widget details +AAI.model-version-id.snapshot=962a7c8b-687f-4d32-a775-fe098e214bcd +AAI.model-invariant-id.snapshot=24de00ef-aead-4b52-995b-0adf8d4bd90d +#sriov-vf widget details +AAI.model-version-id.sriov-vf=1e8b331f-3d4a-4160-b7aa-f4d5a8916625 
+AAI.model-invariant-id.sriov-vf=04b2935f-33c4-40a9-8af0-8b52690042dc +#start-node-filter widget details +AAI.model-version-id.start-node-filter=aad96fd3-e75f-42fc-9777-3450c36f1168 +AAI.model-invariant-id.start-node-filter=083093a3-e407-447a-ba5d-7583e4d23e1d +#subnet widget details +AAI.model-version-id.subnet=f902a6bc-6be4-4fe5-8458-a6ec0056b374 +AAI.model-invariant-id.subnet=1b2c9ba7-e449-4831-ba15-3073672f5ef2 +#tagged-inventory-item-list widget details +AAI.model-invariant-id.tagged-inventory-item-list=e78a7eaa-f65d-4919-9c2b-5b258c8c4d7e +AAI.model-version-id.tagged-inventory-item-list=c246f6e2-e3a1-4697-94c0-5672a7fbbf04 +#tenant widget details +AAI.model-invariant-id.tenant=97c26c99-6870-44c1-8a07-1d900d3f4ce6 +AAI.model-version-id.tenant=abcc54bc-bb74-49dc-9043-7f7171707545 +#tunnel-xconnect widget details +AAI.model-invariant-id.tunnel-xconnect=50b9e2fa-005c-4bbe-b651-3251dece4cd8 +AAI.model-version-id.tunnel-xconnect=e7cb4ca8-e1a5-4487-a716-4ae0bcd8aef5 +#update-node-key widget details +AAI.model-version-id.update-node-key=6004cfa6-eb6d-4062-971f-b1fde6b74aa0 +AAI.model-invariant-id.update-node-key=fe81c801-f65d-408a-b2b7-a729a18f8154 +#vce widget details +AAI.model-version-id.vce=b6cf54b5-ec45-43e1-be64-97b4e1513333 +AAI.model-invariant-id.vce=bab6dceb-e7e6-4301-a5e0-a7399b48d792 +#vf-module widget details +AAI.model-invariant-id.vf-module=ef86f9c5-2165-44f3-8fc3-96018b609ea5 +AAI.model-version-id.vf-module=c00563ae-812b-4e62-8330-7c4d0f47088a +#vig-server widget details +AAI.model-version-id.vig-server=8e8c22f1-fbdf-48ea-844c-8bdeb44e7b16 +AAI.model-invariant-id.vig-server=bed7c3b7-35d0-4cd9-abde-41b20e68b28e +#virtual-data-center widget details +AAI.model-invariant-id.virtual-data-center=5150abcf-0c5f-4593-9afe-a19c48fc4824 +AAI.model-version-id.virtual-data-center=6dd43ced-d789-47af-a759-d3abc14e3ac1 +#vlan widget details +AAI.model-version-id.vlan=257d88a5-a269-4c35-944f-aca04fbdb791 +AAI.model-invariant-id.vlan=d2b1eaf1-ae59-4116-9ee4-aa0179faa4f8 +#vnfc widget details +AAI.model-invariant-id.vnfc=96129eb9-f0de-4e05-8af2-73146473f766 +AAI.model-version-id.vnfc=5761e0a7-c6df-4d8a-9ebd-b8f445054dec +#vnf-image widget details +AAI.model-invariant-id.vnf-image=f9a628ff-7aa0-40e2-a93d-02d91c950982 +AAI.model-version-id.vnf-image=c4d3e747-ba4a-4b17-9896-94c6f18c19d3 +#volume widget details +AAI.model-version-id.volume=0fbe2e8f-4d91-4415-a772-88387049b38d +AAI.model-invariant-id.volume=ddd739b4-2b25-46c4-affc-41a32af5cc42 +#volume-group widget details +AAI.model-invariant-id.volume-group=fcec1b02-b2d0-4834-aef8-d71be04717dd +AAI.model-version-id.volume-group=99d44c90-1f61-4418-b9a6-56586bf38c79 +#vpe widget details +AAI.model-invariant-id.vpe=053ec3a7-5b72-492d-b54d-123805a9b967 +AAI.model-version-id.vpe=203817d3-829c-42d4-942d-2a935478e993 +#vpls-pe widget details +AAI.model-version-id.vpls-pe=b1566228-6785-4ce1-aea2-053736f80341 +AAI.model-invariant-id.vpls-pe=457ba89b-334c-4fbd-acc4-160ac0e0cdc0 +#vpn-binding widget details +AAI.model-invariant-id.vpn-binding=9e23b675-db2b-488b-b459-57aa9857baa0 +AAI.model-version-id.vpn-binding=21a146e5-9901-448c-9197-723076770119 +#vserver widget details +AAI.model-invariant-id.vserver=ff69d4e0-a8e8-4108-bdb0-dd63217e63c7 +AAI.model-version-id.vserver=8ecb2c5d-7176-4317-a255-26274edfdd53 \ No newline at end of file diff --git a/asdctool/src/main/resources/config/configuration.yaml b/asdctool/src/main/resources/config/configuration.yaml index 0421d656be..48529a7c78 100644 --- a/asdctool/src/main/resources/config/configuration.yaml +++ 
b/asdctool/src/main/resources/config/configuration.yaml @@ -5,10 +5,9 @@ identificationHeaderFields: - HTTP_IV_REMOTE_ADDRESS - HTTP_CSP_WSTYPE - - # catalog backend hostname -beFqdn: sdccatalog.att.com +beFqdn: localhost +# sdccatalog.att.com # catalog backend http port beHttpPort: 8080 @@ -24,25 +23,43 @@ beSslPort: 8443 version: 1.0 released: 2012-11-30 -toscaConformanceLevel: 3.0 +toscaConformanceLevel: 5.0 minToscaConformanceLevel: 3.0 -titanCfgFile: src\main\resources\config\titan.properties +# These values are necessary for running upgrade migration 1710.0 process +appVersion: 1.1.0 +artifactGeneratorConfig: Artifact-Generator.properties +resourcesForUpgrade: + 5.0: + - org.openecomp.resource.cp.extCP + - tosca.nodes.network.Network + - tosca.nodes.network.Port + - org.openecomp.resource.cp.nodes.network.SubInterface +skipUpgradeFailedVfs: true +skipUpgradeVSPs: true +autoHealingOwner: jh0003 + +titanCfgFile: C:\Users\im453s\git\sdc\asdctool\src\main\resources\config\titan.properties titanMigrationKeySpaceCfgFile: src\main\resources\config\titan-migration.properties titanInMemoryGraph: false titanLockTimeout: 1800 + +# The interval to try and reconnect to titan DB when it is down during ASDC startup: titanReconnectIntervalInSeconds: 3 + +# The read timeout towards Titan DB when health check is invoked: titanHealthCheckReadTimeout: 1 + +# The interval to try and reconnect to Elasticsearch when it is down during ASDC startup: esReconnectIntervalInSeconds: 3 uebHealthCheckReconnectIntervalInSeconds: 15 uebHealthCheckReadTimeout: 4 - # Protocols protocols: - http - https - + # Default imports defaultImports: - nodes: @@ -62,13 +79,28 @@ defaultImports: users: tom: passwd bob: passwd - + neo4j: host: neo4jhost port: 7474 user: neo4j password: "12345" - + +cassandraConfig: + cassandraHosts: ['localhost'] + localDataCenter: datacenter1 + reconnectTimeout : 30000 + authenticate: false + username: koko + password: bobo + ssl: false + truststorePath : /path/path + truststorePassword : 123123 + keySpaces: + - { name: sdcaudit, replicationStrategy: SimpleStrategy, replicationInfo: ['1']} + - { name: sdcartifact, replicationStrategy: SimpleStrategy, replicationInfo: ['1']} + - { name: sdccomponent, replicationStrategy: SimpleStrategy, replicationInfo: ['1']} + - { name: sdcrepository, replicationStrategy: SimpleStrategy, replicationInfo: ['1']} #Application-specific settings of ES elasticSearch: @@ -86,7 +118,7 @@ elasticSearch: # Legal values for creationPeriod - year, month, day, hour, minute, none (meaning no time-based behaviour). # # If no creationPeriod is configured for indexPrefix, default behavour is creationPeriod: month. 
- + indicesTimeFrequency: - indexPrefix: auditingevents creationPeriod: month @@ -128,7 +160,9 @@ resourceTypes: &allResourceTypes - VL - VF - VFCMT - + - Abstract + - CVFC + # validForResourceTypes usage # validForResourceTypes: # - VF @@ -146,7 +180,7 @@ deploymentResourceArtifacts: # displayName: "Network HEAT Template" # type: HEAT_NET # validForResourceTypes: *allResourceTypes - + deploymentResourceInstanceArtifacts: heatEnv: displayName: "HEAT ENV" @@ -176,6 +210,8 @@ toscaArtifacts: #Informational artifacts placeHolder excludeResourceCategory: - Generic +excludeResourceType: + - PNF informationalResourceArtifacts: features: displayName: Features @@ -198,7 +234,7 @@ informationalResourceArtifacts: resourceSecurityTemplate: displayName: Resource Security Template type: OTHER - + excludeServiceCategory: informationalServiceArtifacts: @@ -241,7 +277,7 @@ informationalServiceArtifacts: serviceSecurityTemplate: displayName: Service Security Template type: OTHER - + serviceApiArtifacts: configuration: displayName: Configuration @@ -262,7 +298,6 @@ serviceApiArtifacts: displayName: Testing type: OTHER - additionalInformationMaxNumberOfKeys: 50 systemMonitoring: @@ -285,6 +320,10 @@ serviceDeploymentArtifacts: MODEL_QUERY_SPEC: acceptedTypes: - xml + UCPE_LAYER_2_CONFIGURATION: + acceptedTypes: + - xml + #AAI Artifacts AAI_SERVICE_MODEL: acceptedTypes: @@ -309,19 +348,19 @@ resourceDeploymentArtifacts: - yaml - yml validForResourceTypes: *allResourceTypes - HEAT_NESTED: + HEAT_NET: acceptedTypes: - yaml - yml validForResourceTypes: *allResourceTypes - HEAT_ARTIFACT: - acceptedTypes: - validForResourceTypes: *allResourceTypes - HEAT_NET: + HEAT_NESTED: acceptedTypes: - yaml - yml validForResourceTypes: *allResourceTypes + HEAT_ARTIFACT: + acceptedTypes: + validForResourceTypes: *allResourceTypes YANG_XML: acceptedTypes: - xml @@ -346,37 +385,55 @@ resourceDeploymentArtifacts: acceptedTypes: - xml validForResourceTypes: *allResourceTypes - #APPC Artifatcs + LIFECYCLE_OPERATIONS: + acceptedTypes: + - yaml + - yml + validForResourceTypes: + - VF + - VFC + VES_EVENTS: + acceptedTypes: + - yaml + - yml + validForResourceTypes: *allResourceTypes + PERFORMANCE_COUNTER: + acceptedTypes: + - csv + validForResourceTypes: *allResourceTypes APPC_CONFIG: acceptedTypes: validForResourceTypes: - VF - #DCAE Artifacts DCAE_TOSCA: acceptedTypes: - yml - yaml validForResourceTypes: - VF + - VFCMT DCAE_JSON: acceptedTypes: - json validForResourceTypes: - VF + - VFCMT DCAE_POLICY: acceptedTypes: - emf validForResourceTypes: - VF + - VFCMT DCAE_DOC: acceptedTypes: validForResourceTypes: - - VF + - VF + - VFCMT DCAE_EVENT: acceptedTypes: validForResourceTypes: - VF -#AAI Artifacts + - VFCMT AAI_VF_MODEL: acceptedTypes: - xml @@ -390,14 +447,13 @@ resourceDeploymentArtifacts: OTHER: acceptedTypes: validForResourceTypes: *allResourceTypes -#MIB artifacts SNMP_POLL: acceptedTypes: validForResourceTypes: *allResourceTypes SNMP_TRAP: acceptedTypes: validForResourceTypes: *allResourceTypes - + resourceInstanceDeploymentArtifacts: HEAT_ENV: acceptedTypes: @@ -405,7 +461,13 @@ resourceInstanceDeploymentArtifacts: VF_MODULES_METADATA: acceptedTypes: - json -#DCAE_VF Instance Artifacts + VES_EVENTS: + acceptedTypes: + - yaml + - yml + PERFORMANCE_COUNTER: + acceptedTypes: + - csv DCAE_INVENTORY_TOSCA: acceptedTypes: - yml @@ -422,14 +484,12 @@ resourceInstanceDeploymentArtifacts: acceptedTypes: DCAE_INVENTORY_EVENT: acceptedTypes: -#MIB artifacts SNMP_POLL: acceptedTypes: validForResourceTypes: *allResourceTypes SNMP_TRAP: 
acceptedTypes: validForResourceTypes: *allResourceTypes - resourceInformationalArtifacts: CHEF: @@ -461,7 +521,15 @@ resourceInformationalArtifacts: validForResourceTypes: *allResourceTypes OTHER: acceptedTypes: - validForResourceTypes: *allResourceTypes + validForResourceTypes: + - VFC + - CVFC + - CP + - VL + - VF + - VFCMT + - Abstract + - PNF SNMP_POLL: acceptedTypes: validForResourceTypes: *allResourceTypes @@ -473,15 +541,14 @@ resourceInformationalArtifacts: validForResourceTypes: - VF - VFC - + - CVFC resourceInformationalDeployedArtifacts: - requirementsToFulfillBeforeCert: capabilitiesToConsumeBeforeCert: - + unLoggedUrls: - /sdc2/rest/healthCheck @@ -493,26 +560,19 @@ cleanComponentsConfiguration: artifactsIndex: resources -cassandraConfig: - cassandraHosts: ['localhost'] - localDataCenter: - reconnectTimeout : 30000 - authenticate: true - username: asdc_user - password: Aa1234%^! - ssl: false - truststorePath : /path/path - truststorePassword : 123123 - keySpaces: - - { name: sdcaudit, replicationStrategy: SimpleStrategy, replicationInfo: ['1']} - - { name: sdcartifact, replicationStrategy: SimpleStrategy, replicationInfo: ['1']} - - { name: sdccomponent, replicationStrategy: SimpleStrategy, replicationInfo: ['1']} - - { name: sdcrepository, replicationStrategy: SimpleStrategy, replicationInfo: ['1']} - - +heatEnvArtifactHeader: "" +heatEnvArtifactFooter: "" + +onboarding: + protocol: http + host: localhost + port: 8080 + downloadCsarUri: "/onboarding-api/v1.0/vendor-software-products/packages" + healthCheckUri: "/onboarding-api/v1.0/healthcheck" + switchoverDetector: - gBeFqdn: AIO-BE.ecomp.idns.cip - gFeFqdn: AIO-FE.ecomp.idns.cip + gBeFqdn: AIO-BE.ecomp.idns.cip.com + gFeFqdn: AIO-FE.ecomp.idns.cip.com beVip: 0.0.0.0 feVip: 0.0.0.0 beResolveAttempts: 3 @@ -521,24 +581,14 @@ switchoverDetector: interval: 60 changePriorityUser: ecompasdc changePriorityPassword: ecompasdc123 - publishNetworkUrl: "http://xxxxxx.com/crt/CipDomain.ECOMP-ASDC-DEVST/config/update_network?user=root" + publishNetworkUrl: "http://xxx.com/crt/CipDomain.ECOMP-ASDC-DEVST/config/update_network?user=root" publishNetworkBody: '{"note":"publish network"}' groups: - beSet: { changePriorityUrl: "http://xxxxxxx.com/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-BE.ecomp.idns.com?user=root", + beSet: { changePriorityUrl: "http://xxx.com/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-BE.ecomp.idns.com?user=root", changePriorityBody: '{"name":"AIO-BE.ecomp.idns.com","uri":"/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-BE.ecomp.idns.com","no_ad_redirection":false,"v4groups":{"failover_groups":["/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_mg_be","/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_bs_be"],"failover_policy":["FAILALL"]},"comment":"AIO BE G-fqdn","intended_app_proto":"DNS"}'} - feSet: { changePriorityUrl: "http://cora.web.att.com/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-FE.ecomp.idns.cip.att.com?user=root", + feSet: { changePriorityUrl: "http://xxx.com/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-FE.ecomp.idns.com?user=root", changePriorityBody: '{"comment":"AIO G-fqdn","name":"AIO-FE.ecomp.idns.com","v4groups":{"failover_groups":["/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_mg_fe","/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_bs_fe"],"failover_policy":["FAILALL"]},"no_ad_redirection":false,"intended_app_proto":"DNS","uri":"/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-FE.ecomp.idns.com"}'} - - -heatEnvArtifactHeader: "" -heatEnvArtifactFooter: "" -onboarding: - 
protocol: http - host: localhost - port: 8080 - downloadCsarUri: "/onboarding-api/v1.0/vendor-software-products/packages" - applicationL1Cache: datatypes: enabled: true @@ -546,9 +596,9 @@ applicationL1Cache: pollIntervalInSec: 60 applicationL2Cache: - enabled: false + enabled: true catalogL1Cache: - enabled: false + enabled: true resourcesSizeInCache: 300 servicesSizeInCache: 200 productsSizeInCache: 100 @@ -558,7 +608,8 @@ applicationL2Cache: numberOfCacheWorkers: 4 toscaValidators: - stringMaxLength: 65536 + stringMaxLength: 2500 + disableAudit: false vfModuleProperties: @@ -574,4 +625,10 @@ vfModuleProperties: vf_module_type: forBaseModule: Base forNonBaseModule: Expansion - \ No newline at end of file + +genericAssetNodeTypes: + VFC: org.openecomp.resource.abstract.nodes.VFC + CVFC: org.openecomp.resource.abstract.nodes.VFC + VF : org.openecomp.resource.abstract.nodes.VF + PNF: org.openecomp.resource.abstract.nodes.PNF + Service: org.openecomp.resource.abstract.nodes.service \ No newline at end of file diff --git a/asdctool/src/main/resources/config/elasticsearch.yml b/asdctool/src/main/resources/config/elasticsearch.yml index e1808ad7cc..38482e2b02 100644 --- a/asdctool/src/main/resources/config/elasticsearch.yml +++ b/asdctool/src/main/resources/config/elasticsearch.yml @@ -1,14 +1,15 @@ elasticSearch.local: true elasticSearch.transportclient: false -cluster.name: elasticsearch_1_5_2222 +cluster.name: elasticsearch discovery.zen.ping.multicast.enabled: false discovery.zen.ping.unicast.enabled: true -discovery.zen.ping.unicast.hosts: 1.2.3.4 +discovery.zen.ping.unicast.hosts: elasticsearch_host transport.client.initial_nodes: - - 1.2.3.4:9300 + - elasticsearch_host:9300 +http.cors.enabled: true #plugin.types: "DeleteByQueryPlugin" diff --git a/asdctool/src/main/resources/config/titan.properties b/asdctool/src/main/resources/config/titan.properties index e9daff7c7c..5411a44224 100644 --- a/asdctool/src/main/resources/config/titan.properties +++ b/asdctool/src/main/resources/config/titan.properties @@ -1,4 +1,11 @@ storage.backend=cassandra storage.hostname=localhost storage.port=9160 -storage.cassandra.keyspace=titan \ No newline at end of file + +cache.db-cache = false +cache.db-cache-clean-wait = 20 +cache.db-cache-time = 180000 +cache.db-cache-size = 0.5 + +cache.tx-cache-size = 500000 +storage.cassandra.keyspace=sdctitan diff --git a/asdctool/src/main/resources/elasticsearch.yml b/asdctool/src/main/resources/elasticsearch.yml new file mode 100644 index 0000000000..71ccdbb8f5 --- /dev/null +++ b/asdctool/src/main/resources/elasticsearch.yml @@ -0,0 +1,399 @@ + +cluster.name: elasticsearch + +discovery.zen.ping.multicast.enabled: false +discovery.zen.ping.unicast.enabled: true +discovery.zen.ping.unicast.hosts: elasticsearch_host + +http.cors.enabled: true + +path.home: "/home/vagrant/catalog-be/config" + +elasticSearch.transportclient: true + +transport.client.initial_nodes: + - elasticsearch_host:9300 + +#shield.user: asdc:Aa12345 +#shield.ssl.keystore.path: "/vagrant/install/resources/catalog-be/keystore/es-client.jks" +#shield.ssl.keystore.password: Aa123456 +#shield.transport.ssl: true + +##################### Elasticsearch Configuration Example ##################### + +# This file contains an overview of various configuration settings, +# targeted at operations staff. Application developers should +# consult the guide at . +# +# The installation procedure is covered at +# . 
+# +# Elasticsearch comes with reasonable defaults for most settings, +# so you can try it out without bothering with configuration. +# +# Most of the time, these defaults are just fine for running a production +# cluster. If you're fine-tuning your cluster, or wondering about the +# effect of certain configuration option, please _do ask_ on the +# mailing list or IRC channel [http://elasticsearch.org/community]. + +# Any element in the configuration can be replaced with environment variables +# by placing them in ${...} notation. For example: +# +# node.rack: ${RACK_ENV_VAR} + +# For information on supported formats and syntax for the config file, see +# + + +################################### Cluster ################################### + +# Cluster name identifies your cluster for auto-discovery. If you're running +# multiple clusters on the same network, make sure you're using unique names. +# +# cluster.name: elasticsearch + + +#################################### Node ##################################### + +# Node names are generated dynamically on startup, so you're relieved +# from configuring them manually. You can tie this node to a specific name: +# +# node.name: "Franz Kafka" + +# Every node can be configured to allow or deny being eligible as the master, +# and to allow or deny to store the data. +# +# Allow this node to be eligible as a master node (enabled by default): +# +# node.master: true +# +# Allow this node to store data (enabled by default): +# +# node.data: true + +# You can exploit these settings to design advanced cluster topologies. +# +# 1. You want this node to never become a master node, only to hold data. +# This will be the "workhorse" of your cluster. +# +# node.master: false +# node.data: true +# +# 2. You want this node to only serve as a master: to not store any data and +# to have free resources. This will be the "coordinator" of your cluster. +# +# node.master: true +# node.data: false +# +# 3. You want this node to be neither master nor data node, but +# to act as a "search load balancer" (fetching data from nodes, +# aggregating results, etc.) +# +# node.master: false +# node.data: false + +# Use the Cluster Health API [http://localhost:9200/_cluster/health], the +# Node Info API [http://localhost:9200/_nodes] or GUI tools +# such as , +# , +# and +# to inspect the cluster state. + +# A node can have generic attributes associated with it, which can later be used +# for customized shard allocation filtering, or allocation awareness. An attribute +# is a simple key value pair, similar to node.key: value, here is an example: +# +# node.rack: rack314 + +# By default, multiple nodes are allowed to start from the same installation location +# to disable it, set the following: +# node.max_local_storage_nodes: 1 + + +#################################### Index #################################### + +# You can set a number of options (such as shard/replica options, mapping +# or analyzer definitions, translog settings, ...) for indices globally, +# in this file. +# +# Note, that it makes more sense to configure index settings specifically for +# a certain index, either when creating it or by using the index templates API. +# +# See and +# +# for more information. 
+ +# Set the number of shards (splits) of an index (5 by default): +# +# index.number_of_shards: 5 + +# Set the number of replicas (additional copies) of an index (1 by default): +# +# index.number_of_replicas: 1 + +# Note, that for development on a local machine, with small indices, it usually +# makes sense to "disable" the distributed features: +# +index.number_of_shards: 1 +index.number_of_replicas: 0 + +# These settings directly affect the performance of index and search operations +# in your cluster. Assuming you have enough machines to hold shards and +# replicas, the rule of thumb is: +# +# 1. Having more *shards* enhances the _indexing_ performance and allows to +# _distribute_ a big index across machines. +# 2. Having more *replicas* enhances the _search_ performance and improves the +# cluster _availability_. +# +# The "number_of_shards" is a one-time setting for an index. +# +# The "number_of_replicas" can be increased or decreased anytime, +# by using the Index Update Settings API. +# +# Elasticsearch takes care about load balancing, relocating, gathering the +# results from nodes, etc. Experiment with different settings to fine-tune +# your setup. + +# Use the Index Status API () to inspect +# the index status. + + +#################################### Paths #################################### + +# Path to directory containing configuration (this file and logging.yml): +# +path.conf: /src/test/resources + +# Path to directory where to store index data allocated for this node. +# +path.data: target/esdata +# +# Can optionally include more than one location, causing data to be striped across +# the locations (a la RAID 0) on a file level, favouring locations with most free +# space on creation. For example: +# +# path.data: /path/to/data1,/path/to/data2 + +# Path to temporary files: +# +path.work: /target/eswork + +# Path to log files: +# +path.logs: /target/eslogs + +# Path to where plugins are installed: +# +# path.plugins: /path/to/plugins + + +#################################### Plugin ################################### + +# If a plugin listed here is not installed for current node, the node will not start. +# +# plugin.mandatory: mapper-attachments,lang-groovy + + +################################### Memory #################################### + +# Elasticsearch performs poorly when JVM starts swapping: you should ensure that +# it _never_ swaps. +# +# Set this property to true to lock the memory: +# +# bootstrap.mlockall: true + +# Make sure that the ES_MIN_MEM and ES_MAX_MEM environment variables are set +# to the same value, and that the machine has enough memory to allocate +# for Elasticsearch, leaving enough memory for the operating system itself. +# +# You should also make sure that the Elasticsearch process is allowed to lock +# the memory, eg. by using `ulimit -l unlimited`. + + +############################## Network And HTTP ############################### + +# Elasticsearch, by default, binds itself to the 0.0.0.0 address, and listens +# on port [9200-9300] for HTTP traffic and on port [9300-9400] for node-to-node +# communication. (the range means that if the port is busy, it will automatically +# try the next port). + +# Set the bind address specifically (IPv4 or IPv6): +# +# network.bind_host: 192.168.0.1 + +# Set the address other nodes will use to communicate with this node. If not +# set, it is automatically derived. It must point to an actual IP address. 
+# +# network.publish_host: 192.168.0.1 + +# Set both 'bind_host' and 'publish_host': +# +# network.host: 192.168.0.1 + +# Set a custom port for the node to node communication (9300 by default): +# +# transport.tcp.port: 9300 + +# Enable compression for all communication between nodes (disabled by default): +# +# transport.tcp.compress: true + +# Set a custom port to listen for HTTP traffic: +# +# http.port: 9200 + +# Set a custom allowed content length: +# +# http.max_content_length: 100mb + +# Disable HTTP completely: +# +# http.enabled: false + + +################################### Gateway ################################### + +# The gateway allows for persisting the cluster state between full cluster +# restarts. Every change to the state (such as adding an index) will be stored +# in the gateway, and when the cluster starts up for the first time, +# it will read its state from the gateway. + +# There are several types of gateway implementations. For more information, see +# . + +# The default gateway type is the "local" gateway (recommended): +# +# gateway.type: local + +# Settings below control how and when to start the initial recovery process on +# a full cluster restart (to reuse as much local data as possible when using shared +# gateway). + +# Allow recovery process after N nodes in a cluster are up: +# +gateway.recover_after_nodes: 1 + +# Set the timeout to initiate the recovery process, once the N nodes +# from previous setting are up (accepts time value): +# +# gateway.recover_after_time: 5m + +# Set how many nodes are expected in this cluster. Once these N nodes +# are up (and recover_after_nodes is met), begin recovery process immediately +# (without waiting for recover_after_time to expire): +# +gateway.expected_nodes: 1 + + +############################# Recovery Throttling ############################# + +# These settings allow to control the process of shards allocation between +# nodes during initial recovery, replica allocation, rebalancing, +# or when adding and removing nodes. + +# Set the number of concurrent recoveries happening on a node: +# +# 1. During the initial recovery +# +# cluster.routing.allocation.node_initial_primaries_recoveries: 4 +# +# 2. During adding/removing nodes, rebalancing, etc +# +# cluster.routing.allocation.node_concurrent_recoveries: 2 + +# Set to throttle throughput when recovering (eg. 100mb, by default 20mb): +# +# indices.recovery.max_bytes_per_sec: 20mb + +# Set to limit the number of open concurrent streams when +# recovering a shard from a peer: +# +# indices.recovery.concurrent_streams: 5 + + +################################## Discovery ################################## + +# Discovery infrastructure ensures nodes can be found within a cluster +# and master node is elected. Multicast discovery is the default. + +# Set to ensure a node sees N other master eligible nodes to be considered +# operational within the cluster. Its recommended to set it to a higher value +# than 1 when running more than 2 nodes in the cluster. +# +# discovery.zen.minimum_master_nodes: 1 + +# Set the time to wait for ping responses from other nodes when discovering. +# Set this option to a higher value on a slow or congested network +# to minimize discovery failures: +# +# discovery.zen.ping.timeout: 3s + +# For more information, see +# + +# Unicast discovery allows to explicitly control which nodes will be used +# to discover the cluster. It can be used when multicast is not present, +# or to restrict the cluster communication-wise. +# +# 1. 
Disable multicast discovery (enabled by default): +# +# discovery.zen.ping.multicast.enabled: false +# +# 2. Configure an initial list of master nodes in the cluster +# to perform discovery when new nodes (master or data) are started: +# +# discovery.zen.ping.unicast.hosts: ["host1", "host2:port"] + +# EC2 discovery allows to use AWS EC2 API in order to perform discovery. +# +# You have to install the cloud-aws plugin for enabling the EC2 discovery. +# +# For more information, see +# +# +# See +# for a step-by-step tutorial. + +# GCE discovery allows to use Google Compute Engine API in order to perform discovery. +# +# You have to install the cloud-gce plugin for enabling the GCE discovery. +# +# For more information, see . + +# Azure discovery allows to use Azure API in order to perform discovery. +# +# You have to install the cloud-azure plugin for enabling the Azure discovery. +# +# For more information, see . + +################################## Slow Log ################################## + +# Shard level query and fetch threshold logging. + +#index.search.slowlog.threshold.query.warn: 10s +#index.search.slowlog.threshold.query.info: 5s +#index.search.slowlog.threshold.query.debug: 2s +#index.search.slowlog.threshold.query.trace: 500ms + +#index.search.slowlog.threshold.fetch.warn: 1s +#index.search.slowlog.threshold.fetch.info: 800ms +#index.search.slowlog.threshold.fetch.debug: 500ms +#index.search.slowlog.threshold.fetch.trace: 200ms + +#index.indexing.slowlog.threshold.index.warn: 10s +#index.indexing.slowlog.threshold.index.info: 5s +#index.indexing.slowlog.threshold.index.debug: 2s +#index.indexing.slowlog.threshold.index.trace: 500ms + +################################## GC Logging ################################ + +#monitor.jvm.gc.young.warn: 1000ms +#monitor.jvm.gc.young.info: 700ms +#monitor.jvm.gc.young.debug: 400ms + +#monitor.jvm.gc.old.warn: 10s +#monitor.jvm.gc.old.info: 5s +#monitor.jvm.gc.old.debug: 2s + diff --git a/asdctool/src/main/resources/scripts/UUIDFix1707.sh b/asdctool/src/main/resources/scripts/UUIDFix1707.sh index d6bd8e8ff8..0ceb0a0346 100644 --- a/asdctool/src/main/resources/scripts/UUIDFix1707.sh +++ b/asdctool/src/main/resources/scripts/UUIDFix1707.sh @@ -16,9 +16,9 @@ fi source ${FULL_PATH}/baseOperation.sh -mainClass="org.openecomp.sdc.asdctool.main.MigrationMenu" +mainClass="org.openecomp.sdc.asdctool.main.ArtifactUUIDFixMenu" -command="java $JVM_LOG_FILE -cp $JARS $mainClass fix-UUID-1707 1707 $@" +command="java $JVM_LOG_FILE -cp $JARS $mainClass $@" echo $command $command diff --git a/asdctool/src/main/resources/scripts/artifactsIdValidation.sh b/asdctool/src/main/resources/scripts/artifactsIdValidation.sh new file mode 100644 index 0000000000..9064a64c6d --- /dev/null +++ b/asdctool/src/main/resources/scripts/artifactsIdValidation.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +############################## +# Artifact Validator Tool # +############################## + +CURRENT_DIR=`pwd` +BASEDIR=$(dirname $0) + +if [ ${BASEDIR:0:1} = "/" ] +then + FULL_PATH=$BASEDIR +else + FULL_PATH=$CURRENT_DIR/$BASEDIR +fi + +source ${FULL_PATH}/baseOperation.sh + +mainClass="org.openecomp.sdc.asdctool.main.ArtifactValidatorTool" + +command="java $JVM_LOG_FILE -cp $JARS $mainClass /var/tmp/ /home/vagrant/catalog-be/config/catalog-be/" +#command="java $JVM_LOG_FILE -cp $JARS $mainClass . /apps/jetty/base/be/config/catalog-be/" +echo $command + +$command +result=$? 
+ +echo "***********************************" +echo "***** $result *********************" +echo "***********************************" + +exit $result \ No newline at end of file diff --git a/asdctool/src/main/resources/scripts/sdc-migration.sh b/asdctool/src/main/resources/scripts/sdc-migration.sh index 1616890217..bbdd6f0f7b 100644 --- a/asdctool/src/main/resources/scripts/sdc-migration.sh +++ b/asdctool/src/main/resources/scripts/sdc-migration.sh @@ -4,6 +4,9 @@ # Data Migration ############################## +# in 1802E we do not want to execute automatic post process +exit 0 + CURRENT_DIR=`pwd` BASEDIR=$(dirname $0) diff --git a/asdctool/src/main/resources/scripts/upgradePostMigration1710.sh b/asdctool/src/main/resources/scripts/upgradePostMigration1710.sh new file mode 100644 index 0000000000..927d148d88 --- /dev/null +++ b/asdctool/src/main/resources/scripts/upgradePostMigration1710.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +############################## +# Data Migration: Upgrade Post Migration 1710 +############################## + +CURRENT_DIR=`pwd` +BASEDIR=$(dirname $0) + +if [ ${BASEDIR:0:1} = "/" ] +then + FULL_PATH=$BASEDIR +else + FULL_PATH=$CURRENT_DIR/$BASEDIR +fi + +source ${FULL_PATH}/baseOperation.sh + +mainClass="org.openecomp.sdc.asdctool.migration.main.MigrationMenu" + +command="java $JVM_LOG_FILE -cp $JARS $mainClass -c $@" +echo $command + +$command +result=$? + +echo "***********************************" +echo "***** $result *********************" +echo "***********************************" + +exit $result + + -- cgit 1.2.3-korg
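Usage sketch for the two new scripts introduced above. This is a minimal, assumed invocation from the asdctool scripts directory of a deployed node: baseOperation.sh (sourced by both scripts) is expected to set $JARS and $JVM_LOG_FILE, the install path shown is hypothetical, and treating the argument forwarded after "-c" as the catalog-be configuration directory is an assumption based on the "-c $@" pass-through in upgradePostMigration1710.sh rather than anything this change documents.

    cd /path/to/asdctool/scripts                      # assumed install location, not part of this change

    # artifactsIdValidation.sh takes no arguments: the report directory (/var/tmp/)
    # and the catalog-be configuration directory are hard-coded inside the script.
    ./artifactsIdValidation.sh
    echo "artifact validation exit code: $?"          # non-zero indicates the tool reported a failure

    # upgradePostMigration1710.sh forwards everything after "-c" to MigrationMenu;
    # passing the catalog-be configuration directory here is an assumption.
    ./upgradePostMigration1710.sh /path/to/catalog-be/config
    echo "post-migration exit code: $?"

Both scripts capture the Java tool's return code and exit with it, so they can be gated on $? in an operator runbook or cron entry without parsing the console banner they print.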