author     Michael Lando <ml636r@att.com>  2018-07-29 16:13:45 +0300
committer  Michael Lando <ml636r@att.com>  2018-07-29 16:20:34 +0300
commit     5b593496b8f1b8e8be8d7d2dbcc223332e65a49b (patch)
tree       2f9dfc45191e723da69cf74be7829784e9741b94 /asdctool
parent     9200382f2ce7b4bb729aa287d0878004b2d2b4f9 (diff)
rebase code
Change-Id: I12a5ca14a6d8a87e9316b9ff362eb131105f98a5
Issue-ID: SDC-1566
Signed-off-by: Michael Lando <ml636r@att.com>
Diffstat (limited to 'asdctool')
-rw-r--r-- asdctool/pom.xml | 95
-rw-r--r-- asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb | 2
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java | 25
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java | 7
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java | 4
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java | 39
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java | 28
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/VrfObjectFixConfiguration.java | 36
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java | 5
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java | 113
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java | 43
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java | 1523
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java | 10
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java | 39
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java | 14
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java | 18
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java | 9
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java | 41
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java | 20
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java | 197
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java | 38
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java | 187
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java | 200
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java | 16
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBL.java | 16
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java | 17
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java | 234
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java | 8
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java | 8
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java | 10
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java | 22
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java | 8
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java | 6
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java | 9
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactsVertexResult.java | 4
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTask.java | 1
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java | 12
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnum.java | 4
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java | 14
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java | 5
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java | 5
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java | 45
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java | 5
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java | 55
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java | 11
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java | 5
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java | 16
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java | 25
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java | 13
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java | 11
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/main/VrfObjectFixMenu.java | 52
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java | 10
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java | 12
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/HealthCheckBusinessLogicMock.java | 4
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java | 10
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java | 9
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java | 4
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java | 5
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java | 24
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java | 5
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java | 4
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java | 10
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java | 4
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java | 4
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java | 81
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java | 1044
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigration.java | 25
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java | 253
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java | 138
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java | 237
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SdcArchiveMigration.java | 76
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/EntryPoint.java | 7
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java | 31
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportCassandraTableTool.java | 8
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEnvironment.java | 3
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEvnironmentImportHandler.java | 15
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ConsoleWriter.java | 37
-rw-r--r-- asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ReportWriter.java | 50
-rw-r--r-- asdctool/src/main/resources/application-context.xml | 4
-rw-r--r-- asdctool/src/main/resources/config/configuration.yaml | 44
-rw-r--r-- asdctool/src/main/resources/config/titan.properties | 25
-rw-r--r-- asdctool/src/main/resources/scripts/deleteComponentTool.sh | 35
-rw-r--r-- asdctool/src/main/resources/scripts/generateCsar.sh | 35
-rw-r--r-- asdctool/src/main/resources/scripts/vrfObjectFix.sh | 35
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/AppTest.java | 56
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java | 10
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMockTest.java | 5
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnumTest.java | 89
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java | 6
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/DataMigrationTest.java | 295
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java | 7
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertexTest.java | 7
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandlerTest.java | 60
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManagerTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java | 15
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java | 10
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java | 6
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnumTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/VfModuleArtifactPayloadExTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImportTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMockTest.java | 23
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/DBVersionTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java | 12
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImplTest.java | 6
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDaoTest.java | 4
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolverTest.java | 16
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java | 14
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java | 12
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java | 22
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java | 93
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java | 525
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigrationTest.java | 69
-rw-r--r-- asdctool/src/test/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServletTest.java | 8
117 files changed, 4801 insertions, 2223 deletions
diff --git a/asdctool/pom.xml b/asdctool/pom.xml
index ce64b7fc1b..09e2e186a8 100644
--- a/asdctool/pom.xml
+++ b/asdctool/pom.xml
@@ -77,20 +77,20 @@
<groupId>org.onap.sdc.common</groupId>
<artifactId>onap-common-lib</artifactId>
</exclusion>
-
+
<exclusion>
<groupId>com.att.nsa</groupId>
<artifactId>cambriaClient</artifactId>
</exclusion>
<exclusion>
- <groupId>com.att.nsa</groupId>
- <artifactId>dmaapClient</artifactId>
- </exclusion>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>dmaapClient</artifactId>
+ </exclusion>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
-
+
</exclusions>
<scope>compile</scope>
@@ -107,9 +107,9 @@
<artifactId>cambriaClient</artifactId>
</exclusion>
<exclusion>
- <groupId>com.att.nsa</groupId>
- <artifactId>dmaapClient</artifactId>
- </exclusion>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>dmaapClient</artifactId>
+ </exclusion>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
@@ -118,46 +118,6 @@
</dependency>
- <!--Artifact Generator-->
- <dependency>
- <groupId>org.onap.sdc.common</groupId>
- <artifactId>onap-sdc-artifact-generator-api</artifactId>
- <version>${artifact-generator-api.version}</version>
- <scope>compile</scope>
- <exclusions>
- <exclusion>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>org.onap.sdc.common</groupId>
- <artifactId>onap-sdc-artifact-generator-core</artifactId>
- <version>${artifact-generator-core.version}</version>
- <scope>compile</scope>
- <exclusions>
- <exclusion>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>org.onap.sdc.common</groupId>
- <artifactId>onap-common-lib</artifactId>
- <version>${dox-common-lib.version}</version>
- <type>pom</type>
- <exclusions>
- <exclusion>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
<!-- ASDC dependencies end -->
<dependency>
@@ -354,12 +314,6 @@
<version>${commons-codec}</version>
<scope>compile</scope>
</dependency>
- <dependency>
- <groupId>com.fasterxml.jackson.core</groupId>
- <artifactId>jackson-core</artifactId>
- <version>${jackson.version}</version>
- <scope>compile</scope>
- </dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
@@ -368,11 +322,11 @@
<scope>compile</scope>
</dependency>
+ <!-- Explicitly specified in order to override older version included by epsdk-fw -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
- <version>${jackson.annotations.version}</version>
- <scope>compile</scope>
+ <version>${jackson-annotations.version}</version>
</dependency>
<dependency>
@@ -493,15 +447,15 @@
</dependency>
<!-- CASSANDRA END -->
-
+
<!-- OPEN CSV -->
- <dependency>
- <groupId>com.opencsv</groupId>
- <artifactId>opencsv</artifactId>
- <version>4.0</version>
- <scope>compile</scope>
- </dependency>
-
+ <dependency>
+ <groupId>com.opencsv</groupId>
+ <artifactId>opencsv</artifactId>
+ <version>4.0</version>
+ <scope>compile</scope>
+ </dependency>
+
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>com.springsource.org.apache.poi</artifactId>
@@ -516,7 +470,7 @@
<scope>compile</scope>
</dependency>
- <!-- Temporary, till building the populate task which adding all components
+ <!-- Temporary, till building the populate task which adding all components
to cache. We will use Serialization Utils. -->
<dependency>
<groupId>de.ruedigermoeller</groupId>
@@ -545,7 +499,12 @@
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
- <!-- testing end -->
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <!-- testing end -->
</dependencies>
@@ -627,7 +586,7 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.1.0</version>
- <executions>
+ <executions>
<execution>
<phase>package</phase>
<goals>
@@ -809,4 +768,4 @@
</build>
</profile>
</profiles>
-</project>
+</project>
\ No newline at end of file
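Note on the testing block above: assertj-core is added with test scope (its version is inherited from dependency management). As a minimal sketch only, and not part of this patch, a fluent AssertJ assertion against the SchemaZipFileEnum refactored further down in this diff could look like the following; the class and test names are illustrative:

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;
import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;

public class SchemaZipFileEnumAssertJSketch {

    @Test
    public void dataEntryExposesExpectedMetadata() {
        // Values taken from the DATA constant introduced later in this diff.
        assertThat(SchemaZipFileEnum.DATA.getFileName()).isEqualTo("data");
        assertThat(SchemaZipFileEnum.DATA.getImportFileList()).isEmpty();
    }
}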
diff --git a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb b/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb
index d6e4c11b11..a28860f1ab 100644
--- a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb
+++ b/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb
@@ -26,7 +26,7 @@ beProtocol: http
beSslPort: <%= @ssl_port %>
version: 1.0
released: 2012-11-30
-toscaConformanceLevel: 10.0
+toscaConformanceLevel: 8.0
minToscaConformanceLevel: 3.0
titanCfgFile: <%= @titan_Path %>/titan.properties
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
index ce8b2c4ef7..e13f40fe29 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
@@ -20,30 +20,23 @@
package org.openecomp.sdc.asdctool;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-
+import com.thinkaurelius.titan.core.TitanFactory;
+import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.commons.configuration.Configuration;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.Property;
import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
-import com.thinkaurelius.titan.core.TitanFactory;
-import com.thinkaurelius.titan.core.TitanGraph;
-
-//import org.openecomp.sdc.be.auditing.impl.AuditingManager;
-
-//import org.openecomp.sdc.be.info.errors.ResponseFormat;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
public class Utils {
- private static Logger log = LoggerFactory.getLogger(Utils.class.getName());
+ private static Logger log = Logger.getLogger(Utils.class.getName());
public final static String NEW_LINE = System.getProperty("line.separator");
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
index 053596d229..2e4c2b3afa 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
@@ -1,11 +1,6 @@
package org.openecomp.sdc.asdctool.cli;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
index 267f20904c..ac75dc8310 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
@@ -1,12 +1,12 @@
package org.openecomp.sdc.asdctool.configuration;
-import java.io.File;
-
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import java.io.File;
+
public class ConfigurationUploader {
public static void uploadConfigurationFiles(String appConfigDir) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java
new file mode 100644
index 0000000000..06264c669e
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java
@@ -0,0 +1,39 @@
+package org.openecomp.sdc.asdctool.configuration;
+
+import org.openecomp.sdc.asdctool.impl.internal.tool.CsarGenerator;
+import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
+import org.openecomp.sdc.be.config.CatalogModelSpringConfig;
+import org.openecomp.sdc.be.dao.config.DAOSpringConfig;
+import org.openecomp.sdc.config.CatalogBESpringConfig;
+import org.springframework.beans.factory.config.PropertiesFactoryBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Import;
+import org.springframework.core.io.FileSystemResource;
+
+@Configuration
+@Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
+@ComponentScan({"org.openecomp.sdc.asdctool.migration.config.mocks"
+ })
+public class CsarGeneratorConfiguration {
+
+ @Bean
+ public CsarGenerator csarGenerator() {
+ return new CsarGenerator();
+ }
+
+ @Bean(name = "elasticsearchConfig")
+ public PropertiesFactoryBean mapper() {
+ String configHome = System.getProperty("config.home");
+ PropertiesFactoryBean bean = new PropertiesFactoryBean();
+ bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
+ return bean;
+ }
+ @Bean(name = "serviceDistributionArtifactsBuilder")
+ public ServiceDistributionArtifactsBuilder serviceDistributionArtifactsBuilder() {
+ return new ServiceDistributionArtifactsBuilder();
+ }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java
new file mode 100644
index 0000000000..fb763189bf
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java
@@ -0,0 +1,28 @@
+package org.openecomp.sdc.asdctool.configuration;
+
+import org.openecomp.sdc.asdctool.impl.internal.tool.DeleteComponentHandler;
+import org.openecomp.sdc.be.config.CatalogModelSpringConfig;
+import org.openecomp.sdc.be.dao.config.DAOSpringConfig;
+import org.springframework.beans.factory.config.PropertiesFactoryBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Import;
+import org.springframework.core.io.FileSystemResource;
+
+@Configuration
+@Import({DAOSpringConfig.class, CatalogModelSpringConfig.class})
+public class InternalToolConfiguration {
+ @Bean(name = "elasticsearchConfig")
+ public PropertiesFactoryBean mapper() {
+ String configHome = System.getProperty("config.home");
+ PropertiesFactoryBean bean = new PropertiesFactoryBean();
+ bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
+ return bean;
+ }
+
+ @Bean
+ public DeleteComponentHandler deleteComponentHandler() {
+ return new DeleteComponentHandler();
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/VrfObjectFixConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/VrfObjectFixConfiguration.java
new file mode 100644
index 0000000000..0dd078d863
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/VrfObjectFixConfiguration.java
@@ -0,0 +1,36 @@
+package org.openecomp.sdc.asdctool.configuration;
+
+import org.openecomp.sdc.asdctool.impl.VrfObjectFixHandler;
+import org.openecomp.sdc.be.dao.DAOTitanStrategy;
+import org.openecomp.sdc.be.dao.TitanClientStrategy;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.titan.TitanGraphClient;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Primary;
+
+@Configuration
+public class VrfObjectFixConfiguration {
+
+ @Bean(name = "titan-dao")
+ public TitanDao titanDao(@Qualifier("titan-client") TitanGraphClient titanClient){
+ return new TitanDao(titanClient);
+ }
+
+ @Bean(name = "titan-client")
+ @Primary
+ public TitanGraphClient titanClient(@Qualifier("dao-client-strategy") TitanClientStrategy titanClientStrategy) {
+ return new TitanGraphClient(titanClientStrategy);
+ }
+
+ @Bean(name ="dao-client-strategy")
+ public TitanClientStrategy titanClientStrategy() {
+ return new DAOTitanStrategy();
+ }
+
+ @Bean
+ public VrfObjectFixHandler vrfObjectFixHandler(@Qualifier("titan-dao") TitanDao titanDao){
+ return new VrfObjectFixHandler(titanDao);
+ }
+}
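The new VrfObjectFixConfiguration wires the Titan access chain (DAOTitanStrategy -> TitanGraphClient -> TitanDao) into a single VrfObjectFixHandler bean. The real entry point is VrfObjectFixMenu.java (listed in the diffstat but not shown here); as a hedged sketch under that assumption, a standalone bootstrap of this configuration might look like:

import org.openecomp.sdc.asdctool.configuration.VrfObjectFixConfiguration;
import org.openecomp.sdc.asdctool.impl.VrfObjectFixHandler;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;

public class VrfObjectFixBootstrapSketch {

    public static void main(String[] args) {
        // Illustrative only: configuration upload (config.home etc.) is handled by the real menu class.
        AnnotationConfigApplicationContext context =
                new AnnotationConfigApplicationContext(VrfObjectFixConfiguration.class);
        try {
            VrfObjectFixHandler handler = context.getBean(VrfObjectFixHandler.class);
            // The handler would then be invoked with the tool's command-line arguments;
            // its exact method signature lives in VrfObjectFixHandler.java (197 lines, not shown here).
        } finally {
            context.close();
        }
    }
}

Closing the context explicitly keeps the sketch independent of whether the Spring version in use makes the context AutoCloseable.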
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java
index f4551a0264..302d20fea7 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java
@@ -1,12 +1,11 @@
package org.openecomp.sdc.asdctool.configuration.mocks.es;
-import java.util.List;
-
+import fj.data.Either;
import org.openecomp.sdc.be.dao.api.ICatalogDAO;
import org.openecomp.sdc.be.dao.api.ResourceUploadStatus;
import org.openecomp.sdc.be.resources.data.ESArtifactData;
-import fj.data.Either;
+import java.util.List;
public class ESCatalogDAOMock implements ICatalogDAO {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
index 6f4c602c2c..42c5290ff8 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
@@ -1,68 +1,57 @@
package org.openecomp.sdc.asdctool.enums;
-public enum SchemaZipFileEnum {
-
- DATA("data", "data-types", "dataTypes", "data_types", new String[]{}),
- GROUPS("groups", "group-types", "groupTypes", "group_types", new String[]{"data.yml"}),
- POLICIES("policies", "policy-types", "policyTypes","policy_types", new String[]{"data.yml"}),
- RELATIONSHIPS("relationships","relationship-types","relationshipTypes", "relationship_types", new String[]{"capabilities.yml", "data.yml", "interfaces.yml"}),
- ARTIFACTS("artifacts", "artifact-types", "artifactTypes", "artifact_types", new String[]{"data.yml"}),
- CAPABILITIES("capabilities", "capability-types", "capabilityTypes", "capability_types" ,new String[]{"data.yml"}),
- INTERFACES("interfaces", "interface-lifecycle-types", "interfaceLifecycleTypes", "interface_types", new String[]{"data.yml"});
-
- private String fileName;
- private String sourceFolderName;
- private String sourceFileName;
- private String collectionTitle;
- private String[] importFileList;
-
- private SchemaZipFileEnum(String fileName, String sourceFolderName, String sourceFileName, String collectionTitle,
- String[] importFileList) {
- this.fileName = fileName;
- this.sourceFolderName = sourceFolderName;
- this.sourceFileName = sourceFileName;
- this.collectionTitle = collectionTitle;
- this.importFileList = importFileList;
- }
-
- public String getFileName() {
- return fileName;
- }
-
- public void setFileName(String fileName) {
- this.fileName = fileName;
- }
-
- public String getSourceFolderName() {
- return sourceFolderName;
- }
+import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.*;
- public void setSourceFolderName(String sourceFolderName) {
- this.sourceFolderName = sourceFolderName;
- }
-
- public String getSourceFileName() {
- return sourceFileName;
- }
-
- public void setSourceFileName(String sourceFileName) {
- this.sourceFileName = sourceFileName;
- }
-
- public String getCollectionTitle() {
- return collectionTitle;
- }
-
- public void setCollectionTitle(String collectionTitle) {
- this.collectionTitle = collectionTitle;
- }
+public enum SchemaZipFileEnum {
- public String[] getImportFileList() {
- return importFileList;
- }
+ DATA("data", "data-types", "dataTypes", "data_types", EMPTY_IMPORT_LIST),
+ GROUPS("groups", "group-types", "groupTypes", "group_types", DATA_IMPORT_LIST),
+ POLICIES("policies", "policy-types", "policyTypes", "policy_types", DATA_IMPORT_LIST),
+ ANNOTATIONS("annotations", "annotation-types", "annotationTypes", "annotation_types", DATA_IMPORT_LIST),
+ RELATIONSHIPS("relationships", "relationship-types", "relationshipTypes", "relationship_types", RELATIONSHIPS_TYPES_IMPORT_LIST),
+ ARTIFACTS("artifacts", "artifact-types", "artifactTypes", "artifact_types", DATA_IMPORT_LIST),
+ CAPABILITIES("capabilities", "capability-types", "capabilityTypes", "capability_types", DATA_IMPORT_LIST),
+ INTERFACES("interfaces", "interface-lifecycle-types", "interfaceLifecycleTypes", "interface_types", DATA_IMPORT_LIST);
+
+ private String fileName;
+ private String sourceFolderName;
+ private String sourceFileName;
+ private String collectionTitle;
+ private String[] importFileList;
+
+ SchemaZipFileEnum(String fileName, String sourceFolderName, String sourceFileName, String collectionTitle,
+ String[] importFileList) {
+ this.fileName = fileName;
+ this.sourceFolderName = sourceFolderName;
+ this.sourceFileName = sourceFileName;
+ this.collectionTitle = collectionTitle;
+ this.importFileList = importFileList;
+ }
+
+ public String getFileName() {
+ return fileName;
+ }
+
+ public String getSourceFolderName() {
+ return sourceFolderName;
+ }
+
+ public String getSourceFileName() {
+ return sourceFileName;
+ }
+
+ public String getCollectionTitle() {
+ return collectionTitle;
+ }
+
+ public String[] getImportFileList() {
+ return importFileList;
+ }
+
+ static class SchemaZipConstants {
+ static final String [] EMPTY_IMPORT_LIST = new String[]{};
+ static final String [] DATA_IMPORT_LIST = new String[]{"data.yml"};
+ static final String [] RELATIONSHIPS_TYPES_IMPORT_LIST = new String[]{"capabilities.yml", "data.yml", "interfaces.yml"};
+ }
- public void setImportFileList(String[] importFileList) {
- this.importFileList = importFileList;
- }
-
}
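The enum refactor above replaces the per-constant array literals with shared constants (EMPTY_IMPORT_LIST, DATA_IMPORT_LIST, RELATIONSHIPS_TYPES_IMPORT_LIST), drops the mutable setters, and adds the ANNOTATIONS entry. As a small sketch only (the actual consumer is SdcSchemaFileImport, touched in the diffstat but not shown here), iterating the refactored enum could look like:

import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;

public class SchemaZipFileEnumWalkThrough {

    public static void main(String[] args) {
        // Walks every schema entry and prints the metadata exposed by the remaining getters.
        for (SchemaZipFileEnum entry : SchemaZipFileEnum.values()) {
            System.out.printf("%s.yml <- folder '%s', collection '%s', %d import(s)%n",
                    entry.getFileName(),
                    entry.getSourceFolderName(),
                    entry.getCollectionTitle(),
                    entry.getImportFileList().length);
        }
    }
}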
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
index fa02377623..a4ee8d8a2c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
@@ -1,20 +1,9 @@
package org.openecomp.sdc.asdctool.impl;
-import java.io.BufferedWriter;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonArray;
+import fj.data.Either;
import org.openecomp.sdc.asdctool.impl.validator.utils.VfModuleArtifactPayloadEx;
import org.openecomp.sdc.be.components.distribution.engine.VfModuleArtifactPayload;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
@@ -35,16 +24,7 @@ import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.datatypes.tosca.ToscaDataDefinition;
import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.ArtifactDefinition;
-import org.openecomp.sdc.be.model.Component;
-import org.openecomp.sdc.be.model.ComponentInstance;
-import org.openecomp.sdc.be.model.ComponentParametersView;
-import org.openecomp.sdc.be.model.DistributionStatusEnum;
-import org.openecomp.sdc.be.model.GroupDefinition;
-import org.openecomp.sdc.be.model.GroupInstance;
-import org.openecomp.sdc.be.model.LifecycleStateEnum;
-import org.openecomp.sdc.be.model.Resource;
-import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.*;
import org.openecomp.sdc.be.model.jsontitan.datamodel.TopologyTemplate;
import org.openecomp.sdc.be.model.jsontitan.datamodel.ToscaElement;
import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
@@ -60,17 +40,14 @@ import org.openecomp.sdc.be.tosca.ToscaRepresentation;
import org.openecomp.sdc.common.api.ArtifactGroupTypeEnum;
import org.openecomp.sdc.common.api.ArtifactTypeEnum;
import org.openecomp.sdc.common.api.Constants;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.openecomp.sdc.common.util.GeneralUtility;
import org.openecomp.sdc.exception.ResponseFormat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.JsonArray;
-
-import fj.data.Either;
+import java.io.*;
+import java.util.*;
+import java.util.stream.Collectors;
@org.springframework.stereotype.Component("artifactUuidFix")
public class ArtifactUuidFix {
@@ -89,7 +66,7 @@ public class ArtifactUuidFix {
@Autowired
private CsarUtils csarUtils;
- private static Logger log = LoggerFactory.getLogger(ArtifactUuidFix.class.getName());
+ private static Logger log = Logger.getLogger(ArtifactUuidFix.class.getName());
public boolean doFix(String fixComponent, String runMode) {
List<Resource> vfLst = new ArrayList<>();
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
index 8b6898d3b0..cad7a05cb2 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
@@ -7,9 +7,9 @@
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -20,27 +20,11 @@
package org.openecomp.sdc.asdctool.impl;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.lang.reflect.Type;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.EnumMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.TimeZone;
+import com.carrotsearch.hppc.cursors.ObjectCursor;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import fj.data.Either;
import org.apache.commons.lang.SystemUtils;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -50,6 +34,17 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
+import org.openecomp.sdc.be.auditing.api.AuditEventFactory;
+import org.openecomp.sdc.be.auditing.impl.AuditAuthRequestEventFactory;
+import org.openecomp.sdc.be.auditing.impl.AuditConsumerEventFactory;
+import org.openecomp.sdc.be.auditing.impl.AuditGetUebClusterEventFactory;
+import org.openecomp.sdc.be.auditing.impl.category.AuditCategoryEventFactory;
+import org.openecomp.sdc.be.auditing.impl.category.AuditGetCategoryHierarchyEventFactory;
+import org.openecomp.sdc.be.auditing.impl.distribution.*;
+import org.openecomp.sdc.be.auditing.impl.resourceadmin.AuditResourceAdminEventMigrationFactory;
+import org.openecomp.sdc.be.auditing.impl.usersadmin.AuditGetUsersListEventFactory;
+import org.openecomp.sdc.be.auditing.impl.usersadmin.AuditUserAccessEventFactory;
+import org.openecomp.sdc.be.auditing.impl.usersadmin.AuditUserAdminEventFactory;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
@@ -58,769 +53,761 @@ import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
import org.openecomp.sdc.be.resources.data.ESArtifactData;
import org.openecomp.sdc.be.resources.data.auditing.AuditingActionEnum;
import org.openecomp.sdc.be.resources.data.auditing.AuditingGenericEvent;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingGetUebClusterEvent;
import org.openecomp.sdc.be.resources.data.auditing.AuditingTypesConstants;
-import org.openecomp.sdc.be.resources.data.auditing.AuthEvent;
-import org.openecomp.sdc.be.resources.data.auditing.CategoryEvent;
-import org.openecomp.sdc.be.resources.data.auditing.ConsumerEvent;
-import org.openecomp.sdc.be.resources.data.auditing.DistributionDeployEvent;
-import org.openecomp.sdc.be.resources.data.auditing.DistributionDownloadEvent;
-import org.openecomp.sdc.be.resources.data.auditing.DistributionEngineEvent;
-import org.openecomp.sdc.be.resources.data.auditing.DistributionNotificationEvent;
-import org.openecomp.sdc.be.resources.data.auditing.DistributionStatusEvent;
-import org.openecomp.sdc.be.resources.data.auditing.GetCategoryHierarchyEvent;
-import org.openecomp.sdc.be.resources.data.auditing.GetUsersListEvent;
-import org.openecomp.sdc.be.resources.data.auditing.ResourceAdminEvent;
-import org.openecomp.sdc.be.resources.data.auditing.UserAccessEvent;
-import org.openecomp.sdc.be.resources.data.auditing.UserAdminEvent;
-import org.openecomp.sdc.common.datastructure.AuditingFieldsKeysEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.be.resources.data.auditing.model.*;
+import org.openecomp.sdc.common.datastructure.AuditingFieldsKey;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.beans.factory.annotation.Autowired;
-import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
-
-import fj.data.Either;
+import java.io.*;
+import java.net.MalformedURLException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.EnumMap;
+import java.util.Map;
/**
* Created by mlando on 5/16/2016.
*/
public class DataMigration {
- private Gson gson = new Gson();
-
- private ObjectMapper jsonMapper = new ObjectMapper();
-
- private static Logger log = LoggerFactory.getLogger(DataMigration.class.getName());
+ private ObjectMapper jsonMapper = new ObjectMapper();
- private ElasticSearchClient elasticSearchClient;
+ private static Logger log = Logger.getLogger(DataMigration.class.getName());
- @Autowired
- protected AuditCassandraDao auditCassandraDao;
- @Autowired
- protected ArtifactCassandraDao artifactCassandraDao;
-
- private static final String DATE_FORMAT_PATTERN = "yyyy-MM-dd HH:mm:ss.SSS z";
- private SimpleDateFormat simpleDateFormat;
+ private ElasticSearchClient elasticSearchClient;
+ @Autowired
+ private AuditCassandraDao auditCassandraDao;
+ @Autowired
+ private ArtifactCassandraDao artifactCassandraDao;
/**
- * the method exports and imports the records from ES to cassandra the flow
- * will check to see if the files are not empty if the files are not empty
- * the export will be skiped and the flow will use the existing files. the
- * flow will check if the tables in cassandra are empty, if the tables are
- * not empty the proces will stop and exit. if the tables are empty the
- * method will import the records from the files. in case of a fail the flow
- * will exit and clear all the Cassandra tables.
- *
- * @param appConfigDir
- * the location of the dir in wich the output files will be
- * stored
- * @param exportFromEs
- * should the es be exported again and overwrite the old export
- * @param importToCassandra
- * should we import the data into cassandra
- * @return true in case the operation was successful.
- */
- public boolean migrateDataEsToCassandra(String appConfigDir, boolean exportFromEs, boolean importToCassandra) {
- initFormater();
- if (!initEsClient()) {
- return false;
- }
- Map<Table, File> files = createOutPutFiles(appConfigDir, exportFromEs);
- if (files == null) {
- return false;
- }
- if (exportFromEs && filesEmpty(files)) {
- Map<Table, PrintWriter> printerWritersMap = createWriters(files);
- if (printerWritersMap == null) {
- return false;
- }
- try {
- ImmutableOpenMap<String, IndexMetaData> indexData = getIndexData();
- for (ObjectCursor<String> key : indexData.keys()) {
- if (("resources".equalsIgnoreCase(key.value) && !exportArtifacts(key.value, printerWritersMap))
- || (key.value.startsWith("auditingevents") && !exportAudit(key.value, printerWritersMap))) {
+ * the method exports and imports the records from ES to cassandra the flow
+ * will check to see if the files are not empty if the files are not empty
+ * the export will be skiped and the flow will use the existing files. the
+ * flow will check if the tables in cassandra are empty, if the tables are
+ * not empty the proces will stop and exit. if the tables are empty the
+ * method will import the records from the files. in case of a fail the flow
+ * will exit and clear all the Cassandra tables.
+ *
+ * @param appConfigDir
+ * the location of the dir in wich the output files will be
+ * stored
+ * @param exportFromEs
+ * should the es be exported again and overwrite the old export
+ * @param importToCassandra
+ * should we import the data into cassandra
+ * @return true in case the operation was successful.
+ */
+ public boolean migrateDataESToCassndra(String appConfigDir, boolean exportFromEs, boolean importToCassandra) {
+ if (!initEsClient()) {
+ return false;
+ }
+ Map<Table, File> files = createOutPutFiles(appConfigDir, exportFromEs);
+ if (files == null) {
+ return false;
+ }
+ if (exportFromEs && filesEmpty(files)) {
+ Map<Table, PrintWriter> printerWritersMap = createWriters(files);
+ if (printerWritersMap == null) {
+ return false;
+ }
+ try {
+ ImmutableOpenMap<String, IndexMetaData> indexData = getIndexData();
+ for (ObjectCursor<String> key : indexData.keys()) {
+ if (("resources".equalsIgnoreCase(key.value) || key.value.startsWith("auditingevents"))
+ && !exportArtifacts(key.value, printerWritersMap)) {
return false;
}
- }
- } finally {
- if (elasticSearchClient != null) {
- elasticSearchClient.close();
- }
- for (PrintWriter writer : printerWritersMap.values()) {
- writer.close();
- }
- }
- }
- return !importToCassandra || importToCassndra(files);
- }
-
- private void initFormater() {
- simpleDateFormat = new SimpleDateFormat(DATE_FORMAT_PATTERN);
- simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- }
-
- private boolean initEsClient() {
- String configHome = System.getProperty("config.home");
- URL url;
- Settings settings;
- try {
- if (SystemUtils.IS_OS_WINDOWS) {
- url = new URL("file:///" + configHome + "/elasticsearch.yml");
- } else {
- url = new URL("file:" + configHome + "/elasticsearch.yml");
- }
- log.debug("URL {}", url);
- settings = Settings.settingsBuilder().loadFromPath(Paths.get(url.toURI())).build();
- } catch (MalformedURLException | URISyntaxException e1) {
- log.error("Failed to create URL in order to load elasticsearch yml", e1);
- return true;
- }
-
- this.elasticSearchClient = new ElasticSearchClient();
- this.elasticSearchClient.setClusterName(settings.get("cluster.name"));
- this.elasticSearchClient.setLocal(settings.get("elasticSearch.local"));
- this.elasticSearchClient.setTransportClient(settings.get("elasticSearch.transportclient"));
- try {
- elasticSearchClient.initialize();
- } catch (URISyntaxException e) {
- log.error("Failed to initialize elasticSearchClient", e);
- return false;
- }
- return true;
- }
-
- /**
- * the method clears all the cassandra tables.
- */
- private void truncateCassandraTable() {
- log.info("import failed. truncating Cassandra tables.");
- artifactCassandraDao.deleteAllArtifacts();
- auditCassandraDao.deleteAllAudit();
- }
-
- /**
- * the method imports the records from the files into cassandra.
- *
- * @param files
- * a map of files holding
- * @return true if the operation was successful
- */
- private boolean importToCassndra(Map<Table, File> files) {
- log.info("starting to import date into Cassandra.");
- if (!validtaTablsNotEmpty(files)) {
- return true;
- }
- for (Table table : files.keySet()) {
- log.info("importing recordes into {}", table.getTableDescription().getTableName());
- if (!handleImport(files, table)) {
- truncateCassandraTable();
- return false;
- }
- }
- log.info("finished to import date into Cassandra.");
- return true;
- }
-
- private boolean validtaTablsNotEmpty(Map<Table, File> files) {
- for (Table table : files.keySet()) {
- Either<Boolean, CassandraOperationStatus> isTableEmptyRes = checkIfTableIsEmpty(table);
- if (isTableEmptyRes.isRight() || !isTableEmptyRes.left().value()) {
- log.error("Cassandra table {} is not empty operation aborted.",
- table.getTableDescription().getTableName());
- return false;
- }
- }
- return true;
- }
-
- /**
- * the method retrieves the fields from the given map and praprs them for
- * storage as an audit according to the table name
- *
- * @param map
- * the map from which we will retrive the fields enum values
- * @param table
- * the table we are going to store the record in.
- * @return a enummap representing the audit record that is going to be
- * created.
- */
- private EnumMap<AuditingFieldsKeysEnum, Object> createAuditMap(Map<String, String> map, Table table) {
- EnumMap<AuditingFieldsKeysEnum, Object> auditingFields = new EnumMap<>(AuditingFieldsKeysEnum.class);
- switch (table) {
- case USER_ADMIN_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_USER_AFTER, map.get("USER_AFTER"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_USER_BEFORE, map.get("USER_BEFORE"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
- break;
- case USER_ACCESS_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_USER_UID, map.get("USER"));
- break;
- case RESOURCE_ADMIN_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_INVARIANT_UUID, map.get("INVARIANT_UUID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_VERSION, map.get("CURR_VERSION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_STATE, map.get("CURR_STATE"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ID, map.get("DID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_PREV_VERSION, map.get("PREV_VERSION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_PREV_STATE, map.get("PREV_STATE"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_NAME, map.get("RESOURCE_NAME"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TYPE, map.get("RESOURCE_TYPE"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_DPREV_STATUS, map.get("DPREV_STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_DCURR_STATUS, map.get("DCURR_STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TOSCA_NODE_TYPE, map.get("TOSCA_NODE_TYPE"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_COMMENT, map.get("COMMENT"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ARTIFACT_DATA, map.get("ARTIFACT_DATA"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_PREV_ARTIFACT_UUID, map.get("PREV_ARTIFACT_UUID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_CURR_ARTIFACT_UUID, map.get("CURR_ARTIFACT_UUID"));
- break;
- case DISTRIBUTION_DOWNLOAD_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_RESOURCE_URL, map.get("RESOURCE_URL"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_CONSUMER_ID, map.get("CONSUMER_ID"));
- break;
- case DISTRIBUTION_ENGINE_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- if (map.get("TOPIC_NAME") != null) {
- if (map.get("TOPIC_NAME").contains("-STATUS-")) {
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_STATUS_TOPIC_NAME,
- map.get("TOPIC_NAME"));
- } else {
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_NOTIFICATION_TOPIC_NAME,
- map.get("TOPIC_NAME"));
+ }
+ } finally {
+ if (elasticSearchClient != null) {
+ elasticSearchClient.close();
+ }
+ for (PrintWriter writer : printerWritersMap.values()) {
+ writer.close();
+ }
+ }
+ }
+ if (importToCassandra && !importToCassndra(files)) {
+ return false;
+ }
+
+ return true;
+ }
+
+ private boolean initEsClient() {
+ String configHome = System.getProperty("config.home");
+ URL url = null;
+ Settings settings = null;
+ try {
+ if (SystemUtils.IS_OS_WINDOWS) {
+ url = new URL("file:///" + configHome + "/elasticsearch.yml");
+ } else {
+ url = new URL("file:" + configHome + "/elasticsearch.yml");
+ }
+ log.debug("URL {}", url);
+ settings = Settings.settingsBuilder().loadFromPath(Paths.get(url.toURI())).build();
+ } catch (MalformedURLException | URISyntaxException e1) {
+ log.error("Failed to create URL in order to load elasticsearch yml", e1);
+ return true;
+ }
+
+ this.elasticSearchClient = new ElasticSearchClient();
+ this.elasticSearchClient.setClusterName(settings.get("cluster.name"));
+ this.elasticSearchClient.setLocal(settings.get("elasticSearch.local"));
+ this.elasticSearchClient.setTransportClient(settings.get("elasticSearch.transportclient"));
+ try {
+ elasticSearchClient.initialize();
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * the method clears all the cassandra tables
+ */
+ private void truncateCassandraTable() {
+ log.info("import failed. truncating Cassandra tables.");
+ artifactCassandraDao.deleteAllArtifacts();
+ auditCassandraDao.deleteAllAudit();
+ }
+
+ /**
+ * the method imports the records from the files into cassandra
+ *
+ * @param files
+ * a map of files holding
+ * @return true if the operation was successful
+ */
+ private boolean importToCassndra(Map<Table, File> files) {
+ log.info("starting to import date into Cassandra.");
+ if (!validtaTablsNotEmpty(files))
+ return true;
+ for (Table table : files.keySet()) {
+ log.info("importing recordes into {}", table.getTableDescription().getTableName());
+ if (!handleImport(files, table)) {
+ truncateCassandraTable();
+ return false;
+ }
+ }
+ log.info("finished to import date into Cassandra.");
+ return true;
+ }
+
+ private boolean validtaTablsNotEmpty(Map<Table, File> files) {
+ for (Table table : files.keySet()) {
+ Either<Boolean, CassandraOperationStatus> isTableEmptyRes = checkIfTableIsEmpty(table);
+ if (isTableEmptyRes.isRight() || !isTableEmptyRes.left().value()) {
+ log.error("Cassandra table {} is not empty operation aborted.",
+ table.getTableDescription().getTableName());
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * the method retrieves the fields from the given map and generates
+ * corresponding audit event according to the table name
+ *
+ * @param map
+ * the map from which we will retrive the fields enum values
+ * @param table
+ * the table we are going to store the record in.
+ * @return an AuditingGenericEvent event representing the audit record that is going to be
+ * created.
+ */
+ AuditingGenericEvent createAuditEvent(Map<AuditingFieldsKey, String> map, Table table) {
+ AuditEventFactory factory = null;
+ switch (table) {
+ case USER_ADMIN_EVENT:
+ factory = getAuditUserAdminEventFactory(map);
+ break;
+ case USER_ACCESS_EVENT:
+ factory = getAuditUserAccessEventFactory(map);
+ break;
+ case RESOURCE_ADMIN_EVENT:
+ factory = getAuditResourceAdminEventMigrationFactory(map);
+ break;
+ case DISTRIBUTION_DOWNLOAD_EVENT:
+ factory = getAuditDistributionDownloadEventFactory(map);
+ break;
+ case DISTRIBUTION_ENGINE_EVENT:
+ factory = getAuditDistributionEngineEventMigrationFactory(map);
+ break;
+ case DISTRIBUTION_NOTIFICATION_EVENT:
+ factory = getAuditDistributionNotificationEventFactory(map);
+ break;
+ case DISTRIBUTION_STATUS_EVENT:
+ factory = getAuditDistributionStatusEventFactory(map);
+ break;
+ case DISTRIBUTION_DEPLOY_EVENT:
+ factory = getAuditDistributionDeployEventFactory(map);
+ break;
+ case DISTRIBUTION_GET_UEB_CLUSTER_EVENT:
+ factory = getAuditGetUebClusterEventFactory(map);
+ break;
+ case AUTH_EVENT:
+ factory = getAuditAuthRequestEventFactory(map);
+ break;
+ case CONSUMER_EVENT:
+ factory = getAuditConsumerEventFactory(map);
+ break;
+ case CATEGORY_EVENT:
+ factory = getAuditCategoryEventFactory(map);
+ break;
+ case GET_USERS_LIST_EVENT:
+ factory = getAuditGetUsersListEventFactory(map);
+ break;
+ case GET_CATEGORY_HIERARCHY_EVENT:
+ factory = getAuditGetCategoryHierarchyEventFactory(map);
+ break;
+ default:
+ break;
+ }
+ return factory != null ? factory.getDbEvent() : null;
+ }
+
+ private AuditEventFactory getAuditGetCategoryHierarchyEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditGetCategoryHierarchyEventFactory(
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
+ map.get(AuditingFieldsKey.AUDIT_DETAILS),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditGetUsersListEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditGetUsersListEventFactory(
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
+ map.get(AuditingFieldsKey.AUDIT_USER_DETAILS),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditCategoryEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditCategoryEventFactory(
+ AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
+ map.get(AuditingFieldsKey.AUDIT_CATEGORY_NAME),
+ map.get(AuditingFieldsKey.AUDIT_SUB_CATEGORY_NAME),
+ map.get(AuditingFieldsKey.AUDIT_GROUPING_NAME),
+ map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditUserAccessEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditUserAccessEventFactory(
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_USER_UID),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditUserAdminEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditUserAdminEventFactory(
+ AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
+ map.get(AuditingFieldsKey.AUDIT_USER_BEFORE),
+ map.get(AuditingFieldsKey.AUDIT_USER_AFTER),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditConsumerEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditConsumerEventFactory(
+ AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
+ map.get(AuditingFieldsKey.AUDIT_ECOMP_USER),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditAuthRequestEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditAuthRequestEventFactory(
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_USER_UID),
+ map.get(AuditingFieldsKey.AUDIT_AUTH_URL),
+ map.get(AuditingFieldsKey.AUDIT_AUTH_REALM),
+ map.get(AuditingFieldsKey.AUDIT_AUTH_STATUS),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditGetUebClusterEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditGetUebClusterEventFactory(
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditResourceAdminEventMigrationFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditResourceAdminEventMigrationFactory(
+ AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ new ResourceCommonInfo(map.get(AuditingFieldsKey.AUDIT_RESOURCE_NAME),
+ map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE)),
+ ResourceVersionInfo.newBuilder()
+ .artifactUuid(map.get(AuditingFieldsKey.AUDIT_PREV_ARTIFACT_UUID))
+ .state(map.get(AuditingFieldsKey.AUDIT_RESOURCE_PREV_STATE))
+ .version(map.get(AuditingFieldsKey.AUDIT_RESOURCE_PREV_VERSION))
+ .distributionStatus(map.get(AuditingFieldsKey.AUDIT_RESOURCE_DPREV_STATUS))
+ .build(),
+ ResourceVersionInfo.newBuilder()
+ .artifactUuid(map.get(AuditingFieldsKey.AUDIT_CURR_ARTIFACT_UUID))
+ .state(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_STATE))
+ .version(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION))
+ .distributionStatus(map.get(AuditingFieldsKey.AUDIT_RESOURCE_DCURR_STATUS))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_INVARIANT_UUID),
+ map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
+ map.get(AuditingFieldsKey.AUDIT_ARTIFACT_DATA),
+ map.get(AuditingFieldsKey.AUDIT_RESOURCE_COMMENT),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
+ map.get(AuditingFieldsKey.AUDIT_RESOURCE_TOSCA_NODE_TYPE),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditDistributionDownloadEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditDistributionDownloadEventFactory(
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ new DistributionData(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
+ map.get(AuditingFieldsKey.AUDIT_RESOURCE_URL)),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditDistributionEngineEventMigrationFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditDistributionEngineEventMigrationFactory(
+ AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ DistributionTopicData.newBuilder()
+ .notificationTopic(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_NOTIFICATION_TOPIC_NAME))
+ .statusTopic(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_STATUS_TOPIC_NAME))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_API_KEY),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ENVRIONMENT_NAME),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ROLE),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditDistributionDeployEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditDistributionDeployEventFactory(
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ new ResourceCommonInfo(map.get(AuditingFieldsKey.AUDIT_RESOURCE_NAME),
+ map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE)),
+ map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
+ map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditDistributionStatusEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditDistributionStatusEventFactory(
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ new DistributionData(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
+ map.get(AuditingFieldsKey.AUDIT_RESOURCE_URL)),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_TOPIC_NAME),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_STATUS_TIME),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+ private AuditEventFactory getAuditDistributionNotificationEventFactory(Map<AuditingFieldsKey, String> map) {
+ return new AuditDistributionNotificationEventFactory(
+ CommonAuditData.newBuilder()
+ .description(map.get(AuditingFieldsKey.AUDIT_DESC))
+ .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
+ .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
+ .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
+ .build(),
+ new ResourceCommonInfo(map.get(AuditingFieldsKey.AUDIT_RESOURCE_NAME),
+ map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE)),
+ ResourceVersionInfo.newBuilder()
+ .state(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_STATE))
+ .version(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION))
+ .build(),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
+ map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_TOPIC_NAME),
+ new OperationalEnvAuditData(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ENVIRONMENT_ID),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_VNF_WORKLOAD_CONTEXT),
+ map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_TENANT)),
+ map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
+ }
+
+
+
+ /**
+ * the method reads the content of the file intended for a given table, and
+ * stores the records in Cassandra
+ *
+ * @param files
+ * a map of files from which the records will be retrieved.
+ * @param table
+ * the name of the table we want to look up in the files and store
+ * in Cassandra
+ * @return true if the operation was successful
+ */
+ private boolean handleImport(Map<Table, File> files, Table table) {
+ BufferedReader br = null;
+ try {
+ br = new BufferedReader(new FileReader(files.get(table)));
+ String line = null;
+ while ((line = br.readLine()) != null) {
+ CassandraOperationStatus res = CassandraOperationStatus.GENERAL_ERROR;
+ if (Table.ARTIFACT.equals(table)) {
+ res = artifactCassandraDao.saveArtifact(jsonMapper.readValue(line, ESArtifactData.class));
+ }
+ else {
+ AuditingGenericEvent recordForCassandra = createAuditRecordForCassandra(line, table);
+ if (recordForCassandra != null) {
+ res = auditCassandraDao.saveRecord(recordForCassandra);
}
- } else {
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_STATUS_TOPIC_NAME,
- map.get("DSTATUS_TOPIC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_NOTIFICATION_TOPIC_NAME,
- map.get("DNOTIF_TOPIC"));
- }
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_TOPIC_NAME, map.get("TOPIC_NAME"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ROLE, map.get("ROLE"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_API_KEY, map.get("API_KEY"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ENVRIONMENT_NAME, map.get("D_ENV"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_CONSUMER_ID, map.get("CONSUMER_ID"));
- break;
- case DISTRIBUTION_NOTIFICATION_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_STATE, map.get("CURR_STATE"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_VERSION, map.get("CURR_VERSION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ID, map.get("DID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_NAME, map.get("RESOURCE_NAME"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TYPE, map.get("RESOURCE_TYPE"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_TOPIC_NAME, map.get("TOPIC_NAME"));
- break;
- case DISTRIBUTION_STATUS_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_RESOURCE_URL, map.get("RESOURCE_URL"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ID, map.get("DID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_TOPIC_NAME, map.get("TOPIC_NAME"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_CONSUMER_ID, map.get("CONSUMER_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_STATUS_TIME, map.get("STATUS_TIME"));
- break;
- case DISTRIBUTION_DEPLOY_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_ID, map.get("DID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_NAME, map.get("RESOURCE_NAME"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TYPE, map.get("RESOURCE_TYPE"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_CURR_VERSION, map.get("CURR_VERSION"));
- break;
- case DISTRIBUTION_GET_UEB_CLUSTER_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- if (map.get("STATUS_DESC") != null) {
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("STATUS_DESC"));
- } else {
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- }
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_CONSUMER_ID, map.get("CONSUMER_ID"));
- break;
- case AUTH_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_AUTH_USER, map.get("USER"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_AUTH_URL, map.get("URL"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_AUTH_STATUS, map.get("AUTH_STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_AUTH_REALM, map.get("REALM"));
- break;
- case CONSUMER_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ECOMP_USER, map.get("ECOMP_USER"));
- break;
- case CATEGORY_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, map.get("SERVICE_INSTANCE_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_CATEGORY_NAME, map.get("CATEGORY_NAME"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_SUB_CATEGORY_NAME, map.get("SUB_CATEGORY_NAME"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_GROUPING_NAME, map.get("GROUPING_NAME"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TYPE, map.get("RESOURCE_TYPE"));
- break;
- case GET_USERS_LIST_EVENT:
- case GET_CATEGORY_HIERARCHY_EVENT:
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, map.get("TIMESTAMP"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, map.get("ACTION"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DESC, map.get("DESC"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_STATUS, map.get("STATUS"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_MODIFIER_UID, map.get("MODIFIER"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_REQUEST_ID, map.get("REQUEST_ID"));
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_DETAILS, map.get("DETAILS"));
- break;
- default:
- auditingFields = null;
- break;
- }
- return auditingFields;
- }
-
- /**
- * the method reads the content of the file intended for a given table, and
- * sores them in cassandra
- *
- * @param files
- * a map of files from which the recordes will be retrieved.
- * @param table
- * the name of the table we want to look up in the files and sore
- * in Cassandra // * @param store the function to call when
- * storing recordes in cassndra
- * @return true if the operation was successful
- */
- private boolean handleImport(Map<Table, File> files, Table table) {
- try (BufferedReader br = new BufferedReader(new FileReader(files.get(table)))) {
- String line;
- while ((line = br.readLine()) != null) {
- CassandraOperationStatus res;
- if (Table.ARTIFACT.equals(table)) {
- res = artifactCassandraDao.saveArtifact(jsonMapper.readValue(line, ESArtifactData.class));
- } else {
- Type type = new TypeToken<Map<String, String>>() {}.getType();
- Map<String, String> map = gson.fromJson(line, type);
- EnumMap<AuditingFieldsKeysEnum, Object> auditingFields = createAuditMap(map, table);
- AuditingGenericEvent recordForCassandra;
- try {
- recordForCassandra = createAuditRecord(auditingFields);
- } catch (ParseException e) {
- log.error("filed to parse time stemp in recored {}", auditingFields);
- return false;
- }
- res = auditCassandraDao.saveRecord(recordForCassandra);
- }
- if (!res.equals(CassandraOperationStatus.OK)) {
- log.error("save recored to cassndra {} failed with status {} aborting.",
- table.getTableDescription().getTableName(), res);
- return false;
- }
- }
- return true;
- } catch (IOException e) {
- log.error("failed to read file", e);
- return false;
- }
- }
-
- /**
- * the method checks if the given table is empty.
- *
- * @param table
- * the name of the table we want to check
- * @return true if the table is empty
- */
- private Either<Boolean, CassandraOperationStatus> checkIfTableIsEmpty(Table table) {
- if (Table.ARTIFACT.equals(table)) {
- return artifactCassandraDao.isTableEmpty(table.getTableDescription().getTableName());
- } else {
- return auditCassandraDao.isTableEmpty(table.getTableDescription().getTableName());
- }
+ }
+ if (!res.equals(CassandraOperationStatus.OK)) {
+ log.error("save recored to cassndra {} failed with status {} aborting.",
+ table.getTableDescription().getTableName(), res);
+ return false;
+ }
+ }
+ return true;
+ } catch (IOException e) {
+ log.error("failed to read file", e);
+ return false;
+ } finally {
+ if (br != null) {
+ try {
+ br.close();
+ } catch (IOException e) {
+ log.error("failed to close file reader", e);
+ }
+ }
+ }
+ }
+
+ AuditingGenericEvent createAuditRecordForCassandra(String json, Table table) throws IOException {
+ return createAuditEvent(parseToMap(json), table);
}
- private boolean filesEmpty(Map<Table, File> files) {
- for (Entry<Table, File> entry : files.entrySet()) {
- File file = entry.getValue();
- if (file.length() != 0) {
- log.info("file:{} is not empty skipping export", entry.getKey().getTableDescription().getTableName());
- return false;
- }
- }
- return true;
- }
-
- /**
- * the method reads the records from es index of audit's into a file as
- * json's.
- *
- * @param value
- * the name of the index we want
- * @param printerWritersMap
- * a map of the writers we use to write to a file.
- * @return true in case the export was successful.
- */
- private boolean exportAudit(String value, Map<Table, PrintWriter> printerWritersMap) {
- log.info("stratng to export audit data from es index{} to file.", value);
- QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
- SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(value).setScroll(new TimeValue(60000))
- .setQuery(queryBuilder).setSize(100).execute().actionGet();
- while (true) {
- for (SearchHit hit : scrollResp.getHits().getHits()) {
- PrintWriter out = printerWritersMap.get(TypeToTableMapping.getTableByType(hit.getType()));
- out.println(hit.getSourceAsString());
- }
- scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
- .setScroll(new TimeValue(60000)).execute().actionGet();
- if (scrollResp.getHits().getHits().length == 0) {
- break;
-
- }
- }
- log.info("export audit data from es to file. finished succsesfully");
- return true;
- }
-
- /**
- * the method reads the records from es index of resources into a file as
- * json's.
- *
- * @param index
- * the name of the index we want to read
- * @param printerWritersMap
- * a map of the writers we use to write to a file.
- * @return true in case the export was successful.
- */
- private boolean exportArtifacts(String index, Map<Table, PrintWriter> printerWritersMap) {
- log.info("stratng to export artifact data from es to file.");
- PrintWriter out = printerWritersMap.get(Table.ARTIFACT);
- QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
- SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(index).setScroll(new TimeValue(60000))
- .setQuery(queryBuilder).setSize(100).execute().actionGet();
- while (true) {
- for (SearchHit hit : scrollResp.getHits().getHits()) {
- out.println(hit.getSourceAsString());
- }
- scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
- .setScroll(new TimeValue(60000)).execute().actionGet();
- if (scrollResp.getHits().getHits().length == 0) {
- break;
- }
- }
- log.info("export artifact data from es to file. finished succsesfully");
- return true;
- }
-
- /**
- * the method retrieves all the indexes from elasticsearch.
- *
- * @return a map of indexes and there metadata
- */
- private ImmutableOpenMap<String, IndexMetaData> getIndexData() {
- return elasticSearchClient.getClient().admin().cluster().prepareState().get().getState().getMetaData()
- .getIndices();
- }
-
- /**
- * the method creates all the files and dir which holds them. in case the
- * files exist they will not be created again.
- *
- * @param appConfigDir
- * the base path under which the output dir will be created and
- * the export result files the created filesa are named according
- * to the name of the table into which it will be imported.
- * @param exportToEs
- * if true all the export files will be recreated
- * @return returns a map of tables and the files representing them them
- */
- private Map<Table, File> createOutPutFiles(String appConfigDir, boolean exportToEs) {
- Map<Table, File> result = new EnumMap<>(Table.class);
- File outputDir = new File(appConfigDir + "/output/");
- if (!createOutPutFolder(outputDir)) {
- return null;
- }
- for (Table table : Table.values()) {
- File file = new File(outputDir + "/" + table.getTableDescription().getTableName());
- if (exportToEs) {
- try {
- if (file.exists()) {
- Files.delete(file.toPath());
- }
- } catch (IOException e) {
- log.error("failed to delete output file {}", file.getAbsolutePath(), e);
- return null;
- }
- file = new File(outputDir + "/" + table.getTableDescription().getTableName());
- }
- if (!file.exists()) {
- try {
- file.createNewFile();
- } catch (IOException e) {
- log.error("failed to create output file {}", file.getAbsolutePath(), e);
- return null;
- }
- }
- result.put(table, file);
- }
- return result;
- }
-
- /**
- * the method create the writers to each file
- *
- * @param files
- * a map of the files according to table
- * @return returns a map of writers according to table.
- */
- private Map<Table, PrintWriter> createWriters(Map<Table, File> files) {
- Map<Table, PrintWriter> printerWritersMap = new EnumMap<>(Table.class);
- try {
- for (Entry<Table, File> entry : files.entrySet()) {
- log.info("creating writer for {}", entry.getKey());
- File file = entry.getValue();
- FileWriter fw = new FileWriter(file, true);
- BufferedWriter bw = new BufferedWriter(fw);
- PrintWriter out = new PrintWriter(bw);
- printerWritersMap.put(entry.getKey(), out);
- log.info("creating writer for {} was successful", entry.getKey());
- }
- } catch (IOException e) {
- log.error("create writer to file failed", e);
- return null;
- }
- return printerWritersMap;
- }
-
- /**
- * the method creates the output dir in case it does not exist
- *
- * @param outputDir
- * the path under wich the directory will be created.
- * @return true in case the create was succsesful or the dir already exists
- */
- private boolean createOutPutFolder(File outputDir) {
- if (!outputDir.exists()) {
- log.info("creating output dir {}", outputDir.getAbsolutePath());
- try {
- Files.createDirectories(outputDir.toPath());
- } catch (IOException e) {
- log.error("failed to create output dir {}", outputDir.getAbsolutePath(), e);
- return false;
- }
- }
- return true;
- }
-
- public enum TypeToTableMapping {
- USER_ADMIN_EVENT_TYPE(AuditingTypesConstants.USER_ADMIN_EVENT_TYPE,
- Table.USER_ADMIN_EVENT), USER_ACCESS_EVENT_TYPE(AuditingTypesConstants.USER_ACCESS_EVENT_TYPE,
- Table.USER_ACCESS_EVENT), RESOURCE_ADMIN_EVENT_TYPE(
- AuditingTypesConstants.RESOURCE_ADMIN_EVENT_TYPE,
- Table.RESOURCE_ADMIN_EVENT), DISTRIBUTION_DOWNLOAD_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_DOWNLOAD_EVENT_TYPE,
- Table.DISTRIBUTION_DOWNLOAD_EVENT), DISTRIBUTION_ENGINE_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_ENGINE_EVENT_TYPE,
- Table.DISTRIBUTION_ENGINE_EVENT), DISTRIBUTION_NOTIFICATION_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_NOTIFICATION_EVENT_TYPE,
- Table.DISTRIBUTION_NOTIFICATION_EVENT), DISTRIBUTION_STATUS_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_STATUS_EVENT_TYPE,
- Table.DISTRIBUTION_STATUS_EVENT), DISTRIBUTION_DEPLOY_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_DEPLOY_EVENT_TYPE,
- Table.DISTRIBUTION_DEPLOY_EVENT), DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE,
- Table.DISTRIBUTION_GET_UEB_CLUSTER_EVENT), AUTH_EVENT_TYPE(
- AuditingTypesConstants.AUTH_EVENT_TYPE,
- Table.AUTH_EVENT), CONSUMER_EVENT_TYPE(
- AuditingTypesConstants.CONSUMER_EVENT_TYPE,
- Table.CONSUMER_EVENT), CATEGORY_EVENT_TYPE(
- AuditingTypesConstants.CATEGORY_EVENT_TYPE,
- Table.CATEGORY_EVENT), GET_USERS_LIST_EVENT_TYPE(
- AuditingTypesConstants.GET_USERS_LIST_EVENT_TYPE,
- Table.GET_USERS_LIST_EVENT), GET_CATEGORY_HIERARCHY_EVENT_TYPE(
- AuditingTypesConstants.GET_CATEGORY_HIERARCHY_EVENT_TYPE,
- Table.GET_CATEGORY_HIERARCHY_EVENT);
-
- String typeName;
- Table table;
-
- TypeToTableMapping(String typeName, Table table) {
- this.typeName = typeName;
- this.table = table;
- }
-
- public String getTypeName() {
- return typeName;
- }
-
- public Table getTable() {
- return table;
- }
-
- public static Table getTableByType(String type) {
- for (TypeToTableMapping mapping : TypeToTableMapping.values()) {
- if (mapping.getTypeName().equalsIgnoreCase(type)) {
- return mapping.getTable();
- }
- }
- return null;
- }
- }
-
- public AuditingGenericEvent createAuditRecord(EnumMap<AuditingFieldsKeysEnum, Object> auditingFields)
- throws ParseException {
- AuditingActionEnum actionEnum = AuditingActionEnum
- .getActionByName((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_ACTION));
- String tableName = actionEnum.getAuditingEsType();
- AuditingGenericEvent event;
- Date date;
- switch (tableName) {
- case AuditingTypesConstants.USER_ADMIN_EVENT_TYPE:
- UserAdminEvent userAdminEvent = new UserAdminEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- userAdminEvent.setTimestamp1(date);
- event = userAdminEvent;
- break;
- case AuditingTypesConstants.AUTH_EVENT_TYPE:
- AuthEvent authEvent = new AuthEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- authEvent.setTimestamp1(date);
- event = authEvent;
- break;
- case AuditingTypesConstants.CATEGORY_EVENT_TYPE:
- CategoryEvent categoryEvent = new CategoryEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- categoryEvent.setTimestamp1(date);
- event = categoryEvent;
- break;
- case AuditingTypesConstants.RESOURCE_ADMIN_EVENT_TYPE:
- ResourceAdminEvent resourceAdminEvent = new ResourceAdminEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- resourceAdminEvent.setTimestamp1(date);
- event = resourceAdminEvent;
- break;
- case AuditingTypesConstants.USER_ACCESS_EVENT_TYPE:
- UserAccessEvent userAccessEvent = new UserAccessEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- userAccessEvent.setTimestamp1(date);
- event = userAccessEvent;
- break;
- case AuditingTypesConstants.DISTRIBUTION_STATUS_EVENT_TYPE:
- DistributionStatusEvent distributionStatusEvent = new DistributionStatusEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- distributionStatusEvent.setTimestamp1(date);
- event = distributionStatusEvent;
- break;
- case AuditingTypesConstants.DISTRIBUTION_DOWNLOAD_EVENT_TYPE:
- DistributionDownloadEvent distributionDownloadEvent = new DistributionDownloadEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- distributionDownloadEvent.setTimestamp1(date);
- event = distributionDownloadEvent;
- break;
- case AuditingTypesConstants.DISTRIBUTION_ENGINE_EVENT_TYPE:
- DistributionEngineEvent distributionEngineEvent = new DistributionEngineEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- distributionEngineEvent.setTimestamp1(date);
- event = distributionEngineEvent;
- break;
- case AuditingTypesConstants.DISTRIBUTION_NOTIFICATION_EVENT_TYPE:
- DistributionNotificationEvent distributionNotificationEvent = new DistributionNotificationEvent(
- auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- distributionNotificationEvent.setTimestamp1(date);
- event = distributionNotificationEvent;
- break;
- case AuditingTypesConstants.DISTRIBUTION_DEPLOY_EVENT_TYPE:
- DistributionDeployEvent distributionDeployEvent = new DistributionDeployEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- distributionDeployEvent.setTimestamp1(date);
- event = distributionDeployEvent;
- break;
- case AuditingTypesConstants.DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE:
- AuditingGetUebClusterEvent auditingGetUebClusterEvent = new AuditingGetUebClusterEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- auditingGetUebClusterEvent.setTimestamp1(date);
- event = auditingGetUebClusterEvent;
- break;
- case AuditingTypesConstants.CONSUMER_EVENT_TYPE:
- ConsumerEvent consumerEvent = new ConsumerEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- consumerEvent.setTimestamp1(date);
- event = consumerEvent;
- break;
- case AuditingTypesConstants.GET_USERS_LIST_EVENT_TYPE:
- GetUsersListEvent getUsersListEvent = new GetUsersListEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- getUsersListEvent.setTimestamp1(date);
- event = getUsersListEvent;
- break;
- case AuditingTypesConstants.GET_CATEGORY_HIERARCHY_EVENT_TYPE:
- GetCategoryHierarchyEvent getCategoryHierarchyEvent = new GetCategoryHierarchyEvent(auditingFields);
- date = simpleDateFormat.parse((String) auditingFields.get(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP));
- getCategoryHierarchyEvent.setTimestamp1(date);
- event = getCategoryHierarchyEvent;
- break;
- default:
- event = null;
- break;
- }
- return event;
- }
+ private Map<AuditingFieldsKey, String> parseToMap(String json) throws IOException {
+ return jsonMapper.readValue(json, new TypeReference<Map<AuditingFieldsKey, String>>(){});
+ }
+
+ /**
+ * the method checks if the given table is empty
+ *
+ * @param table
+ * the name of the table we want to check
+ * @return true if the table is empty
+ */
+ private Either<Boolean, CassandraOperationStatus> checkIfTableIsEmpty(Table table) {
+ if (Table.ARTIFACT.equals(table)) {
+ return artifactCassandraDao.isTableEmpty(table.getTableDescription().getTableName());
+ } else {
+ return auditCassandraDao.isTableEmpty(table.getTableDescription().getTableName());
+ }
+ }
+
+ private boolean filesEmpty(Map<Table, File> files) {
+ for (Table table : files.keySet()) {
+ File file = files.get(table);
+ if (file.length() != 0) {
+ log.info("file:{} is not empty skipping export", table.getTableDescription().getTableName());
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * the method reads the audit records from the ES index into a file as
+ * JSON.
+ *
+ * @param value
+ * the name of the index we want to read
+ * @param printerWritersMap
+ * a map of the writers we use to write to a file.
+ * @return true in case the export was successful.
+ */
+ private boolean exportAudit(String value, Map<Table, PrintWriter> printerWritersMap) {
+ log.info("stratng to export audit data from es index{} to file.", value);
+ QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
+ SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(value).setScroll(new TimeValue(60000))
+ .setQuery(queryBuilder).setSize(100).execute().actionGet();
+ while (true) {
+ for (SearchHit hit : scrollResp.getHits().getHits()) {
+ PrintWriter out = printerWritersMap.get(TypeToTableMapping.getTableByType(hit.getType()));
+ out.println(hit.getSourceAsString());
+ }
+ scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
+ .setScroll(new TimeValue(60000)).execute().actionGet();
+ if (scrollResp.getHits().getHits().length == 0) {
+ break;
+
+ }
+ }
+
+ log.info("export audit data from es to file. finished succsesfully");
+ return true;
+ }
+
+ /**
+ * the method reads the resource records from the ES index into a file as
+ * JSON.
+ *
+ * @param index
+ * the name of the index we want to read
+ * @param printerWritersMap
+ * a map of the writers we use to write to a file.
+ * @return true in case the export was successful.
+ */
+ private boolean exportArtifacts(String index, Map<Table, PrintWriter> printerWritersMap) {
+ log.info("stratng to export artifact data from es to file.");
+ PrintWriter out = printerWritersMap.get(Table.ARTIFACT);
+ QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
+ SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(index).setScroll(new TimeValue(60000))
+ .setQuery(queryBuilder).setSize(100).execute().actionGet();
+ while (true) {
+ for (SearchHit hit : scrollResp.getHits().getHits()) {
+ out.println(hit.getSourceAsString());
+ }
+ scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
+ .setScroll(new TimeValue(60000)).execute().actionGet();
+ if (scrollResp.getHits().getHits().length == 0) {
+ break;
+
+ }
+ }
+
+ log.info("export artifact data from es to file. finished succsesfully");
+ return true;
+ }
+
+ /**
+ * the method retrieves all the indexes from Elasticsearch
+ *
+ * @return a map of indexes and their metadata
+ */
+ private ImmutableOpenMap<String, IndexMetaData> getIndexData() {
+ return elasticSearchClient.getClient().admin().cluster().prepareState().get().getState().getMetaData()
+ .getIndices();
+ }
+
+ /**
+ * the method creates all the files and the dir which holds them. in case the
+ * files exist they will not be created again.
+ *
+ * @param appConfigDir
+ * the base path under which the output dir and the export result
+ * files will be created. the created files are named according to
+ * the name of the table into which they will be imported.
+ * @param exportToEs
+ * if true all the export files will be recreated
+ * @return a map of tables and the files representing them
+ */
+ private Map<Table, File> createOutPutFiles(String appConfigDir, boolean exportToEs) {
+ Map<Table, File> result = new EnumMap<>(Table.class);
+ File outputDir = new File(appConfigDir + "/output/");
+ if (!createOutPutFolder(outputDir)) {
+ return null;
+ }
+ for (Table table : Table.values()) {
+ File file = new File(outputDir + "/" + table.getTableDescription().getTableName());
+ if (exportToEs) {
+ try {
+ if (file.exists()) {
+ Files.delete(file.toPath());
+ }
+ } catch (IOException e) {
+ log.error("failed to delete output file {}", file.getAbsolutePath(), e);
+ return null;
+ }
+ file = new File(outputDir + "/" + table.getTableDescription().getTableName());
+ }
+ if (!file.exists()) {
+ try {
+ file.createNewFile();
+ } catch (IOException e) {
+ log.error("failed to create output file {}", file.getAbsolutePath(), e);
+ return null;
+ }
+ }
+ result.put(table, file);
+
+ }
+ return result;
+ }
+
+ /**
+ * the method creates the writers for each file
+ *
+ * @param files
+ * a map of the files according to table
+ * @return a map of writers according to table.
+ */
+ private Map<Table, PrintWriter> createWriters(Map<Table, File> files) {
+ Map<Table, PrintWriter> printerWritersMap = new EnumMap<>(Table.class);
+ try {
+ for (Table table : files.keySet()) {
+ log.info("creating writer for {}", table);
+ File file = files.get(table);
+ FileWriter fw = new FileWriter(file, true);
+ BufferedWriter bw = new BufferedWriter(fw);
+ PrintWriter out = new PrintWriter(bw);
+ printerWritersMap.put(table, out);
+ log.info("creating writer for {} was successful", table);
+ }
+ } catch (IOException e) {
+ log.error("create writer to file failed", e);
+ return null;
+ }
+ return printerWritersMap;
+ }
+
+ /**
+ * the method creates the output dir in case it does not exist
+ *
+ * @param outputDir
+ * the path under which the directory will be created.
+ * @return true in case the creation was successful or the dir already exists
+ */
+ private boolean createOutPutFolder(File outputDir) {
+ if (!outputDir.exists()) {
+ log.info("creating output dir {}", outputDir.getAbsolutePath());
+ try {
+ Files.createDirectories(outputDir.toPath());
+ } catch (IOException e) {
+ log.error("failed to create output dir {}", outputDir.getAbsolutePath(), e);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public enum TypeToTableMapping {
+ USER_ADMIN_EVENT_TYPE(AuditingTypesConstants.USER_ADMIN_EVENT_TYPE,
+ Table.USER_ADMIN_EVENT), USER_ACCESS_EVENT_TYPE(AuditingTypesConstants.USER_ACCESS_EVENT_TYPE,
+ Table.USER_ACCESS_EVENT), RESOURCE_ADMIN_EVENT_TYPE(
+ AuditingTypesConstants.RESOURCE_ADMIN_EVENT_TYPE,
+ Table.RESOURCE_ADMIN_EVENT), DISTRIBUTION_DOWNLOAD_EVENT_TYPE(
+ AuditingTypesConstants.DISTRIBUTION_DOWNLOAD_EVENT_TYPE,
+ Table.DISTRIBUTION_DOWNLOAD_EVENT), DISTRIBUTION_ENGINE_EVENT_TYPE(
+ AuditingTypesConstants.DISTRIBUTION_ENGINE_EVENT_TYPE,
+ Table.DISTRIBUTION_ENGINE_EVENT), DISTRIBUTION_NOTIFICATION_EVENT_TYPE(
+ AuditingTypesConstants.DISTRIBUTION_NOTIFICATION_EVENT_TYPE,
+ Table.DISTRIBUTION_NOTIFICATION_EVENT), DISTRIBUTION_STATUS_EVENT_TYPE(
+ AuditingTypesConstants.DISTRIBUTION_STATUS_EVENT_TYPE,
+ Table.DISTRIBUTION_STATUS_EVENT), DISTRIBUTION_DEPLOY_EVENT_TYPE(
+ AuditingTypesConstants.DISTRIBUTION_DEPLOY_EVENT_TYPE,
+ Table.DISTRIBUTION_DEPLOY_EVENT), DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE(
+ AuditingTypesConstants.DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE,
+ Table.DISTRIBUTION_GET_UEB_CLUSTER_EVENT), AUTH_EVENT_TYPE(
+ AuditingTypesConstants.AUTH_EVENT_TYPE,
+ Table.AUTH_EVENT), CONSUMER_EVENT_TYPE(
+ AuditingTypesConstants.CONSUMER_EVENT_TYPE,
+ Table.CONSUMER_EVENT), CATEGORY_EVENT_TYPE(
+ AuditingTypesConstants.CATEGORY_EVENT_TYPE,
+ Table.CATEGORY_EVENT), GET_USERS_LIST_EVENT_TYPE(
+ AuditingTypesConstants.GET_USERS_LIST_EVENT_TYPE,
+ Table.GET_USERS_LIST_EVENT), GET_CATEGORY_HIERARCHY_EVENT_TYPE(
+ AuditingTypesConstants.GET_CATEGORY_HIERARCHY_EVENT_TYPE,
+ Table.GET_CATEGORY_HIERARCHY_EVENT);
+
+ String typeName;
+ Table table;
+
+ TypeToTableMapping(String typeName, Table table) {
+ this.typeName = typeName;
+ this.table = table;
+ }
+
+ public String getTypeName() {
+ return typeName;
+ }
+
+ public Table getTable() {
+ return table;
+ }
+
+ public static Table getTableByType(String type) {
+ for (TypeToTableMapping mapping : TypeToTableMapping.values()) {
+ if (mapping.getTypeName().equalsIgnoreCase(type)) {
+ return mapping.getTable();
+ }
+ }
+ return null;
+ }
+ }
}
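
The import side of the rewritten DataMigration flow parses each exported line back into a map keyed by the audit field enum (parseToMap feeds the Jackson jsonMapper a TypeReference) and then dispatches to the matching event factory in createAuditEvent. Below is a minimal, self-contained sketch of that parsing step; it substitutes plain String keys for the SDC AuditingFieldsKey enum so it compiles without the SDC code base, and the sample line is hypothetical.

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.IOException;
    import java.util.Map;

    // Standalone sketch of the per-line parsing performed by DataMigration.parseToMap.
    // String keys stand in for the AuditingFieldsKey enum used in the patch.
    public class AuditLineParseSketch {

        private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

        static Map<String, String> parseToMap(String json) throws IOException {
            return JSON_MAPPER.readValue(json, new TypeReference<Map<String, String>>() {});
        }

        public static void main(String[] args) throws IOException {
            // Hypothetical exported audit line; real lines carry the fields read by createAuditEvent.
            String line = "{\"ACTION\":\"Access\",\"STATUS\":\"200\",\"TIMESTAMP\":\"2018-07-29 16:13:45.000 UTC\"}";
            Map<String, String> fields = parseToMap(line);
            fields.forEach((key, value) -> System.out.println(key + " = " + value));
        }
    }
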
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
index 23019b9b15..3d95de7d5c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
@@ -21,6 +21,9 @@
package org.openecomp.sdc.asdctool.impl;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
@@ -28,17 +31,12 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-
/**
* simple util class to verify that the titan export json graph is not corrupted
*/
public class GraphJsonValidator {
- private static Logger log = LoggerFactory.getLogger(GraphJsonValidator.class.getName());
+ private static Logger log = Logger.getLogger(GraphJsonValidator.class.getName());
public boolean verifyTitanJson(String filePath) throws IOException {
ObjectMapper objectMapper = new ObjectMapper();
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
index 5fb5c7e7cf..0ec7af4906 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
@@ -20,30 +20,11 @@
package org.openecomp.sdc.asdctool.impl;
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
+import com.google.gson.Gson;
+import com.thinkaurelius.titan.core.*;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.lang3.tuple.ImmutablePair;
-import org.apache.tinkerpop.gremlin.structure.Direction;
-import org.apache.tinkerpop.gremlin.structure.Edge;
-import org.apache.tinkerpop.gremlin.structure.Element;
-import org.apache.tinkerpop.gremlin.structure.Graph;
-import org.apache.tinkerpop.gremlin.structure.Property;
-import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.*;
import org.apache.tinkerpop.gremlin.structure.io.IoCore;
import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONMapper;
import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONReader;
@@ -52,19 +33,15 @@ import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
import org.openecomp.sdc.asdctool.Utils;
import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
-import com.google.gson.Gson;
-import com.thinkaurelius.titan.core.TitanEdge;
-import com.thinkaurelius.titan.core.TitanFactory;
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.thinkaurelius.titan.core.TitanGraphQuery;
-import com.thinkaurelius.titan.core.TitanVertex;
+import java.io.*;
+import java.util.*;
+import java.util.Map.Entry;
public class GraphMLConverter {
- private static Logger log = LoggerFactory.getLogger(GraphMLConverter.class.getName());
+ private static Logger log = Logger.getLogger(GraphMLConverter.class.getName());
private Gson gson = new Gson();
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
index f34feb0af6..863f920b47 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
@@ -20,13 +20,6 @@
package org.openecomp.sdc.asdctool.impl;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
@@ -39,6 +32,13 @@ import org.jdom2.util.IteratorIterable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
public class GraphMLDataAnalyzer {
private static Logger log = LoggerFactory.getLogger(GraphMLDataAnalyzer.class);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
index a8674f8a1b..40fd6fb3d0 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
@@ -20,26 +20,24 @@
package org.openecomp.sdc.asdctool.impl;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
+import com.thinkaurelius.titan.core.TitanFactory;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanVertex;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
-import com.thinkaurelius.titan.core.TitanFactory;
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.thinkaurelius.titan.core.TitanVertex;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
/**
* Created by mlando on 2/23/2016.
*/
public class ProductLogic {
- private static Logger log = LoggerFactory.getLogger(ProductLogic.class.getName());
+ private static Logger log = Logger.getLogger(ProductLogic.class.getName());
public boolean deleteAllProducts(String titanFile, String beHost, String bePort, String adminUser) {
log.debug("retrieving all products from graph");
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
index 50781f25c2..a886367561 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
@@ -20,13 +20,12 @@
package org.openecomp.sdc.asdctool.impl;
-import java.util.Properties;
-
import org.apache.http.HttpStatus;
import org.openecomp.sdc.common.http.client.api.HttpRequest;
import org.openecomp.sdc.common.http.client.api.HttpResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+
+import java.util.Properties;
/**
* Created by mlando on 2/23/2016.
@@ -35,7 +34,7 @@ public class RestUtils {
final static String DELETE_PRODUCT = "http://%s:%s/sdc2/rest/v1/catalog/products/%s";
- private static Logger log = LoggerFactory.getLogger(RestUtils.class.getName());
+ private static Logger log = Logger.getLogger(RestUtils.class.getName());
public RestUtils() {
}
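
Several files touched by this change (for example GraphJsonValidator, GraphMLConverter, ProductLogic and RestUtils above) make the same substitution: the slf4j LoggerFactory lookup is replaced by the org.openecomp.sdc.common.log.wrappers.Logger wrapper obtained through Logger.getLogger(...). The sketch below shows the before/after shape of that swap; it assumes only the calls visible in this diff (getLogger and the slf4j-style parameterized log methods) and does not describe the wrapper's full API.

    // Before (slf4j), as in the removed lines:
    //   import org.slf4j.Logger;
    //   import org.slf4j.LoggerFactory;
    //   private static Logger log = LoggerFactory.getLogger(LoggingSketch.class.getName());

    // After, using the common log wrapper introduced by this change:
    import org.openecomp.sdc.common.log.wrappers.Logger;

    public class LoggingSketch {

        private static final Logger log = Logger.getLogger(LoggingSketch.class.getName());

        public void deleteProduct(String productUid) {
            // The "{}" placeholder style used elsewhere in the patch still applies.
            log.debug("deleting product {}", productUid);
        }
    }
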
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java
index ae226f1a82..28a5bbdb24 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java
@@ -20,10 +20,10 @@
package org.openecomp.sdc.asdctool.impl;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-
+import com.thinkaurelius.titan.core.*;
+import com.thinkaurelius.titan.core.schema.ConsistencyModifier;
+import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
+import com.thinkaurelius.titan.core.schema.TitanManagement;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.openecomp.sdc.be.dao.graph.datatype.ActionEnum;
@@ -39,14 +39,9 @@ import org.openecomp.sdc.be.resources.data.UserData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.thinkaurelius.titan.core.PropertyKey;
-import com.thinkaurelius.titan.core.TitanException;
-import com.thinkaurelius.titan.core.TitanFactory;
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.thinkaurelius.titan.core.TitanGraphQuery;
-import com.thinkaurelius.titan.core.schema.ConsistencyModifier;
-import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
-import com.thinkaurelius.titan.core.schema.TitanManagement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
public class TitanGraphInitializer {
@@ -90,6 +85,10 @@ public class TitanGraphInitializer {
return false;
}
+ private static boolean isVertexNotExist(Map<String, Object> properties) {
+ return !isVertexExist(properties);
+ }
+
private static void createDefaultAdminUser() {
createUser(getDefaultUserAdmin());
graph.tx().commit();
@@ -187,13 +186,19 @@ public class TitanGraphInitializer {
createVertexIndixes();
createEdgeIndixes();
createDefaultAdminUser();
- createRootCatalogVertex();
+ createRootVertex(VertexTypeEnum.CATALOG_ROOT);
+ createRootVertex(VertexTypeEnum.ARCHIVE_ROOT);
}
- private static void createRootCatalogVertex() {
- Vertex vertex = graph.addVertex();
- vertex.property(GraphPropertyEnum.UNIQUE_ID.getProperty(), IdBuilderUtils.generateUniqueId());
- vertex.property(GraphPropertyEnum.LABEL.getProperty(), VertexTypeEnum.CATALOG_ROOT.getName());
- graph.tx().commit();
+ private static void createRootVertex(VertexTypeEnum vertexTypeEnum) {
+ Map<String, Object> checkedProperties = new HashMap<>();
+ checkedProperties.put(GraphPropertiesDictionary.LABEL.getProperty(), vertexTypeEnum.getName());
+ if (isVertexNotExist(checkedProperties)) {
+ Vertex vertex = graph.addVertex();
+ vertex.property(GraphPropertyEnum.UNIQUE_ID.getProperty(), IdBuilderUtils.generateUniqueId());
+ vertex.property(GraphPropertyEnum.LABEL.getProperty(), vertexTypeEnum.getName());
+ graph.tx().commit();
+ }
}
+
}
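
The TitanGraphInitializer change above replaces the unconditional catalog-root creation with createRootVertex, which only adds a root vertex when no vertex with that label exists (the new isVertexNotExist guard), so the initializer can also create the ARCHIVE_ROOT vertex and stays safe to re-run. A small self-contained sketch of that check-then-create pattern follows; it uses an in-memory TinkerPop TinkerGraph instead of Titan and a hypothetical "vertex_type" property key rather than the SDC GraphPropertyEnum constants.

    import org.apache.tinkerpop.gremlin.structure.Graph;
    import org.apache.tinkerpop.gremlin.structure.Vertex;
    import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph;

    import java.util.UUID;

    public class RootVertexSketch {

        // Create the root vertex only if no vertex with this type property exists yet,
        // mirroring the isVertexNotExist guard added in TitanGraphInitializer.
        static Vertex ensureRootVertex(Graph graph, String typeValue) {
            return graph.traversal().V().has("vertex_type", typeValue).tryNext()
                    .orElseGet(() -> graph.addVertex("vertex_type", typeValue,
                            "uid", UUID.randomUUID().toString()));
        }

        public static void main(String[] args) {
            Graph graph = TinkerGraph.open();
            Vertex first = ensureRootVertex(graph, "catalog_root");
            Vertex second = ensureRootVertex(graph, "catalog_root"); // no duplicate is created
            System.out.println(first.equals(second)); // true
        }
    }
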
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
index 6f0136cba3..a5e0449b6b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
@@ -20,25 +20,23 @@
package org.openecomp.sdc.asdctool.impl;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
+import com.thinkaurelius.titan.core.TitanFactory;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanGraphQuery;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.openecomp.sdc.asdctool.Utils;
import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
-import com.thinkaurelius.titan.core.TitanFactory;
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.thinkaurelius.titan.core.TitanGraphQuery;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
public class UpdatePropertyOnVertex {
- private static Logger log = LoggerFactory.getLogger(UpdatePropertyOnVertex.class.getName());
+ private static Logger log = Logger.getLogger(UpdatePropertyOnVertex.class.getName());
public Integer updatePropertyOnServiceAtLeastCertified(String titanFile, Map<String, Object> keyValueToSet,
List<Map<String, Object>> orCriteria) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java
new file mode 100644
index 0000000000..8eec51071f
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java
@@ -0,0 +1,197 @@
+package org.openecomp.sdc.asdctool.impl;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.asdctool.migration.tasks.handlers.XlsOutputHandler;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgePropertyEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.jsongraph.utils.JsonParserUtils;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.CompositionDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.tosca.ToscaDataDefinition;
+import org.openecomp.sdc.be.model.jsontitan.enums.JsonConstantKeysEnum;
+import org.openecomp.sdc.be.model.operations.StorageException;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+
+import java.io.IOException;
+import java.util.*;
+
+import static java.util.Collections.emptyList;
+import static java.util.stream.Collectors.toList;
+
+@org.springframework.stereotype.Component("vrfObjectFixHandler")
+public class VrfObjectFixHandler {
+
+ private static final Logger log = Logger.getLogger(VrfObjectFixHandler.class);
+ private static final String VALID_TOSCA_NAME = "org.openecomp.nodes.VRFObject";
+ private static final Object[] outputTableTitle =
+ new String[]{"VRF OBJECT VERSION",
+ "CONTAINER NAME",
+ "CONTAINER UNIQUE ID",
+ "INSTANCE NAME",
+ "INSTANCE UNIQUE ID"};
+
+ private XlsOutputHandler outputHandler;
+ private final String sheetName = this.getClass().getSimpleName() + "Report";
+
+ private TitanDao titanDao;
+
+ public VrfObjectFixHandler(TitanDao titanDao) {
+ this.titanDao = titanDao;
+ }
+
+ public boolean handle(String mode, String outputPath) {
+ outputHandler = new XlsOutputHandler(outputPath, sheetName, outputTableTitle);
+ switch (mode){
+ case "detect" :
+ return detectCorruptedData();
+ case "fix":
+ return fixCorruptedData();
+ default :
+ log.debug("#handle - The invalid mode parameter has been received: {}", mode);
+ return false;
+ }
+ }
+
+ private boolean fixCorruptedData(){
+ try{
+ Map<GraphVertex,Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData = fetchCorruptedData();
+ corruptedData.forEach(this::fixCorruptedVfrObjectAndRelatedInstances);
+ titanDao.commit();
+ writeOutput(corruptedData);
+ } catch (Exception e){
+ titanDao.rollback();
+ log.debug("#fixCorruptedData - Failed to detect corrupted data. The exception occurred: ", e);
+ return false;
+ }
+ return true;
+ }
+
+ private boolean detectCorruptedData(){
+ try{
+ Map<GraphVertex,Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData = fetchCorruptedData();
+ writeOutput(corruptedData);
+ } catch (Exception e){
+ log.debug("#detectCorruptedData - Failed to detect corrupted data. The exception occurred: ", e);
+ return false;
+ }
+ return true;
+ }
+
+ private void fixCorruptedVfrObjectAndRelatedInstances(GraphVertex vfrObjectV, Map<Vertex, List<ComponentInstanceDataDefinition>> instances) {
+ fixCorruptedVfrObject(vfrObjectV);
+ instances.forEach(this::fixCorruptedContainerInstances);
+ }
+
+ private void fixCorruptedVfrObject(GraphVertex vfrObjectV) {
+ vfrObjectV.getMetadataProperties().put(GraphPropertyEnum.TOSCA_RESOURCE_NAME, VALID_TOSCA_NAME);
+ titanDao.updateVertex(vfrObjectV).left().on(this::rightOnUpdate);
+ }
+
+ private Map<GraphVertex,Map<Vertex,List<ComponentInstanceDataDefinition>>> fetchCorruptedData(){
+ Map<GraphVertex,Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData = new HashMap<>();
+ List<GraphVertex> vrfObjectsV = getCorruptedVrfObjects();
+ vrfObjectsV.forEach(vrfObjectV-> fillCorruptedData(vrfObjectV, corruptedData));
+ return corruptedData;
+ }
+
+ private List<GraphVertex> getCorruptedVrfObjects() {
+ Map<GraphPropertyEnum, Object> props = new EnumMap<>(GraphPropertyEnum.class);
+ props.put(GraphPropertyEnum.TOSCA_RESOURCE_NAME, "org.openecomp.resource.configuration.VRFObject");
+ return titanDao.getByCriteria(VertexTypeEnum.NODE_TYPE, props).left().on(this::rightOnGet);
+ }
+
+ private void fillCorruptedData(GraphVertex vrfObjectV, Map<GraphVertex, Map<Vertex, List<ComponentInstanceDataDefinition>>> findToUpdate) {
+ Map<Vertex, List<ComponentInstanceDataDefinition>> corruptedInstances = new HashMap<>();
+ findToUpdate.put(vrfObjectV, corruptedInstances);
+ Iterator<Edge> instanceEdges = vrfObjectV.getVertex().edges(Direction.IN, EdgeLabelEnum.INSTANCE_OF.name());
+ while(instanceEdges.hasNext()){
+ Edge edge = instanceEdges.next();
+ putCorruptedInstances(corruptedInstances, edge, (List<String>) titanDao.getProperty(edge, EdgePropertyEnum.INSTANCES));
+ }
+ }
+
+ private void putCorruptedInstances(Map<Vertex, List<ComponentInstanceDataDefinition>> corruptedInstances, Edge edge, List<String> ids) {
+ if(CollectionUtils.isNotEmpty(ids)){
+ Vertex container = edge.outVertex();
+ Map<String, ? extends ToscaDataDefinition> jsonObj = getJsonMap(container);
+ CompositionDataDefinition composition = (CompositionDataDefinition)jsonObj.get(JsonConstantKeysEnum.COMPOSITION.getValue());
+ corruptedInstances.put(container, composition.getComponentInstances()
+ .values()
+ .stream()
+ .filter(i->ids.contains(i.getUniqueId()))
+ .collect(toList()));
+ }
+ }
+
+ private void fixCorruptedContainerInstances(Vertex container, List<ComponentInstanceDataDefinition> corruptedInstances){
+ try {
+ Map jsonObj = getJsonMap(container);
+ fixComponentToscaName(corruptedInstances, jsonObj);
+ String jsonMetadataStr = JsonParserUtils.toJson(jsonObj);
+ container.property(GraphPropertyEnum.JSON.getProperty(), jsonMetadataStr);
+ } catch (IOException e) {
+ throw new StorageException("Failed to fix the corrupted instances of the container", e, TitanOperationStatus.GENERAL_ERROR);
+ }
+ }
+
+ private void fixComponentToscaName(List<ComponentInstanceDataDefinition> corruptedInstances, Map<String, ? extends ToscaDataDefinition> jsonObj) {
+ List<String> ids = corruptedInstances
+ .stream()
+ .map(ComponentInstanceDataDefinition::getUniqueId)
+ .collect(toList());
+
+ CompositionDataDefinition composition = (CompositionDataDefinition)jsonObj.get(JsonConstantKeysEnum.COMPOSITION.getValue());
+ composition.getComponentInstances()
+ .values()
+ .stream()
+ .filter(i->ids.contains(i.getUniqueId()))
+ .forEach(i->i.setToscaComponentName(VALID_TOSCA_NAME));
+ }
+
+ private Map getJsonMap(Vertex container) {
+ String json = (String)container.property(GraphPropertyEnum.JSON.getProperty()).value();
+ Map<GraphPropertyEnum, Object> properties = titanDao.getVertexProperties(container);
+ VertexTypeEnum label = VertexTypeEnum.getByName((String) (properties.get(GraphPropertyEnum.LABEL)));
+ return JsonParserUtils.toMap(json, label != null ? label.getClassOfJson() : null);
+ }
+
+ private void writeOutput(Map<GraphVertex, Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData) {
+ if(outputHandler.getOutputPath() != null){
+ if(MapUtils.isNotEmpty(corruptedData)){
+ corruptedData.forEach(this::addVrfObjectRecord);
+ } else {
+ outputHandler.addRecord("CORRUPTED VRF OBJECT NOT FOUND");
+ }
+ outputHandler.writeOutputAndCloseFile();
+ }
+ }
+
+ private List<GraphVertex> rightOnGet(TitanOperationStatus status) {
+ if(status == TitanOperationStatus.NOT_FOUND){
+ return emptyList();
+ }
+ throw new StorageException(status);
+ }
+ private GraphVertex rightOnUpdate(TitanOperationStatus status) {
+ throw new StorageException(status);
+ }
+
+ private void addVrfObjectRecord(GraphVertex vrfObject, Map<Vertex, List<ComponentInstanceDataDefinition>> instances) {
+ outputHandler.addRecord(vrfObject.getMetadataProperties().get(GraphPropertyEnum.VERSION).toString());
+ instances.forEach(this::addVrfObjectInstances);
+ }
+
+ private void addVrfObjectInstances(Vertex container, List<ComponentInstanceDataDefinition> instances) {
+ outputHandler.addRecord("", container.property(GraphPropertyEnum.NAME.getProperty()).value().toString(), container.property(GraphPropertyEnum.UNIQUE_ID.getProperty()).value().toString());
+ instances.forEach(i->outputHandler.addRecord("","","",i.getName(),i.getUniqueId()));
+ }
+}
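
How the new handler is invoked lives outside this file. The sketch below shows one plausible caller, assuming the new VrfObjectFixConfiguration exposes the vrfObjectFixHandler bean and that the first CLI argument selects the mode; both the wiring and the argument layout are assumptions, not the actual menu code.

    import org.openecomp.sdc.asdctool.configuration.VrfObjectFixConfiguration;
    import org.openecomp.sdc.asdctool.impl.VrfObjectFixHandler;
    import org.springframework.context.annotation.AnnotationConfigApplicationContext;

    public class VrfObjectFixSketch {
        // Hypothetical entry point: args[0] is "detect" or "fix", args[1] an optional report directory.
        public static void main(String[] args) {
            if (args.length < 1) {
                System.err.println("usage: <detect|fix> [outputPath]");
                System.exit(1);
            }
            String mode = args[0];
            String outputPath = args.length > 1 ? args[1] : null;
            AnnotationConfigApplicationContext context =
                    new AnnotationConfigApplicationContext(VrfObjectFixConfiguration.class);
            try {
                VrfObjectFixHandler handler = context.getBean("vrfObjectFixHandler", VrfObjectFixHandler.class);
                boolean ok = handler.handle(mode, outputPath);
                System.exit(ok ? 0 : 2);
            } finally {
                context.close();
            }
        }
    }

In "detect" mode the handler only writes the report (when an output path is supplied); "fix" additionally rewrites the TOSCA resource name and the container compositions before committing.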
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java
new file mode 100644
index 0000000000..bdf3d3b71b
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java
@@ -0,0 +1,38 @@
+package org.openecomp.sdc.asdctool.impl.internal.tool;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
+import org.openecomp.sdc.asdctool.utils.ReportWriter;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+
+public abstract class CommonInternalTool {
+ protected ReportWriter reportWriter;
+ private String reportType;
+
+ CommonInternalTool(String reportType){
+ this.reportType = reportType;
+ }
+ protected ReportWriter getReportWriter() throws IOException{
+ if ( reportWriter == null ){
+ reportWriter = new ReportWriter(reportType);
+ }
+ return reportWriter;
+ }
+ public void closeAll() {
+ try {
+ getReportWriter().close();
+ } catch (IOException e) {
+ ConsoleWriter.dataLine("\nFailed to close report file.");
+ }
+ }
+ protected void printComponentInfo(Map<GraphPropertyEnum, Object> metadataProperties) {
+ ConsoleWriter.dataLine("component from type", metadataProperties.get(GraphPropertyEnum.COMPONENT_TYPE));
+ ConsoleWriter.dataLine("component name", metadataProperties.get(GraphPropertyEnum.NAME));
+ ConsoleWriter.dataLine("component version", metadataProperties.get(GraphPropertyEnum.VERSION));
+ ConsoleWriter.dataLine("component state", metadataProperties.get(GraphPropertyEnum.STATE));
+ ConsoleWriter.dataLine("component is highest", metadataProperties.get(GraphPropertyEnum.IS_HIGHEST_VERSION));
+ ConsoleWriter.dataLine("component is archived", metadataProperties.get(GraphPropertyEnum.IS_ARCHIVED));
+ }
+}
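
CommonInternalTool centralizes lazy report creation and console output for the internal tools below; a subclass only passes its report type and uses getReportWriter()/closeAll(). A brief illustrative subclass (it would have to sit in the same package, and the "example" report type is made up):

    public class ExampleInternalTool extends CommonInternalTool {
        ExampleInternalTool() {
            super("example"); // report file for the "example" type is created lazily on first use
        }

        void run() {
            java.util.Map<String, Object> row = new java.util.HashMap<>();
            row.put("name", "demo");
            try {
                getReportWriter().report(row); // first call creates the ReportWriter
            } catch (java.io.IOException e) {
                org.openecomp.sdc.asdctool.utils.ConsoleWriter.dataLine("\nFailed to create report file.");
            } finally {
                closeAll(); // closes the writer, logging to the console on failure
            }
        }
    }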
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java
new file mode 100644
index 0000000000..21085942ac
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java
@@ -0,0 +1,187 @@
+package org.openecomp.sdc.asdctool.impl.internal.tool;
+
+import java.io.IOException;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Scanner;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
+import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
+import org.openecomp.sdc.be.model.ArtifactDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.openecomp.sdc.be.resources.data.ESArtifactData;
+import org.openecomp.sdc.be.tosca.CsarUtils;
+import org.openecomp.sdc.be.tosca.ToscaError;
+import org.openecomp.sdc.be.tosca.ToscaExportHandler;
+import org.openecomp.sdc.be.tosca.ToscaRepresentation;
+import org.openecomp.sdc.common.api.ArtifactTypeEnum;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.openecomp.sdc.common.util.GeneralUtility;
+import org.openecomp.sdc.exception.ResponseFormat;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import fj.data.Either;
+
+@org.springframework.stereotype.Component("csarGenerator")
+public class CsarGenerator extends CommonInternalTool {
+ public CsarGenerator() {
+ super("generate");
+ }
+
+ @Autowired
+ private TitanDao titanDao;
+ @Autowired
+ private CsarUtils csarUtils;
+ @Autowired
+ private ToscaOperationFacade toscaOperationFacade;
+ @Autowired
+ private ArtifactCassandraDao artifactCassandraDao;
+ @Autowired
+ private ToscaExportHandler toscaExportHandler;
+
+
+ private static Logger log = Logger.getLogger(CsarGenerator.class.getName());
+
+ public void generateCsar(String uuid, Scanner scanner) {
+ TitanOperationStatus status = TitanOperationStatus.OK;
+
+ Map<GraphPropertyEnum, Object> props = new EnumMap<>(GraphPropertyEnum.class);
+ props.put(GraphPropertyEnum.UUID, uuid);
+ props.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
+ props.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+
+ List<GraphVertex> byCriterria = titanDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, props).either(l -> l, r -> null);
+ if (byCriterria != null && !byCriterria.isEmpty()) {
+ if (byCriterria.size() > 1) {
+ ConsoleWriter.dataLine("Warning ! More that 1 certified service with uuid", uuid);
+ // TBD
+ } else {
+ GraphVertex metadataV = byCriterria.get(0);
+
+ printComponentInfo(metadataV.getMetadataProperties());
+ ConsoleWriter.dataLine("\nGenerate CSAR (yes/no)?");
+ String input = scanner.nextLine();
+ if (input.equalsIgnoreCase("yes")) {
+
+ status = handleService(metadataV, uuid);
+ }
+ }
+ } else {
+ ConsoleWriter.dataLine("No certified service with UUID", uuid);
+ }
+ if (status == TitanOperationStatus.OK) {
+ titanDao.commit();
+ } else {
+ titanDao.rollback();
+ }
+ }
+
+ private TitanOperationStatus handleService(GraphVertex metadataV, String uuid) {
+ TitanOperationStatus status = TitanOperationStatus.OK;
+ org.openecomp.sdc.be.model.Component component = toscaOperationFacade.getToscaFullElement(metadataV.getUniqueId()).either(l -> l, r -> null);
+ if (component != null) {
+
+ Supplier<byte[]> supplier = () -> generateToscaPayload(component);
+ generateArtifact(component, ArtifactTypeEnum.TOSCA_TEMPLATE, supplier);
+
+ supplier = () -> generateCsarPayload(component);
+ generateArtifact(component, ArtifactTypeEnum.TOSCA_CSAR, supplier);
+
+ GraphVertex toscaArtifactV = titanDao.getChildVertex(metadataV, EdgeLabelEnum.TOSCA_ARTIFACTS, JsonParseFlagEnum.ParseJson).either(l->l, r->null);
+ if ( toscaArtifactV != null ){
+ Map<String, ArtifactDataDefinition> copy = component.getToscaArtifacts().entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> new ArtifactDataDefinition(e.getValue())));
+ toscaArtifactV.setJson(copy);
+ titanDao.updateVertex(toscaArtifactV);
+ }
+
+ } else {
+ ConsoleWriter.dataLine("Failed to fetch certified service with UUID", uuid);
+ }
+ return status;
+ }
+
+ private TitanOperationStatus generateArtifact(Component component, ArtifactTypeEnum artifactType, Supplier<byte[]> supplier){
+ TitanOperationStatus status = TitanOperationStatus.GENERAL_ERROR;
+ ArtifactDefinition csarArtifact = null;
+ Optional<ArtifactDefinition> op = component.getToscaArtifacts().values().stream().filter(p -> p.getArtifactType().equals(artifactType.getType())).findAny();
+ if (op.isPresent()) {
+ csarArtifact = op.get();
+
+ status = savePayload(component, csarArtifact, supplier);
+ }
+ return status;
+ }
+
+ private byte[] generateCsarPayload(org.openecomp.sdc.be.model.Component component) {
+ return csarUtils.createCsar(component, true, true).either( l -> l, r -> null);
+ }
+ private byte[] generateToscaPayload(Component component){
+ return toscaExportHandler.exportComponent(component).either(l -> l.getMainYaml().getBytes(), r -> null);
+ }
+
+ private TitanOperationStatus savePayload(org.openecomp.sdc.be.model.Component component, ArtifactDefinition csarArtifact, Supplier<byte[]> supplier) {
+ byte[] payload = supplier.get();
+
+ if ( payload == null ) {
+ ConsoleWriter.dataLine("create artifact failed ", csarArtifact.getArtifactLabel());
+ return TitanOperationStatus.GENERAL_ERROR;
+ }
+ ConsoleWriter.dataLine("createartifact success ", csarArtifact.getArtifactLabel());
+ csarArtifact.setPayload(payload);
+ byte[] decodedPayload = csarArtifact.getPayloadData();
+
+ String uniqueId = UniqueIdBuilder.buildPropertyUniqueId(component.getUniqueId(), csarArtifact.getArtifactLabel());
+ csarArtifact.setUniqueId(uniqueId);
+ csarArtifact.setEsId(csarArtifact.getUniqueId());
+
+ ConsoleWriter.dataLine("create artifact unique id ", uniqueId);
+
+
+ csarArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
+ ESArtifactData artifactData = new ESArtifactData(csarArtifact.getEsId(), decodedPayload);
+ artifactCassandraDao.saveArtifact(artifactData);
+ ConsoleWriter.dataLine("Artifact generated and saved into Cassandra ", csarArtifact.getArtifactLabel());
+ report(component, csarArtifact);
+
+ return TitanOperationStatus.OK;
+ }
+
+ private void report(org.openecomp.sdc.be.model.Component component, ArtifactDefinition csarArtifact) {
+ Map<String, Object> dataToPrint = new HashMap<>();
+ dataToPrint.put("name", component.getName());
+ dataToPrint.put("type", component.getComponentType());
+ dataToPrint.put("version", component.getVersion());
+ dataToPrint.put("UUID", component.getUUID());
+ dataToPrint.put("state", component.getLifecycleState());
+ dataToPrint.put("archive", component.isArchived());
+ dataToPrint.put("id", component.getUniqueId());
+ dataToPrint.put("artifact name", csarArtifact.getArtifactLabel());
+ dataToPrint.put("artifact id", csarArtifact.getUniqueId());
+ dataToPrint.put("csar es id", csarArtifact.getEsId());
+ dataToPrint.put("artifact checksum", csarArtifact.getArtifactChecksum());
+
+ try {
+ getReportWriter().report(dataToPrint);
+ } catch (IOException e) {
+ ConsoleWriter.dataLine("\nFailed to created report file.");
+ }
+ }
+}
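
savePayload is parameterized with a Supplier<byte[]> so the same persistence flow serves both the TOSCA template and the CSAR payload. A stripped-down sketch of that pattern with placeholder suppliers (illustrative only, not the SDC API):

    import java.util.Optional;
    import java.util.function.Supplier;

    class PayloadSketch {
        // Mirrors savePayload(...) above: take a lazily evaluated payload supplier,
        // treat a null result as a generation failure, otherwise hand the bytes on for persistence.
        static Optional<byte[]> save(Supplier<byte[]> payloadSupplier) {
            byte[] payload = payloadSupplier.get();
            return payload == null ? Optional.empty() : Optional.of(payload);
        }

        public static void main(String[] args) {
            Supplier<byte[]> csar = () -> "csar-bytes".getBytes();  // stands in for csarUtils.createCsar(component, true, true)
            Supplier<byte[]> broken = () -> null;                   // a generator that failed
            System.out.println(save(csar).isPresent());    // true
            System.out.println(save(broken).isPresent());  // false
        }
    }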
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java
new file mode 100644
index 0000000000..1dd6c7e10a
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java
@@ -0,0 +1,200 @@
+package org.openecomp.sdc.asdctool.impl.internal.tool;
+
+import com.thinkaurelius.titan.core.TitanVertex;
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.jsontitan.operations.NodeTypeOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaElementOperation;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Scanner;
+
+@Component("deleteComponentHandler")
+public class DeleteComponentHandler extends CommonInternalTool{
+ @Autowired
+ private TitanDao titanDao;
+ @Autowired
+ private NodeTypeOperation nodeTypeOperation;
+ @Autowired
+ private TopologyTemplateOperation topologyTemplateOperation;
+
+
+ private static Logger log = Logger.getLogger(DeleteComponentHandler.class.getName());
+
+
+ public DeleteComponentHandler(){
+ super("delete");
+ }
+ public void deleteComponent(String id, Scanner scanner) {
+ TitanOperationStatus status = TitanOperationStatus.OK;
+ GraphVertex metadataVertex = titanDao.getVertexById(id).either(l -> l, r -> null);
+ if (metadataVertex != null) {
+ status = handleComponent(scanner, metadataVertex);
+ } else {
+ ConsoleWriter.dataLine("No vertex for id", id);
+ }
+ if (status == TitanOperationStatus.OK) {
+ titanDao.commit();
+ } else {
+ titanDao.rollback();
+ }
+ }
+
+ private TitanOperationStatus handleComponent(Scanner scanner, GraphVertex metadataVertex) {
+ Map<GraphPropertyEnum, Object> metadataProperties = metadataVertex.getMetadataProperties();
+ TitanOperationStatus status = TitanOperationStatus.OK;
+ printComponentInfo(metadataProperties);
+
+ Iterator<Edge> edges = metadataVertex.getVertex().edges(Direction.OUT, EdgeLabelEnum.VERSION.name());
+ if (edges != null && edges.hasNext()) {
+ ConsoleWriter.dataLine("\ncomponent is not latest version and cannot be deleted");
+ } else {
+ ConsoleWriter.dataLine("\ncomponent is latest .");
+ if (isReferenceExist(metadataVertex)) {
+ ConsoleWriter.dataLine("\nExist reference on component ( istance, proxy or allotted). Component cannot be deleted");
+ } else {
+ ConsoleWriter.dataLine("\nNo references. Try to delete (yes/no)?");
+ String input = scanner.nextLine();
+ if (input.equalsIgnoreCase("yes")) {
+ status = handleComponent(metadataVertex);
+ }
+ }
+ }
+ return status;
+ }
+
+ private TitanOperationStatus handleComponent(GraphVertex metadataVertex) {
+ ToscaElementOperation toscaElementOperation = getOperationByLabel(metadataVertex);
+ Iterator<Edge> edges = metadataVertex.getVertex().edges(Direction.IN, EdgeLabelEnum.VERSION.name());
+ if (edges != null && edges.hasNext()) {
+ TitanOperationStatus status = updatePreviousVersion(metadataVertex, edges);
+ if ( status != TitanOperationStatus.OK ){
+ return status;
+ }
+ }
+ toscaElementOperation.deleteToscaElement(metadataVertex)
+ .left()
+ .map(l -> {
+ ConsoleWriter.dataLine("\nDeleted");
+ report(metadataVertex);
+ return TitanOperationStatus.OK;
+ })
+ .right()
+ .map(r-> {
+ ConsoleWriter.dataLine("\nFailed to delete. see log file");
+ return r;
+ });
+ return TitanOperationStatus.OK;
+ }
+
+ private TitanOperationStatus updatePreviousVersion(GraphVertex metadataVertex, Iterator<Edge> edges) {
+ Edge edge = edges.next();
+ TitanVertex prevVersionVertex = (TitanVertex) edge.outVertex();
+ // check if previous version is deleted
+ Boolean isDeleted = (Boolean) titanDao.getProperty(prevVersionVertex, GraphPropertyEnum.IS_DELETED.getProperty());
+ if (isDeleted != null && isDeleted) {
+ ConsoleWriter.dataLine("\nPrevoius version is marked as deleted. Component cannot be deleted");
+ return TitanOperationStatus.GENERAL_ERROR;
+ }
+ // update highest property for previous version
+        TitanOperationStatus status = updateStateOfPreviousVersion(prevVersionVertex);
+ if ( TitanOperationStatus.OK != status ){
+ return status;
+ }
+
+ // connect to catalog or archive
+ return connectToCatalogAndArchive(metadataVertex, prevVersionVertex);
+ }
+
+    private TitanOperationStatus updateStateOfPreviousVersion(TitanVertex prevVersionVertex) {
+ String prevId = (String) titanDao.getProperty(prevVersionVertex, GraphPropertyEnum.UNIQUE_ID.getProperty());
+ Either<GraphVertex, TitanOperationStatus> prevGraphVertex = titanDao.getVertexById(prevId);
+ GraphVertex prevVertex = prevGraphVertex.left().value();
+ prevVertex.addMetadataProperty(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
+ titanDao.updateVertex(prevVertex);
+
+ Iterator<Edge> edgesIter = prevVersionVertex.edges(Direction.IN, EdgeLabelEnum.LAST_STATE.name());
+ if ( edgesIter.hasNext() ) {
+ Edge lastStateEdge = edgesIter.next();
+ Vertex lastModifier = lastStateEdge.outVertex();
+ TitanOperationStatus replaceRes = titanDao.replaceEdgeLabel(lastModifier, prevVersionVertex, lastStateEdge, EdgeLabelEnum.LAST_STATE, EdgeLabelEnum.STATE);
+ if (replaceRes != TitanOperationStatus.OK) {
+ log.info("Failed to replace label from {} to {}. status = {}", EdgeLabelEnum.LAST_STATE, EdgeLabelEnum.STATE, replaceRes);
+ ConsoleWriter.dataLine("\nFailed to replace LAST_STATE edge . Failed to delete");
+ return TitanOperationStatus.GENERAL_ERROR;
+ }
+ }
+ return TitanOperationStatus.OK;
+ }
+
+
+ private TitanOperationStatus connectToCatalogAndArchive(GraphVertex metadataVertex, TitanVertex prevVersionVertex) {
+
+ TitanOperationStatus status = connectByLabel(metadataVertex, prevVersionVertex, EdgeLabelEnum.CATALOG_ELEMENT, VertexTypeEnum.CATALOG_ROOT);
+ if ( status == TitanOperationStatus.OK ){
+ status = connectByLabel(metadataVertex, prevVersionVertex, EdgeLabelEnum.ARCHIVE_ELEMENT, VertexTypeEnum.ARCHIVE_ROOT);
+ }
+ return status;
+ }
+
+ private TitanOperationStatus connectByLabel(GraphVertex metadataVertex, TitanVertex prevVersionVertex, EdgeLabelEnum edgeLabel, VertexTypeEnum vertexlabel) {
+ Iterator<Edge> edgesToCatalog = metadataVertex.getVertex().edges(Direction.IN, edgeLabel.name());
+ if ( edgesToCatalog != null && edgesToCatalog.hasNext() ){
+ //exist edge move to prev version
+ Either<GraphVertex, TitanOperationStatus> catalog = titanDao.getVertexByLabel(vertexlabel);
+ if (catalog.isRight()) {
+ log.debug("Failed to fetch {} vertex, error {}", vertexlabel, catalog.right().value());
+ return catalog.right().value();
+ }
+ GraphVertex catalogV = catalog.left().value();
+ Edge edge = edgesToCatalog.next();
+ return titanDao.createEdge(catalogV.getVertex(), prevVersionVertex, edgeLabel, edge );
+ }
+ return TitanOperationStatus.OK;
+ }
+
+ private boolean isReferenceExist(GraphVertex metadataVertex) {
+ return existEdgeByLabel(metadataVertex, EdgeLabelEnum.INSTANCE_OF) || existEdgeByLabel(metadataVertex, EdgeLabelEnum.PROXY_OF) || existEdgeByLabel(metadataVertex, EdgeLabelEnum.ALLOTTED_OF);
+ }
+
+ private boolean existEdgeByLabel(GraphVertex metadataVertex, EdgeLabelEnum label) {
+ Iterator<Edge> edges = metadataVertex.getVertex().edges(Direction.IN, label.name());
+ return (edges != null && edges.hasNext());
+ }
+
+ private ToscaElementOperation getOperationByLabel(GraphVertex metadataVertex) {
+ VertexTypeEnum label = metadataVertex.getLabel();
+ if (label == VertexTypeEnum.NODE_TYPE) {
+ return nodeTypeOperation;
+ } else {
+ return topologyTemplateOperation;
+ }
+ }
+
+ private void report(GraphVertex metadataVertex) {
+ try {
+ getReportWriter().report(metadataVertex.getMetadataProperties());
+ } catch (IOException e) {
+ ConsoleWriter.dataLine("\nFailed to created report file.");
+ }
+ }
+
+
+
+
+}
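
These handlers collapse fj.data.Either results either with .either(l -> l, r -> null) or via left()/right() projections. A self-contained sketch of both conventions, with placeholder values standing in for vertices and Titan status codes:

    import fj.data.Either;

    class EitherSketch {
        public static void main(String[] args) {
            Either<String, Integer> found = Either.left("vertex-123");
            Either<String, Integer> missing = Either.right(404);

            // Collapse to a value-or-null, as in titanDao.getVertexById(id).either(l -> l, r -> null)
            String vertex = found.either(l -> l, r -> null);
            String none = missing.either(l -> l, r -> null);
            System.out.println(vertex + " / " + none); // vertex-123 / null

            // Branch explicitly, as the deleteToscaElement(...) handling above does
            missing.right().map(status -> {
                System.out.println("failed with status " + status);
                return status;
            });
        }
    }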
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java
index 16eb0be95e..937cb7c0b4 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java
@@ -1,16 +1,14 @@
package org.openecomp.sdc.asdctool.impl.validator;
-import java.util.List;
-
import org.openecomp.sdc.asdctool.impl.validator.executers.IArtifactValidatorExecuter;
-import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.beans.factory.annotation.Autowired;
+import java.util.List;
+
public class ArtifactToolBL {
- private static Logger log = LoggerFactory.getLogger(ValidationToolBL.class.getName());
+    private static Logger log = Logger.getLogger(ArtifactToolBL.class.getName());
@Autowired
protected List<IArtifactValidatorExecuter> validators;
@@ -19,13 +17,13 @@ public class ArtifactToolBL {
public boolean validateAll() {
for (IArtifactValidatorExecuter validatorExec: validators) {
- System.out.println("ValidatorExecuter "+validatorExec.getName()+" started");
+ log.debug("ValidatorExecuter "+validatorExec.getName()+" started");
if (!validatorExec.executeValidations()) {
allValid = false;
- System.out.println("ValidatorExecuter "+validatorExec.getName()+" finished with warnings");
+ log.debug("ValidatorExecuter "+validatorExec.getName()+" finished with warnings");
}
else {
- System.out.println("ValidatorExecuter "+validatorExec.getName()+" finished successfully");
+ log.debug("ValidatorExecuter "+validatorExec.getName()+" finished successfully");
}
}
return allValid;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBL.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBL.java
index 9406351a6c..b8065e03f6 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBL.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBL.java
@@ -1,21 +1,19 @@
package org.openecomp.sdc.asdctool.impl.validator;
-import java.util.List;
-
import org.openecomp.sdc.asdctool.impl.validator.executers.ValidatorExecuter;
-import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
+import java.util.List;
+
/**
* Created by chaya on 7/3/2017.
*/
@Component
public class ValidationToolBL {
- private static Logger log = LoggerFactory.getLogger(ValidationToolBL.class.getName());
+ private static Logger log = Logger.getLogger(ValidationToolBL.class.getName());
@Autowired
protected List<ValidatorExecuter> validators;
@@ -24,13 +22,13 @@ public class ValidationToolBL {
public boolean validateAll() {
for (ValidatorExecuter validatorExec: validators) {
- System.out.println("ValidatorExecuter "+validatorExec.getName()+" started");
+ log.debug("ValidatorExecuter "+validatorExec.getName()+" started");
if (!validatorExec.executeValidations()) {
allValid = false;
- System.out.println("ValidatorExecuter "+validatorExec.getName()+" finished with warnings");
+ log.debug("ValidatorExecuter "+validatorExec.getName()+" finished with warnings");
}
else {
- System.out.println("ValidatorExecuter "+validatorExec.getName()+" finished successfully");
+ log.debug("ValidatorExecuter "+validatorExec.getName()+" finished successfully");
}
}
return allValid;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
index 63e95d568b..eb2574405f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
@@ -2,11 +2,7 @@ package org.openecomp.sdc.asdctool.impl.validator.config;
import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL;
import org.openecomp.sdc.asdctool.impl.validator.ValidationToolBL;
-import org.openecomp.sdc.asdctool.impl.validator.executers.NodeToscaArtifactsValidatorExecuter;
-import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceToscaArtifactsValidatorExecutor;
-import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceValidatorExecuter;
-import org.openecomp.sdc.asdctool.impl.validator.executers.VFToscaArtifactValidatorExecutor;
-import org.openecomp.sdc.asdctool.impl.validator.executers.VfValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.*;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ArtifactValidationUtils;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ServiceArtifactValidationTask;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.VfArtifactValidationTask;
@@ -19,16 +15,7 @@ import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
import org.openecomp.sdc.be.dao.titan.TitanGraphClient;
import org.openecomp.sdc.be.model.DerivedNodeTypeResolver;
-import org.openecomp.sdc.be.model.jsontitan.operations.ArtifactsOperations;
-import org.openecomp.sdc.be.model.jsontitan.operations.ByToscaNameDerivedNodeTypeResolver;
-import org.openecomp.sdc.be.model.jsontitan.operations.CategoryOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.GroupsOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.NodeTemplateOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.NodeTypeOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.ToscaDataOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.ToscaElementLifecycleOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.jsontitan.operations.*;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
index 3585474398..089e9729d4 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
@@ -1,20 +1,6 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import java.io.BufferedWriter;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-
+import fj.data.Either;
import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
@@ -26,113 +12,121 @@ import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.ComponentParametersView;
import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.beans.factory.annotation.Autowired;
-import fj.data.Either;
-
-public class ArtifactValidatorExecuter {
-
- @Autowired
- protected TitanDao titanDao;
-
- @Autowired
- private ToscaOperationFacade toscaOperationFacade;
- private static Logger log = LoggerFactory.getLogger(ArtifactValidatorExecuter.class.getName());
-
- protected String name;
-
- public void setName(String name) {
- this.name = name;
- }
-
- public String getName() {
- return name;
- }
-
- public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type,
- Map<GraphPropertyEnum, Object> hasProps) {
-
- Map<String, List<Component>> result = new HashMap<>();
- Either<List<GraphVertex>, TitanOperationStatus> resultsEither = titanDao.getByCriteria(type, hasProps);
- if (resultsEither.isRight()) {
- System.out.println("getVerticesToValidate failed " + resultsEither.right().value());
- return result;
- }
- System.out.println("getVerticesToValidate: " + resultsEither.left().value().size() + " vertices to scan");
- List<GraphVertex> componentsList = resultsEither.left().value();
- componentsList.forEach(vertex -> {
- String ivariantUuid = (String)vertex.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID);
- if (!result.containsKey(ivariantUuid)) {
- result.put(ivariantUuid, new ArrayList<>());
- }
- List<Component> compList = result.get(ivariantUuid);
-
- ComponentParametersView filter = new ComponentParametersView(true);
- filter.setIgnoreArtifacts(false);
-
- Either<Component, StorageOperationStatus> toscaElement
- = toscaOperationFacade.getToscaElement(vertex.getUniqueId(), filter);
- if (toscaElement.isRight()) {
- System.out.println("getVerticesToValidate: failed to find element"
- + vertex.getUniqueId() + " staus is" + toscaElement.right().value());
- } else {
- compList.add(toscaElement.left().value());
- }
- });
- return result;
- }
-
- public boolean validate(Map<String, List<Component>> vertices) {
- boolean result = true;
- long time = System.currentTimeMillis();
- String fileName = ValidationConfigManager.getOutputFilePath() + this.getName() + "_" + time + ".csv";
-
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"))) {
- writer.write("name, UUID, invariantUUID, state, version\n");
- Collection<List<Component>> collection = vertices.values();
- for (List<Component> compList: collection) {
- Set<String> artifactEsId = new HashSet<>();
- for (Component component: compList) {
- Map<String, ArtifactDefinition> toscaArtifacts = component.getToscaArtifacts();
- Optional<ArtifactDefinition> op = toscaArtifacts.values()
- .stream().filter(a -> artifactEsId.contains(a.getEsId())).findAny();
- if (op.isPresent()) {
- result = false;
- writeModuleResultToFile(writer, compList);
- writer.flush();
- break;
- } else {
- artifactEsId.addAll(toscaArtifacts.values()
- .stream().map(ArtifactDefinition::getEsId).collect(Collectors.toList()));
- }
- }
- }
- } catch (Exception e) {
- log.info("Failed to fetch vf resources ", e);
- return false;
- } finally {
- titanDao.commit();
- }
- return result;
- }
+import java.io.*;
+import java.util.*;
+import java.util.stream.Collectors;
- private void writeModuleResultToFile(Writer writer, List<Component> components) {
- try {
- // "service name, service id, state, version
- for (Component component: components ) {
- StringBuilder sb = new StringBuilder(component.getName());
- sb.append(",").append(component.getUniqueId())
- .append(",").append(component.getInvariantUUID())
- .append(",").append(component.getLifecycleState())
- .append(",").append(component.getVersion())
- .append("\n");
+public class ArtifactValidatorExecuter {
+
+ @Autowired
+ protected TitanDao titanDao;
+
+ @Autowired
+ private ToscaOperationFacade toscaOperationFacade;
+ private static Logger log = Logger.getLogger(ArtifactValidatorExecuter.class.getName());
+
+ protected String name;
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+
+
+ public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type, Map<GraphPropertyEnum, Object> hasProps){
+ Map<String, List<Component>> result = new HashMap<>();
+ Either<List<GraphVertex>, TitanOperationStatus> resultsEither = titanDao.getByCriteria(type, hasProps);
+ if (resultsEither.isRight()) {
+ System.out.println("getVerticesToValidate failed "+ resultsEither.right().value());
+ return result;
+ }
+ System.out.println("getVerticesToValidate: "+resultsEither.left().value().size()+" vertices to scan");
+ List<GraphVertex> componentsList = resultsEither.left().value();
+        componentsList.forEach(vertex -> {
+            String invariantUuid = (String) vertex.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID);
+            if (!result.containsKey(invariantUuid)) {
+                result.put(invariantUuid, new ArrayList<>());
+            }
+            List<Component> compList = result.get(invariantUuid);
+
+            ComponentParametersView filter = new ComponentParametersView(true);
+            filter.setIgnoreArtifacts(false);
+
+            Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaElement(vertex.getUniqueId(), filter);
+            if (toscaElement.isRight()) {
+                System.out.println("getVerticesToValidate: failed to find element " + vertex.getUniqueId() + ", status is " + toscaElement.right().value());
+            } else {
+                compList.add(toscaElement.left().value());
+            }
+        });
+
+ return result;
+ }
+
+    public boolean validate(Map<String, List<Component>> vertices) {
+        boolean result = true;
+        long time = System.currentTimeMillis();
+        String fileName = ValidationConfigManager.getOutputFilePath() + this.getName() + "_" + time + ".csv";
+        // try-with-resources guarantees the report file is flushed and closed even when validation fails
+        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"))) {
+            writer.write("name, UUID, invariantUUID, state, version\n");
+            Collection<List<Component>> collection = vertices.values();
+            for (List<Component> compList : collection) {
+                Set<String> artifactEsId = new HashSet<>();
+                for (Component component : compList) {
+                    Map<String, ArtifactDefinition> toscaArtifacts = component.getToscaArtifacts();
+                    Optional<ArtifactDefinition> op = toscaArtifacts.values()
+                            .stream().filter(a -> artifactEsId.contains(a.getEsId())).findAny();
+                    if (op.isPresent()) {
+                        // an ES id already used by an earlier version of this group marks the group as invalid
+                        result = false;
+                        writeModuleResultToFile(writer, compList);
+                        writer.flush();
+                        break;
+                    } else {
+                        artifactEsId.addAll(toscaArtifacts.values().stream().map(ArtifactDefinition::getEsId).collect(Collectors.toList()));
+                    }
+                }
+            }
+        } catch (Exception e) {
+            log.info("Failed to fetch vf resources ", e);
+            return false;
+        } finally {
+            titanDao.commit();
+        }
+        return result;
+    }
+
+    private void writeModuleResultToFile(Writer writer, List<Component> components) {
+        try {
+            // columns: service name, service id, invariant UUID, state, version
+            for (Component component : components) {
+                StringBuilder sb = new StringBuilder(component.getName());
+                sb.append(",").append(component.getUniqueId())
+                        .append(",").append(component.getInvariantUUID())
+                        .append(",").append(component.getLifecycleState())
+                        .append(",").append(component.getVersion())
+                        .append("\n");
+                writer.write(sb.toString());
+            }
+        } catch (IOException e) {
+            log.debug("Cannot write module result to file", e);
+        }
+    }
- writer.write(sb.toString());
- }
- } catch (IOException e) {
- log.debug("Cannot write module result to file", e);
- }
- }
}
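
The validate() method above flags an invariant-UUID group as soon as one of its versions reuses a TOSCA artifact ES id already seen in an earlier version. The core check, stripped of the Titan and CSV plumbing and using a hypothetical Artifact holder, looks roughly like this:

    import java.util.*;

    class DuplicateEsIdSketch {
        static final class Artifact {
            final String esId;
            Artifact(String esId) { this.esId = esId; }
        }

        // Returns true when a version's artifact reuses an ES id from an earlier version,
        // which is the condition that makes validate() write the group to the CSV report.
        static boolean hasDuplicateEsId(List<List<Artifact>> versions) {
            Set<String> seen = new HashSet<>();
            for (List<Artifact> artifacts : versions) {
                boolean clash = artifacts.stream().anyMatch(a -> seen.contains(a.esId));
                if (clash) {
                    return true;
                }
                artifacts.forEach(a -> seen.add(a.esId));
            }
            return false;
        }

        public static void main(String[] args) {
            List<List<Artifact>> ok = Arrays.asList(
                    Collections.singletonList(new Artifact("es-1")),
                    Collections.singletonList(new Artifact("es-2")));
            List<List<Artifact>> dup = Arrays.asList(
                    Collections.singletonList(new Artifact("es-1")),
                    Collections.singletonList(new Artifact("es-1")));
            System.out.println(hasDuplicateEsId(ok));  // false
            System.out.println(hasDuplicateEsId(dup)); // true
        }
    }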
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
index 6715c8a955..8e6ddabb7b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
@@ -1,15 +1,15 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
public class NodeToscaArtifactsValidatorExecuter extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{
protected String name;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
index 2fe5abef3c..a6377fce50 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
@@ -1,15 +1,15 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
public class ServiceToscaArtifactsValidatorExecutor extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java
index b64fdde8d7..efad4bb392 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java
@@ -1,15 +1,13 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import java.util.ArrayList;
-import java.util.List;
-
import org.openecomp.sdc.asdctool.impl.validator.tasks.ServiceValidationTask;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import java.util.ArrayList;
+import java.util.List;
+
/**
* Created by chaya on 7/4/2017.
*/
@@ -18,8 +16,6 @@ public class ServiceValidatorExecuter extends TopologyTemplateValidatorExecuter
@Autowired(required = false)
List<ServiceValidationTask> tasks = new ArrayList<>();
- private static Logger log = LoggerFactory.getLogger(VfValidatorExecuter.class.getName());
-
public ServiceValidatorExecuter() {
setName("SERVICE_VALIDATOR");
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java
index f875aa4742..a10d3193b2 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java
@@ -1,12 +1,6 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import java.util.ArrayList;
-import java.util.EnumMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import fj.data.Either;
import org.openecomp.sdc.asdctool.impl.validator.tasks.TopologyTemplateValidationTask;
import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
@@ -17,19 +11,17 @@ import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
-import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.beans.factory.annotation.Autowired;
-import fj.data.Either;
+import java.util.*;
/**
* Created by chaya on 7/3/2017.
*/
public class TopologyTemplateValidatorExecuter {
- private static Logger log = LoggerFactory.getLogger(VfValidatorExecuter.class.getName());
+    private static Logger log = Logger.getLogger(TopologyTemplateValidatorExecuter.class.getName());
@Autowired
protected TitanDao titanDao;
@@ -53,10 +45,10 @@ public class TopologyTemplateValidatorExecuter {
Either<List<GraphVertex>, TitanOperationStatus> results = titanDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, props);
if (results.isRight()) {
- System.out.println("getVerticesToValidate failed "+ results.right().value());
+ log.error("getVerticesToValidate failed "+ results.right().value());
return new ArrayList<>();
}
- System.out.println("getVerticesToValidate: "+results.left().value().size()+" vertices to scan");
+ log.info("getVerticesToValidate: "+results.left().value().size()+" vertices to scan");
return results.left().value();
}
@@ -85,7 +77,7 @@ public class TopologyTemplateValidatorExecuter {
ReportManager.reportTaskEnd(vertex.getUniqueId(), task.getTaskName(), result);
}
String componentScanStatus = successAllTasks? "success" : "failed";
- System.out.println("Topology Template "+vertex.getUniqueId()+" Validation finished with "+componentScanStatus);
+ log.info("Topology Template "+vertex.getUniqueId()+" Validation finished with "+componentScanStatus);
}
ReportManager.reportValidatorTypeSummary(getName(), failedTasks, successTasks);
return successAllVertices;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
index bc22f2e424..5f1413f992 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
@@ -1,9 +1,5 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
@@ -11,6 +7,10 @@ import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
public class VFToscaArtifactValidatorExecutor extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{
public VFToscaArtifactValidatorExecutor() {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java
index b4f333dab3..221e665d4a 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java
@@ -1,13 +1,13 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import java.util.ArrayList;
-import java.util.List;
-
import org.openecomp.sdc.asdctool.impl.validator.tasks.VfValidationTask;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.springframework.beans.factory.annotation.Autowired;
+import java.util.ArrayList;
+import java.util.List;
+
/**
* Created by chaya on 7/3/2017.
*/
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
index 90d8f8639c..faccd647c2 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
@@ -1,11 +1,6 @@
package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-
+import fj.data.Either;
import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
@@ -19,7 +14,7 @@ import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.springframework.beans.factory.annotation.Autowired;
-import fj.data.Either;
+import java.util.*;
/**
* Created by chaya on 7/6/2017.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactsVertexResult.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactsVertexResult.java
index c82a7b9d4f..c010148644 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactsVertexResult.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactsVertexResult.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
+import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
+
import java.util.HashSet;
import java.util.Set;
-import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
-
/**
* Created by chaya on 7/25/2017.
*/
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTask.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTask.java
index 1c18fb751e..ed2ceb947b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTask.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTask.java
@@ -3,7 +3,6 @@ package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
import org.openecomp.sdc.asdctool.impl.validator.tasks.VfValidationTask;
import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
import org.springframework.beans.factory.annotation.Autowired;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java
index 78045a56a6..272b546a5d 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java
@@ -1,13 +1,6 @@
package org.openecomp.sdc.asdctool.impl.validator.tasks.moduleJson;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
+import fj.data.Either;
import org.openecomp.sdc.asdctool.impl.validator.tasks.ServiceValidationTask;
import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
@@ -24,7 +17,8 @@ import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.springframework.beans.factory.annotation.Autowired;
-import fj.data.Either;
+import java.util.*;
+import java.util.stream.Collectors;
/**
* Created by chaya on 7/18/2017.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnum.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnum.java
index e6a09ee875..245d38c853 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnum.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnum.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.impl.validator.utils;
+import org.openecomp.sdc.asdctool.impl.validator.executers.VfValidatorExecuter;
+
import java.util.ArrayList;
import java.util.List;
-import org.openecomp.sdc.asdctool.impl.validator.executers.VfValidatorExecuter;
-
/**
* Created by chaya on 7/4/2017.
*/
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
index ebedf0a568..88a114cb63 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
@@ -1,18 +1,14 @@
package org.openecomp.sdc.asdctool.impl.validator.utils;
+import org.apache.commons.lang.text.StrBuilder;
+import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-
-import org.apache.commons.lang.text.StrBuilder;
-import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
-import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import java.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java
index a2e59464c0..def9ed2c8a 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java
@@ -3,13 +3,12 @@ package org.openecomp.sdc.asdctool.main;
import org.openecomp.sdc.asdctool.configuration.ArtifactUUIDFixConfiguration;
import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
import org.openecomp.sdc.asdctool.impl.ArtifactUuidFix;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
public class ArtifactUUIDFixMenu {
- private static Logger log = LoggerFactory.getLogger(ArtifactUUIDFixMenu.class);
+ private static Logger log = Logger.getLogger(ArtifactUUIDFixMenu.class.getName());
public static void main(String[] args) {
if (args == null || args.length < 3) {
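Note on the hunk above: this patch repeatedly swaps the raw slf4j Logger/LoggerFactory pair for the SDC logging wrapper across the asdctool entry points. A minimal sketch of the new idiom (ExampleMenu is an illustrative name, not a class in this change; both factory forms seen in the diff are shown):

import org.openecomp.sdc.common.log.wrappers.Logger;

// Minimal sketch of the logging idiom this patch converges on.
public class ExampleMenu {
    // both factory forms appear in this patch: by class and by class name
    private static final Logger log = Logger.getLogger(ExampleMenu.class);
    private static final Logger logByName = Logger.getLogger(ExampleMenu.class.getName());

    public static void main(String[] args) {
        log.info("started with {} argument(s)", args.length);
        logByName.debug("debug output goes through the same wrapper");
    }
}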
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java
index ba8f3be4e9..2e84923188 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java
@@ -7,12 +7,11 @@ import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
public class ArtifactValidatorTool {
- private static Logger log = LoggerFactory.getLogger(ValidationTool.class.getName());
+ private static Logger log = Logger.getLogger(ValidationTool.class.getName());
public static void main(String[] args) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java
new file mode 100644
index 0000000000..e2b35ad739
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java
@@ -0,0 +1,45 @@
+package org.openecomp.sdc.asdctool.main;
+
+import java.util.Scanner;
+
+import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
+import org.openecomp.sdc.asdctool.configuration.CsarGeneratorConfiguration;
+import org.openecomp.sdc.asdctool.impl.internal.tool.CsarGenerator;
+import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+public class CsarGeneratorTool extends SdcInternalTool {
+
+ public static void main(String[] args) {
+ if (args == null) {
+ ConsoleWriter.dataLine("Usage: <configuration dir> ");
+ System.exit(1);
+ }
+ String appConfigDir = args[0];
+
+ disableConsole();
+
+ ConfigurationUploader.uploadConfigurationFiles(appConfigDir);
+ AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(CsarGeneratorConfiguration.class);
+ CsarGenerator csarGenerator = context.getBean(CsarGenerator.class);
+ ConsoleWriter.dataLine("STARTED... ");
+
+ String input = "";
+ Scanner scanner = new Scanner(System.in);
+ do {
+ ConsoleWriter.dataLine("Enter next service UUID or exit: ");
+ input = scanner.nextLine();
+ if (!input.equals("exit")) {
+ if (!input.isEmpty()) {
+ ConsoleWriter.dataLine("Your UUID is ", input);
+ csarGenerator.generateCsar(input, scanner);
+ } else {
+ ConsoleWriter.dataLine("Your UUID is empty. Try again.");
+ }
+ }
+ } while (!input.equals("exit"));
+ csarGenerator.closeAll();
+ ConsoleWriter.dataLine("CsarGeneratorTool exit...");
+ System.exit(0);
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
index 665a9b75d1..496eb1864e 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
@@ -26,12 +26,11 @@ import org.openecomp.sdc.be.dao.cassandra.schema.SdcSchemaBuilder;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
public class DataSchemaMenu {
- private static Logger log = LoggerFactory.getLogger(DataSchemaMenu.class.getName());
+ private static Logger log = Logger.getLogger(DataSchemaMenu.class.getName());
public static void main(String[] args) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java
new file mode 100644
index 0000000000..8ef1522f86
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java
@@ -0,0 +1,55 @@
+package org.openecomp.sdc.asdctool.main;
+
+import java.util.Scanner;
+
+import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
+import org.openecomp.sdc.asdctool.configuration.InternalToolConfiguration;
+import org.openecomp.sdc.asdctool.impl.internal.tool.DeleteComponentHandler;
+import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+public class DeleteComponentTool extends SdcInternalTool{
+ private static final String PSW = "ItIsTimeToDelete";
+
+ public static void main(String[] args) {
+ if (args == null || args.length < 2) {
+ ConsoleWriter.dataLine("Usage: <configuration dir> <password>");
+ System.exit(1);
+ }
+ String appConfigDir = args[0];
+ String password = args[1];
+
+ if ( !PSW.equals(password) ){
+ ConsoleWriter.dataLine("Wrong password");
+ System.exit(1);
+ }
+
+ disableConsole();
+ ConsoleWriter.dataLine("STARTED... ");
+
+ ConfigurationUploader.uploadConfigurationFiles(appConfigDir);
+ AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(InternalToolConfiguration.class);
+ DeleteComponentHandler deleteComponentHandler = context.getBean(DeleteComponentHandler.class);
+
+
+ String input = "";
+ Scanner scanner = new Scanner(System.in);
+ do {
+ ConsoleWriter.dataLine("Enter next component unique id or exit: ");
+ input = scanner.nextLine();
+ if (!input.equals("exit")) {
+ if (!input.isEmpty()) {
+ ConsoleWriter.dataLine("Your id is " ,input);
+ deleteComponentHandler.deleteComponent(input, scanner);
+ }else{
+ ConsoleWriter.dataLine("Your id is empty. Try again.");
+ }
+ }
+ } while (!input.equals("exit"));
+ deleteComponentHandler.closeAll();
+ ConsoleWriter.dataLine("DeleteComponentTool exit...");
+ System.exit(0);
+ }
+
+
+}
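The two new interactive tools, CsarGeneratorTool and DeleteComponentTool, follow the same shape: validate the arguments, silence the console appender via SdcInternalTool, bootstrap a dedicated Spring configuration, then prompt in a Scanner loop until the user types exit. A stripped-down, JDK-only model of that loop (System.out stands in for ConsoleWriter, and the println inside the loop stands in for generateCsar / deleteComponent):

import java.util.Scanner;

// Stripped-down model of the prompt loop shared by the two new tools above.
public class PromptLoopSketch {
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        String input;
        do {
            System.out.println("Enter next id or exit:");
            input = scanner.nextLine();
            if (!input.equals("exit") && !input.isEmpty()) {
                System.out.println("handling " + input); // stand-in for the handler call
            }
        } while (!input.equals("exit"));
        scanner.close();
    }
}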
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
index 873bdb1496..c119d7e1ff 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
@@ -26,13 +26,12 @@ import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
public class EsToCassandraDataMigrationMenu {
- private static Logger log = LoggerFactory.getLogger(EsToCassandraDataMigrationMenu.class.getName());
+ private static Logger log = Logger.getLogger(EsToCassandraDataMigrationMenu.class.getName());
public static void main(String[] args) {
@@ -55,7 +54,7 @@ public class EsToCassandraDataMigrationMenu {
case "es-to-cassndra-migration":
dataMigration = (DataMigration) context.getBean("DataMigrationBean");
log.debug("Start migration from ES to C* ");
- if (dataMigration.migrateDataEsToCassandra(appConfigDir, true, true)) {
+ if (dataMigration.migrateDataESToCassndra(appConfigDir, true, true)) {
log.debug("migration from ES to C* was finished successfull");
System.exit(0);
} else {
@@ -66,7 +65,7 @@ public class EsToCassandraDataMigrationMenu {
case "es-to-cassndra-migration-export-only":
dataMigration = (DataMigration) context.getBean("DataMigrationBean");
log.debug("Start migration export only from ES to C* ");
- if (dataMigration.migrateDataEsToCassandra(appConfigDir, true, false)) {
+ if (dataMigration.migrateDataESToCassndra(appConfigDir, true, false)) {
log.debug("migration export only from ES to C* was finished successfull");
System.exit(0);
} else {
@@ -77,7 +76,7 @@ public class EsToCassandraDataMigrationMenu {
case "es-to-cassndra-migration-import-only":
dataMigration = (DataMigration) context.getBean("DataMigrationBean");
log.debug("Start migration import only from ES to C* ");
- if (dataMigration.migrateDataEsToCassandra(appConfigDir, false, true)) {
+ if (dataMigration.migrateDataESToCassndra(appConfigDir, false, true)) {
log.debug("migration import only from ES to C* was finished successfull");
System.exit(0);
} else {
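The three menu operations above differ only in the two boolean flags passed to the renamed migrateDataESToCassndra method. A small, self-contained summary of the mapping, with the export/import meaning inferred from the case labels rather than stated in the diff:

import java.util.LinkedHashMap;
import java.util.Map;

// Summary of the flags behind the three case labels above; the interpretation
// (second argument = export from Elasticsearch, third = import into Cassandra)
// is inferred from the operation names.
public class MigrationFlagSummary {
    public static void main(String[] args) {
        Map<String, boolean[]> flags = new LinkedHashMap<>();
        flags.put("es-to-cassndra-migration", new boolean[]{true, true});
        flags.put("es-to-cassndra-migration-export-only", new boolean[]{true, false});
        flags.put("es-to-cassndra-migration-import-only", new boolean[]{false, true});
        flags.forEach((operation, f) ->
                System.out.printf("%s -> export=%b, import=%b%n", operation, f[0], f[1]));
    }
}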
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java
index 733cc5cc94..189348acc6 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java
@@ -1,7 +1,6 @@
package org.openecomp.sdc.asdctool.main;
-import java.util.List;
-
+import fj.data.Either;
import org.openecomp.sdc.asdctool.cli.CLIToolData;
import org.openecomp.sdc.asdctool.cli.SpringCLITool;
import org.openecomp.sdc.asdctool.configuration.GetConsumersConfiguration;
@@ -11,7 +10,7 @@ import org.openecomp.sdc.be.resources.data.ConsumerData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import fj.data.Either;
+import java.util.List;
public class GetConsumersMenu extends SpringCLITool {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java
new file mode 100644
index 0000000000..9c2c62fb8f
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java
@@ -0,0 +1,16 @@
+package org.openecomp.sdc.asdctool.main;
+
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.slf4j.LoggerFactory;
+
+import ch.qos.logback.core.Appender;
+
+public abstract class SdcInternalTool {
+ protected static void disableConsole() {
+ org.slf4j.Logger rootLogger = LoggerFactory.getILoggerFactory().getLogger(Logger.ROOT_LOGGER_NAME);
+ Appender appender = ((ch.qos.logback.classic.Logger) rootLogger).getAppender("STDOUT");
+ if (appender != null) {
+ appender.stop();
+ }
+ }
+}
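The new SdcInternalTool base class centralizes console silencing for the interactive tools: it resolves the root logback logger and stops the appender named STDOUT, presumably so log output does not interleave with the ConsoleWriter prompts. An equivalent, self-contained sketch using the logback classic API directly, under the same assumptions (logback is the bound slf4j backend and the console appender is indeed named "STDOUT"):

import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.Appender;
import org.slf4j.LoggerFactory;

// Equivalent form of disableConsole(); assumes a logback-backed slf4j binding.
public class ConsoleSilencer {
    public static void silence() {
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        Appender<?> stdout = context.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).getAppender("STDOUT");
        if (stdout != null) {
            stdout.stop();
        }
    }

    public static void main(String[] args) {
        silence();
        System.out.println("console appender stopped (if it existed)");
    }
}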
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
index e915d27502..d2fa3a41fc 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
@@ -20,21 +20,6 @@
package org.openecomp.sdc.asdctool.main;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.FileSystems;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.Date;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
-
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;
@@ -50,6 +35,16 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
+import java.io.*;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
public class SdcSchemaFileImport {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java
index b9a1e1741b..ff548ff71a 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java
@@ -20,20 +20,19 @@
package org.openecomp.sdc.asdctool.main;
+import org.openecomp.sdc.asdctool.impl.UpdatePropertyOnVertex;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.openecomp.sdc.asdctool.impl.UpdatePropertyOnVertex;
-import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
-import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
public class UpdateIsVnfMenu {
- private static Logger log = LoggerFactory.getLogger(UpdateIsVnfMenu.class.getName());
+ private static Logger log = Logger.getLogger(UpdateIsVnfMenu.class.getName());
private static void usageAndExit() {
updateIsVnfTrueUsage();
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java
index e07b3af551..f55120e37c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java
@@ -8,8 +8,7 @@ import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
/**
@@ -17,7 +16,7 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext
*/
public class ValidationTool {
- private static Logger log = LoggerFactory.getLogger(ValidationTool.class.getName());
+ private static Logger log = Logger.getLogger(ValidationTool.class.getName());
public static void main(String[] args) {
@@ -29,14 +28,14 @@ public class ValidationTool {
AnnotationConfigApplicationContext context = initContext(appConfigDir);
ValidationToolBL validationToolBL = context.getBean(ValidationToolBL.class);
- System.out.println("Start Validation Tool");
+ log.info("Start Validation Tool");
Boolean result = validationToolBL.validateAll();
ReportManager.reportEndOfToolRun();
if (result) {
- System.out.println("Validation finished successfully");
+ log.info("Validation finished successfully");
System.exit(0);
} else {
- System.out.println("Validation finished with warnings");
+ log.info("Validation finished with warnings");
System.exit(2);
}
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/VrfObjectFixMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/VrfObjectFixMenu.java
new file mode 100644
index 0000000000..cf159ccf24
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/VrfObjectFixMenu.java
@@ -0,0 +1,52 @@
+package org.openecomp.sdc.asdctool.main;
+
+import org.openecomp.sdc.asdctool.configuration.VrfObjectFixConfiguration;
+import org.openecomp.sdc.asdctool.impl.VrfObjectFixHandler;
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.common.api.ConfigurationSource;
+import org.openecomp.sdc.common.impl.ExternalConfiguration;
+import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+import java.util.Arrays;
+
+public class VrfObjectFixMenu {
+
+ private static final Logger log = Logger.getLogger(VrfObjectFixMenu.class);
+
+ private VrfObjectFixMenu(){}
+
+ public static void main(String[] args) {
+ if (isNotValidArguments(args)) {
+ log.debug("#main - The invalid array of the arguments have been received: {}", Arrays.toString(args));
+ log.debug("#main - Usage: <configuration dir> <'detect'/'fix'> <output folder path>");
+ System.exit(1);
+ }
+ initConfig(args[0]);
+ VrfObjectFixHandler vrfObjectFixHandler = getVrfObjectFixHandler();
+ if (vrfObjectFixHandler.handle(args[1], args.length == 3 ? args[2] : null)) {
+ log.info("#main - The {} operation of the corrupted VRFObject Node Types has been finished successfully", args[1]);
+ } else{
+ log.info("#main - The {} operation of the corrupted VRFObject Node Types has been failed", args[1]);
+ System.exit(2);
+ }
+ System.exit(0);
+ }
+
+ private static VrfObjectFixHandler getVrfObjectFixHandler() {
+ AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(VrfObjectFixConfiguration.class);
+ return context.getBean(VrfObjectFixHandler.class);
+ }
+
+ private static boolean isNotValidArguments(String[] args) {
+ return args == null || args.length < 2;
+ }
+
+
+ private static void initConfig(String configDir) {
+ ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), configDir);
+ new ConfigurationManager(configurationSource);
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
index f192d87867..a18d644f08 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
@@ -1,8 +1,5 @@
package org.openecomp.sdc.asdctool.migration.config;
-import java.util.ArrayList;
-import java.util.List;
-
import org.openecomp.sdc.asdctool.migration.core.SdcMigrationTool;
import org.openecomp.sdc.asdctool.migration.core.task.Migration;
import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
@@ -11,6 +8,7 @@ import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver;
import org.openecomp.sdc.asdctool.migration.resolver.SpringBeansMigrationResolver;
import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
+import org.openecomp.sdc.be.components.scheduledtasks.ComponentsCleanBusinessLogic;
import org.openecomp.sdc.be.config.CatalogModelSpringConfig;
import org.openecomp.sdc.be.dao.config.DAOSpringConfig;
import org.openecomp.sdc.config.CatalogBESpringConfig;
@@ -22,6 +20,9 @@ import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.io.FileSystemResource;
+import java.util.ArrayList;
+import java.util.List;
+
@Configuration
@Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
@ComponentScan({"org.openecomp.sdc.asdctool.migration.tasks",//migration tasks
@@ -68,4 +69,7 @@ public class MigrationSpringConfig {
return bean;
}
+ @Bean(name = "componentsCleanBusinessLogic")
+ public ComponentsCleanBusinessLogic componentsCleanBusinessLogic() {return new ComponentsCleanBusinessLogic(); }
+
}
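The componentsCleanBusinessLogic bean added here backs the ComponentsCleanBusinessLogic dependency that UpgradeMigration1710 autowires later in this patch for its cleanup and delete-lock flow. A self-contained illustration of the @Bean(name = ...) registration-and-lookup idiom, using a stand-in class rather than the SDC one:

import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Illustration of named-bean registration; CleanupTask is a stand-in, not an SDC class.
public class BeanNameSketch {

    public static class CleanupTask {
        void run() {
            System.out.println("cleanup");
        }
    }

    @Configuration
    public static class SketchConfig {
        @Bean(name = "componentsCleanBusinessLogic")
        public CleanupTask componentsCleanBusinessLogic() {
            return new CleanupTask();
        }
    }

    public static void main(String[] args) {
        AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(SketchConfig.class);
        ctx.getBean("componentsCleanBusinessLogic", CleanupTask.class).run();
        ctx.close();
    }
}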
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
index aa27d6e435..65e508c743 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
@@ -4,6 +4,7 @@ import org.openecomp.sdc.be.components.distribution.engine.IDistributionEngine;
import org.openecomp.sdc.be.components.distribution.engine.INotificationData;
import org.openecomp.sdc.be.dao.api.ActionStatus;
import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.User;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.resources.data.OperationalEnvironmentEntry;
import org.springframework.stereotype.Component;
@@ -16,12 +17,12 @@ public class DistributionEngineMock implements IDistributionEngine {
}
@Override
- public ActionStatus notifyService(String distributionId, Service service, INotificationData notificationData, String envName, String userId, String modifierName) {
+ public ActionStatus notifyService(String distributionId, Service service, INotificationData notificationData, String envName, User modifier) {
return null;
}
@Override
- public ActionStatus notifyService(String distributionId, Service service, INotificationData notificationData, String envId, String envName, String userId, String modifierName) {
+ public ActionStatus notifyService(String distributionId, Service service, INotificationData notificationData, String envId, String envName, User modifier) {
return null;
}
@@ -41,7 +42,7 @@ public class DistributionEngineMock implements IDistributionEngine {
}
@Override
- public StorageOperationStatus isReadyForDistribution(Service service, String envName) {
+ public StorageOperationStatus isReadyForDistribution(String envName) {
return null;
}
@@ -51,11 +52,6 @@ public class DistributionEngineMock implements IDistributionEngine {
}
@Override
- public StorageOperationStatus verifyServiceHasDeploymentArtifacts(Service service) {
- return null;
- }
-
- @Override
public OperationalEnvironmentEntry getEnvironmentById(String opEnvId) {
return null;
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/HealthCheckBusinessLogicMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/HealthCheckBusinessLogicMock.java
index 5e4f15ca7e..c7ef45e712 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/HealthCheckBusinessLogicMock.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/HealthCheckBusinessLogicMock.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.migration.config.mocks;
-import javax.annotation.PostConstruct;
-
import org.openecomp.sdc.be.components.health.HealthCheckBusinessLogic;
import org.springframework.stereotype.Component;
+import javax.annotation.PostConstruct;
+
@Component("healthCheckBusinessLogic")
public class HealthCheckBusinessLogicMock extends HealthCheckBusinessLogic {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
index 03d5adfc15..003a27a1e4 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
@@ -13,7 +13,7 @@ public class DBVersion implements Comparable<DBVersion>{
/**
* The current db version. should be tested against real db to verify it is compatible to the db version
*/
- public static final DBVersion CURRENT_VERSION = new DBVersion(1802, 0);
+ public static final DBVersion CURRENT_VERSION = new DBVersion(1710, 0);
private DBVersion(BigInteger major, BigInteger minor) {
this.major = major;
@@ -62,12 +62,8 @@ public class DBVersion implements Comparable<DBVersion>{
@Override
public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
DBVersion dbVersion = (DBVersion) o;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java
index b8e2347970..2435601e7c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java
@@ -1,7 +1,5 @@
package org.openecomp.sdc.asdctool.migration.core;
-import java.util.List;
-
import org.openecomp.sdc.asdctool.migration.core.execution.MigrationExecutionResult;
import org.openecomp.sdc.asdctool.migration.core.execution.MigrationExecutorImpl;
import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
@@ -9,12 +7,13 @@ import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage.AspectMigr
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver;
import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+
+import java.util.List;
public class SdcMigrationTool {
- private static final Logger LOGGER = LoggerFactory.getLogger(SdcMigrationTool.class);
+ private static final Logger LOGGER = Logger.getLogger(SdcMigrationTool.class);
private MigrationResolver migrationsResolver;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java
index aa871914d6..accd9eca53 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java
@@ -1,11 +1,11 @@
package org.openecomp.sdc.asdctool.migration.core.execution;
-import java.util.Date;
-
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
import org.openecomp.sdc.be.resources.data.MigrationTaskEntry;
+import java.util.Date;
+
public class MigrationExecutionResult {
private MigrationResult.MigrationStatus migrationStatus;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java
index 2b3b28a737..fd9ac14944 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java
@@ -3,14 +3,13 @@ package org.openecomp.sdc.asdctool.migration.core.execution;
import org.openecomp.sdc.asdctool.migration.core.MigrationException;
import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.util.StopWatch;
public class MigrationExecutorImpl implements MigrationExecutor {
- private static final Logger LOGGER = LoggerFactory.getLogger(MigrationExecutorImpl.class);
+ private static final Logger LOGGER = Logger.getLogger(MigrationExecutorImpl.class);
@Override
public MigrationExecutionResult execute(IMigrationStage migration) throws MigrationException {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
index b0bfabb059..e1144c14b5 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
@@ -1,28 +1,24 @@
package org.openecomp.sdc.asdctool.migration.dao;
-import java.math.BigInteger;
-
-import javax.annotation.PostConstruct;
-
-import org.apache.commons.lang3.tuple.ImmutablePair;
-import org.openecomp.sdc.be.dao.cassandra.CassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
-import org.openecomp.sdc.be.resources.data.MigrationTaskEntry;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingTypesConstants;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.datastax.driver.mapping.Mapper;
import com.datastax.driver.mapping.MappingManager;
-
import fj.data.Either;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.be.dao.cassandra.CassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
+import org.openecomp.sdc.be.resources.data.MigrationTaskEntry;
+import org.openecomp.sdc.be.resources.data.auditing.AuditingTypesConstants;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+
+import javax.annotation.PostConstruct;
+import java.math.BigInteger;
public class MigrationTasksDao extends CassandraDao {
- private static Logger logger = LoggerFactory.getLogger(MigrationTasksDao.class.getName());
+ private static Logger logger = Logger.getLogger(MigrationTasksDao.class.getName());
private MigrationTasksAccessor migrationTasksAccessor;
private Mapper<MigrationTaskEntry> migrationTaskMapper;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java
index de6c66d2d7..92c9c3f83c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java
@@ -6,13 +6,12 @@ import org.openecomp.sdc.asdctool.cli.CLIToolData;
import org.openecomp.sdc.asdctool.cli.SpringCLITool;
import org.openecomp.sdc.asdctool.migration.config.MigrationSpringConfig;
import org.openecomp.sdc.asdctool.migration.core.SdcMigrationTool;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.context.support.AbstractApplicationContext;
public class MigrationMenu extends SpringCLITool {
- private static final Logger LOGGER = LoggerFactory.getLogger(MigrationMenu.class);
+ private static final Logger LOGGER = Logger.getLogger(MigrationMenu.class);
public static void main(String[] args) {
MigrationMenu migrationMenu = new MigrationMenu();
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java
index 22add31eb4..91511117d0 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.migration.resolver;
-import java.util.List;
-
import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
+import java.util.List;
+
public interface MigrationResolver {
/**
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java
index 182996f5e4..b70c9b1ab7 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java
@@ -1,17 +1,17 @@
package org.openecomp.sdc.asdctool.migration.resolver;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.List;
-import java.util.stream.Collectors;
-
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
import org.openecomp.sdc.asdctool.migration.core.task.Migration;
import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+import java.util.stream.Collectors;
+
public class SpringBeansMigrationResolver implements MigrationResolver {
private List<Migration> migrations = new ArrayList<>();
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
index cf4affb345..20451f2c63 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
@@ -1,11 +1,11 @@
package org.openecomp.sdc.asdctool.migration.service;
-import java.math.BigInteger;
-
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.asdctool.migration.dao.MigrationTasksDao;
import org.openecomp.sdc.be.resources.data.MigrationTaskEntry;
+import java.math.BigInteger;
+
public class SdcRepoService {
private MigrationTasksDao migrationTasksDao;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java
index 59259219db..418201984b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java
@@ -2,8 +2,8 @@ package org.openecomp.sdc.asdctool.migration.tasks.handlers;
public interface OutputHandler {
- public void initiate(Object... title);
+ public void initiate(String name, Object... title);
public void addRecord(Object... record);
- public boolean writeOutput();
+ public boolean writeOutputAndCloseFile();
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
index 0ce03b14e9..b5ea6dd0a9 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
@@ -1,65 +1,92 @@
package org.openecomp.sdc.asdctool.migration.tasks.handlers;
-import java.io.FileOutputStream;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-
+import org.apache.commons.lang3.StringUtils;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.text.SimpleDateFormat;
public class XlsOutputHandler implements OutputHandler {
- private final static Logger LOGGER = LoggerFactory.getLogger(XlsOutputHandler.class);
-
+ private static final Logger log = Logger.getLogger(XlsOutputHandler.class);
private Workbook workbook;
private Sheet activeSheet;
- private Row currentRow;
- int rowCount = 0;
+ private int rowCount = 0;
+ private String sheetName;
+ private String outputPath;
- public XlsOutputHandler(Object... title){
- initiate(title);
+ public XlsOutputHandler(String outputPath, String sheetName, Object... title){
+ this.outputPath = outputPath;
+ this.sheetName = sheetName;
+ initiate(sheetName, title);
}
@Override
- public void initiate(Object... title) {
- LOGGER.info("Starting to initiate xls output handler. ");
+ public void initiate(String sheetName, Object... title) {
+ log.info("#initiate - Starting to initiate XlsOutputHandler. ");
workbook = new HSSFWorkbook();
- activeSheet = workbook.createSheet("Upgrade Migration 1710.0 results");
+ activeSheet = workbook.createSheet(sheetName);
addRecord(title);
- LOGGER.info("Xls output handler has been initiated. ");
+ log.info("#initiate - XlsOutputHandler has been initiated. ");
}
@Override
public void addRecord(Object... record) {
- LOGGER.debug("Going to add record {} to output. ", record);
- currentRow = activeSheet.createRow(rowCount++);
- LOGGER.debug("A new row has been created");
+ log.info("#addRecord - Going to add record {} to output. ", record);
+ Row currentRow = activeSheet.createRow(rowCount++);
+ log.info("#addRecord - A new row has been created");
int columnCount = 0;
Cell cell;
for(Object cellValue : record){
cell = currentRow.createCell(columnCount++);
- if(cellValue != null)
- cell.setCellValue(cellValue.toString());
+ if (cellValue != null) {
+ cell.setCellValue(cellValue.toString());
+ }
}
}
@Override
- public boolean writeOutput() {
+ public boolean writeOutputAndCloseFile() {
+ if (rowCount <= 1) {
+ return false;
+ }
try {
- DateFormat df = new SimpleDateFormat("yyyyMMdd_HHmmss");
- String fileName = "UpgradeMigration1710Results_" + df.format(System.currentTimeMillis()) + ".xls";
- LOGGER.info("Going to write xls output file {}. ", fileName);
- workbook.write(new FileOutputStream(fileName));
+ FileOutputStream file = getXlsFile();
+ workbook.write(file);
+ file.close();
return true;
} catch (Exception e) {
- LOGGER.error("Failed to write an output file upon Upgrade migration 1710. Exception {} occured. ", e);
+ log.debug("#writeOutputAndCloseFile - Failed to write an output file. ", e);
return false;
}
}
+ public String getOutputPath() {
+ return outputPath;
+ }
+
+ FileOutputStream getXlsFile() throws FileNotFoundException {
+ String fileName = buildFileName();
+ log.info("#getXlsFile - Going to write the output file {}. ", fileName);
+ return new FileOutputStream(fileName);
+ }
+
+ private String buildFileName() {
+ StringBuilder fileName = new StringBuilder();
+ if(StringUtils.isNotEmpty(outputPath)){
+ fileName.append(outputPath);
+ }
+ return fileName.append(sheetName)
+ .append("_")
+ .append(new SimpleDateFormat("yyyyMMdd_HHmmss").format(System.currentTimeMillis()))
+ .append(".xls")
+ .toString();
+ }
+
}
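The refactored handler changes its public surface: the constructor now takes an output path and a sheet name, initiate() takes the sheet name, and writeOutput() becomes writeOutputAndCloseFile(), which refuses to write a report that contains only the title row. A hedged usage sketch based on those signatures (the column titles and record values are illustrative):

import org.openecomp.sdc.asdctool.migration.tasks.handlers.XlsOutputHandler;

// Usage sketch; with outputPath == null the report lands in the working directory
// as <sheetName>_yyyyMMdd_HHmmss.xls (see buildFileName above).
public class XlsOutputHandlerUsage {
    public static void main(String[] args) {
        XlsOutputHandler handler = new XlsOutputHandler(null, "UpgradeMigration1710report",
                "COMPONENT TYPE", "COMPONENT NAME", "UPGRADE STATUS");
        handler.addRecord("SERVICE", "mySvc", "UPGRADED");
        // Returns false when only the title row exists (rowCount <= 1), true once the
        // workbook has been written and the stream closed.
        boolean written = handler.writeOutputAndCloseFile();
        System.out.println("report written: " + written);
    }
}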
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
index 9d01ab0d9c..4b9af31092 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
@@ -1,14 +1,8 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1710;
-import java.util.ArrayList;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.function.Predicate;
-import java.util.stream.Collectors;
-
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Lists;
+import fj.data.Either;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
@@ -17,9 +11,13 @@ import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
import org.openecomp.sdc.asdctool.migration.tasks.handlers.XlsOutputHandler;
import org.openecomp.sdc.be.components.impl.ComponentInstanceBusinessLogic;
import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.exceptions.ComponentException;
import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
import org.openecomp.sdc.be.components.lifecycle.LifecycleChangeInfoWithAction;
import org.openecomp.sdc.be.components.lifecycle.LifecycleChangeInfoWithAction.LifecycleChanceActionEnum;
+import org.openecomp.sdc.be.components.scheduledtasks.ComponentsCleanBusinessLogic;
+import org.openecomp.sdc.be.config.Configuration;
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.be.dao.api.ActionStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
@@ -28,41 +26,32 @@ import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
-import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
-import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
-import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
-import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
-import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.*;
import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.ComponentInstance;
-import org.openecomp.sdc.be.model.ComponentInstanceProperty;
-import org.openecomp.sdc.be.model.ComponentParametersView;
-import org.openecomp.sdc.be.model.LifeCycleTransitionEnum;
-import org.openecomp.sdc.be.model.LifecycleStateEnum;
-import org.openecomp.sdc.be.model.Resource;
-import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.*;
import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.jsontitan.utils.ModelConverter;
import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.model.operations.impl.CsarOperation;
import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.openecomp.sdc.exception.ResponseFormat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
-import com.google.common.collect.Lists;
-
-import fj.data.Either;
+import javax.annotation.PostConstruct;
+import java.util.*;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
-//@Component
+@Component
public class UpgradeMigration1710 implements PostMigration {
-
- private static final String SERVICE_UUID_RPOPERTY = "providing_service_uuid";
-
- private static final String SERVICE_INVARIANT_UUID_RPOPERTY = "providing_service_invariant_uuid";
-
+
+ private static final String SERVICE_UUID_RPOPERTY = "providing_service_uuid";
+
+ private static final String SERVICE_INVARIANT_UUID_RPOPERTY = "providing_service_invariant_uuid";
+
private static final String UNKNOWN = "UNKNOWN";
private static final String CHECKOUT_MESSAGE = "checkout upon upgrade migration";
@@ -75,7 +64,13 @@ public class UpgradeMigration1710 implements PostMigration {
private static final String UPGRADE_VFS_FAILED = "Upgrade VFs upon upgrade migration 1710 process failed. ";
- private static final Logger LOGGER = LoggerFactory.getLogger(UpgradeMigration1710.class);
+ private static final Logger log = Logger.getLogger(UpgradeMigration1710.class);
+
+ private static final String ALLOTTED_RESOURCE_NAME = "Allotted Resource";
+
+ //as per US 397775, only node type upgrade should be enabled,
+ // to support resource and service upgrade, this flag should be reverted
+ private boolean isNodeTypesSupportOnly = true;
@Autowired
private TitanDao titanDao;
@@ -93,6 +88,9 @@ public class UpgradeMigration1710 implements PostMigration {
private ResourceBusinessLogic resourceBusinessLogic;
@Autowired
+ private ServiceBusinessLogic serviceBusinessLogic;
+
+ @Autowired
private CsarOperation csarOperation;
@Autowired
@@ -101,7 +99,10 @@ public class UpgradeMigration1710 implements PostMigration {
@Autowired
private ComponentsUtils componentsUtils;
- private XlsOutputHandler outputHandler = new XlsOutputHandler("COMPONENT TYPE", "COMPONENT NAME", "COMPONENT UUID", "COMPONENT UNIQUE_ID", "UPGRADE STATUS", "DESCRIPTION");
+ @Autowired
+ private ComponentsCleanBusinessLogic componentsCleanBusinessLogic;
+
+ private XlsOutputHandler outputHandler = new XlsOutputHandler(null, "UpgradeMigration1710report","COMPONENT TYPE", "COMPONENT NAME", "COMPONENT UUID", "COMPONENT UNIQUE_ID", "UPGRADE STATUS", "DESCRIPTION");
private User user = null;
@@ -110,101 +111,179 @@ public class UpgradeMigration1710 implements PostMigration {
private final Map<String, GraphVertex> latestGenericTypes = new HashMap<>();
private final Map<String, String> latestOriginResourceVersions = new HashMap<>();
-
- private final List<String> proxyServiceContainers = new ArrayList<>();
-
- private final List<String> vfAllottedResources = new ArrayList<>();
-
- private final List<String> allottedVfContainers = new ArrayList<>();
+
+ private final Map<String, org.openecomp.sdc.be.model.Component> upgradedNodeTypesMap = new HashMap<>();
+
+ private List<String> nodeTypes;
+
+ private List<String> proxyServiceContainers = new ArrayList<>();
+
+ private List<String> vfAllottedResources = new ArrayList<>();
+
+ private List<String> allottedVfContainers = new ArrayList<>();
private boolean isVfcUpgradeRequired = false;
private boolean skipIfUpgradeVfFailed = true;
+ private boolean isAllottedAndProxySupported = true;
+
+ private String userId;
+
+ private boolean isCleanupLocked = false;
+
+ private int markedAsDeletedResourcesCnt = 0;
+
+ private int markedAsDeletedServicesCnt = 0;
+
+ //how many components can be deleted once
+ private int maxDeleteComponents = 10;
+
+ private boolean enableAutoHealing = true;
+
+ //map for tracing checked out resources that keep in place after upgrade failure
+ private HashMap<String, String> certifiedToNextCheckedOutUniqueId = new HashMap<>();
+
+ private int deleteLockTimeoutInSeconds = 60;
+
+ private boolean isLockSucceeded = false;
+
/***********************************************/
+ @VisibleForTesting
+ void setNodeTypesSupportOnly(boolean nodeTypesSupportOnly) {
+ isNodeTypesSupportOnly = nodeTypesSupportOnly;
+ }
+
+ @VisibleForTesting
+ void setUser(User user) {
+ this.user = user;
+ }
+
+ @VisibleForTesting
+ void setMarkedAsDeletedResourcesCnt(int markedAsDeletedResourcesCnt) {
+ this.markedAsDeletedResourcesCnt = markedAsDeletedResourcesCnt;
+ }
+
+ @VisibleForTesting
+ void setMarkedAsDeletedServicesCnt(int markedAsDeletedServicesCnt) {
+ this.markedAsDeletedServicesCnt = markedAsDeletedServicesCnt;
+ }
+
+ @PostConstruct
+ void init() {
+ Configuration config = ConfigurationManager.getConfigurationManager().getConfiguration();
+ isVfcUpgradeRequired = !config.getSkipUpgradeVSPsFlag();
+ skipIfUpgradeVfFailed = config.getSkipUpgradeFailedVfs();
+ isAllottedAndProxySupported = config.getSupportAllottedResourcesAndProxyFlag();
+ deleteLockTimeoutInSeconds = config.getDeleteLockTimeoutInSeconds();
+ maxDeleteComponents = config.getMaxDeleteComponents();
+
+ String toscaConformanceLevel = config.getToscaConformanceLevel();
+ Map<String, List<String>> resourcesForUpgrade = config.getResourcesForUpgrade();
+ nodeTypes = resourcesForUpgrade.get(toscaConformanceLevel);
+ enableAutoHealing = config.isEnableAutoHealing();
+ userId = config.getAutoHealingOwner();
+ isNodeTypesSupportOnly = true;
+ }
+
@Override
public String description() {
return "Upgrade migration 1710 - post migration task, which is dedicated to upgrade all latest certified (and not checked out) Node types, VFs and Services. ";
}
- private enum UpgradeStatus {
+ enum UpgradeStatus {
UPGRADED,
+ UPGRADED_AS_INSTANCE,
NOT_UPGRADED
}
@Override
public MigrationResult migrate() {
- LOGGER.info("Starting upgrade migration 1710 process. ");
MigrationResult migrationResult = new MigrationResult();
-
+ //stop the upgrade if this ask is disabled
+ if (!enableAutoHealing) {
+ log.warn("Upgrade migration 1710 task is disabled");
+ migrationResult.setMigrationStatus(MigrationResult.MigrationStatus.COMPLETED);
+ return migrationResult ;
+ }
+ log.info("Starting upgrade migration 1710 process. ");
boolean result = true;
+
try {
- isVfcUpgradeRequired = !ConfigurationManager.getConfigurationManager().getConfiguration().getSkipUpgradeVSPsFlag();
- skipIfUpgradeVfFailed = ConfigurationManager.getConfigurationManager().getConfiguration().getSkipUpgradeFailedVfs();
- final String userId = ConfigurationManager.getConfigurationManager().getConfiguration().getAutoHealingOwner();
+ //lock cleanup node to avoid BE to delete marked components
+ //while the auto-healing process is running
+ isLockSucceeded = isNodeTypesSupportOnly ? true : isLockDeleteOperationSucceeded();
- Either<User, ActionStatus> userReq = userAdminOperation.getUserData(userId, false);
- if (userReq.isRight()) {
+ if (!isLockSucceeded) {
result = false;
- LOGGER.error("Upgrade migration failed. User {} resolve failed: {} ", userId, userReq.right().value());
- } else {
- user = userReq.left().value();
- LOGGER.info("User {} will perform upgrade operation", user.getUserId());
- }
- if (result) {
- result = upgradeNodeTypes();
+ log.error("Cleanup node can't be locked. Upgrade migration failed");
}
- if (result) {
- result = upgradeVFs();
+ else {
+ Either<User, ActionStatus> userReq = userAdminOperation.getUserData(userId, false);
+ if (userReq.isRight()) {
+ result = false;
+ log.error("Upgrade migration failed. User {} resolve failed: {} ", userId, userReq.right().value());
+ } else {
+ user = userReq.left().value();
+ log.info("User {} will perform upgrade operation", user.getUserId());
+ }
}
if (result) {
- upgradeServices();
- }
- if(result){
- upgradeProxyServiceContainers();
- }
- if(result){
- upgradeAllottedVFs();
+ result = upgradeNodeTypes();
}
- if(result){
- upgradeAllottedVfContainers();
+ if (!isNodeTypesSupportOnly && result) {
+ result = upgradeTopologyTemplates();
}
- } catch (Exception e) {
+ }
+ catch (Exception e) {
result = false;
- LOGGER.error("Error occurred {}. ", e);
+ log.error("Error occurred during the migration: ", e);
} finally {
- if (result) {
- LOGGER.info("Upgrade migration 1710 has been successfully finished. ");
- titanDao.commit();
- migrationResult.setMigrationStatus(MigrationResult.MigrationStatus.COMPLETED);
- } else {
- LOGGER.info("Upgrade migration 1710 was failed. ");
- titanDao.rollback();
- migrationResult.setMigrationStatus(MigrationResult.MigrationStatus.FAILED);
- }
- outputHandler.writeOutput();
+ MigrationResult.MigrationStatus status = result ?
+ MigrationResult.MigrationStatus.COMPLETED : MigrationResult.MigrationStatus.FAILED;
+ cleanup(status);
+ migrationResult.setMigrationStatus(status);
}
return migrationResult;
}
- private void upgradeAllottedVfContainers() {
- LOGGER.info("Starting upgrade proxy {} service containers upon upgrade migration 1710 process. ", allottedVfContainers.size());
- for(String currUid : allottedVfContainers){
- upgradeServiceAndCommitIfNeeded(currUid, component -> true);
+ private boolean upgradeTopologyTemplates() {
+ if (upgradeVFs()) {
+ upgradeServices();
+ upgradeProxyServiceContainers();
+ upgradeAllottedVFs();
+ upgradeAllottedVfContainers();
+ return true;
}
- }
+ return false;
+ }
- private StorageOperationStatus upgradeServices() {
- LOGGER.info("Starting upgrade services upon upgrade migration 1710 process. ");
- Either<List<String>, TitanOperationStatus> getServicesRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.TOPOLOGY_TEMPLATE, ComponentTypeEnum.SERVICE);
- if (getServicesRes.isRight()) {
- return StorageOperationStatus.GENERAL_ERROR;
+ private void cleanup(MigrationResult.MigrationStatus status) {
+ if (status == MigrationResult.MigrationStatus.COMPLETED ) {
+ log.info("Upgrade migration 1710 has been successfully finished. ");
+ titanDao.commit();
+ } else {
+ log.info("Upgrade migration 1710 was failed. ");
+ titanDao.rollback();
+ }
+ outputHandler.writeOutputAndCloseFile();
+ if (!isNodeTypesSupportOnly && isLockSucceeded) {
+ //delete rest of components if their upgrade failed
+ markedAsDeletedResourcesCnt = maxDeleteComponents;
+ deleteResourcesIfLimitIsReached();
+ markedAsDeletedServicesCnt = maxDeleteComponents;
+ deleteServicesIfLimitIsReached();
+ unlockDeleteOperation();
}
- for (String currUid : getServicesRes.left().value()) {
- upgradeServiceAndCommitIfNeeded(currUid, this::shouldUpgrade);
+ }
+
+ void upgradeServices(List<String> uniqueIDs, Predicate<org.openecomp.sdc.be.model.Component> shouldUpgrade, final String containerName) {
+ log.info("Starting upgrade {} upon upgrade migration 1710 process. ", containerName);
+ for (String currUid : uniqueIDs) {
+ upgradeServiceAndCommitIfNeeded(currUid, shouldUpgrade);
}
- return StorageOperationStatus.OK;
+ log.info("Upgrade {} upon upgrade migration 1710 process is finished. ", containerName);
}
private void upgradeServiceAndCommitIfNeeded(String currUid, Predicate<org.openecomp.sdc.be.model.Component> shouldUpgrade) {
@@ -213,35 +292,49 @@ public class UpgradeMigration1710 implements PostMigration {
result = handleService(currUid, shouldUpgrade);
} catch (Exception e) {
result = false;
- LOGGER.error("Failed to upgrade Service with uniqueId {} due to a reason {}. ", currUid, e);
+ log.error("Failed to upgrade service with uniqueId {} due to a reason {}. ", currUid, e.getMessage());
+ log.debug("Failed to upgrade service with uniqueId {}", currUid, e);
}
finally {
if (result) {
+ log.info("Service upgrade finished successfully: uniqueId {} ", currUid);
titanDao.commit();
}
else {
+ log.error("Failed to upgrade service with uniqueId {} ", currUid);
titanDao.rollback();
}
+ markCheckedOutServiceAsDeletedIfUpgradeFailed(currUid, result);
}
}
-
- private void upgradeProxyServiceContainers() {
- LOGGER.info("Starting upgrade proxy service containers upon upgrade migration 1710 process. ");
- for(String currUid : proxyServiceContainers){
- upgradeServiceAndCommitIfNeeded(currUid, component -> true);
+
+ private void upgradeAllottedVfContainers() {
+ upgradeServices(allottedVfContainers, component -> true, "proxy " + allottedVfContainers.size() + " service containers");
+ }
+
+ private void upgradeServices() {
+ Either<List<String>, TitanOperationStatus> getServicesRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.TOPOLOGY_TEMPLATE, ComponentTypeEnum.SERVICE);
+ if (getServicesRes.isRight()) {
+ log.error("Failed to retrieve the latest certified service versions");
+ return;
}
- }
+ upgradeServices(getServicesRes.left().value(), this::shouldUpgrade, "services");
+ }
+
+ private void upgradeProxyServiceContainers() {
+ upgradeServices(proxyServiceContainers, component -> true, "proxy service containers");
+ }
private boolean handleService(String uniqueId, Predicate<org.openecomp.sdc.be.model.Component> shouldUpgrade) {
- LOGGER.info("Starting upgrade Service with uniqueId {} upon upgrade migration 1710 process. ", uniqueId);
+ log.info("Starting upgrade Service with uniqueId {} upon upgrade migration 1710 process. ", uniqueId);
Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getServiceRes = toscaOperationFacade.getToscaElement(uniqueId);
if(getServiceRes.isRight()){
- LOGGER.error("Failed to upgrade service with uniqueId {} due to {}. ", uniqueId, getServiceRes.right().value());
+ log.error("Failed to upgrade service with uniqueId {} due to {}. ", uniqueId, getServiceRes.right().value());
outputHandler.addRecord(ComponentTypeEnum.SERVICE.name(), UNKNOWN, UNKNOWN, uniqueId, MigrationResult.MigrationStatus.FAILED.name(), getServiceRes.right().value());
return false;
}
String derivedFromGenericType = getServiceRes.left().value().getDerivedFromGenericType();
- LOGGER.debug("derivedFromGenericType: {}", derivedFromGenericType );
+ log.debug("derivedFromGenericType: {}", derivedFromGenericType );
if (derivedFromGenericType == null) {
//malformed field value, upgrade required
return upgradeService(getServiceRes.left().value());
@@ -249,7 +342,7 @@ public class UpgradeMigration1710 implements PostMigration {
if(!latestGenericTypes.containsKey(derivedFromGenericType)){
Either<List<GraphVertex>, TitanOperationStatus> getDerivedRes = findDerivedResources(derivedFromGenericType);
if(getDerivedRes.isRight()){
- LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, getServiceRes.left().value().getComponentType().getValue(), getServiceRes.left().value().getName(), getServiceRes.left().value().getInvariantUUID(), getServiceRes.left().value().getVersion(), "findDerivedResources", getDerivedRes.right().value());
+ log.error(FAILED_TO_UPGRADE_COMPONENT, getServiceRes.left().value().getComponentType().getValue(), getServiceRes.left().value().getName(), getServiceRes.left().value().getInvariantUUID(), getServiceRes.left().value().getVersion(), "findDerivedResources", getDerivedRes.right().value());
outputHandler.addRecord( getServiceRes.left().value().getComponentType().name(),getServiceRes.left().value().getName(), getServiceRes.left().value().getInvariantUUID(), getServiceRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getDerivedRes.right().value());
return false;
}
@@ -279,60 +372,85 @@ public class UpgradeMigration1710 implements PostMigration {
return true;
}
- private boolean addComponent(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) {
- VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name());
- Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata);
- if (getOriginRes.isRight()) {
- LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "toscaOperationFacade.getLatestCertifiedByToscaResourceName", getOriginRes.right().value());
- outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getOriginRes.right().value());
- return false;
- }
- latestOriginResourceVersions.put(instance.getToscaComponentName(), getOriginRes.left().value().getVersion());
- return true;
- }
+ private boolean addComponent(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) {
+ VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name());
+ Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata);
+ if (getOriginRes.isRight()) {
+ log.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "toscaOperationFacade.getLatestCertifiedByToscaResourceName", getOriginRes.right().value());
+ outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getOriginRes.right().value());
+ return false;
+ }
+ latestOriginResourceVersions.put(instance.getToscaComponentName(), getOriginRes.left().value().getVersion());
+ return true;
+ }
private boolean shouldUpgrade(org.openecomp.sdc.be.model.Component component) {
- boolean shouldUpgrade = false;
- if(CollectionUtils.isNotEmpty(component.getComponentInstances())){
+ if(CollectionUtils.isNotEmpty(component.getComponentInstances())) {
+ if (containsProxyOrAllottedVF(component)) {
+ return false;
+ }
for(ComponentInstance instance : component.getComponentInstances()){
- if(instance.getOriginType() == OriginTypeEnum.ServiceProxy){
- LOGGER.info("The service with name {}, invariantUUID {}, version {}, contains Service proxy instance {}, than the service should be upgraded in the end of the upgrading proccess. ", component.getName(), component.getInvariantUUID(), component.getVersion(), instance.getName());
- proxyServiceContainers.add(component.getUniqueId());
- shouldUpgrade = false;
- break;
- }
- if(isAllottedResource(instance.getActualComponentUid())){
- allottedVfContainers.add(component.getUniqueId());
- }
if(isGreater(latestOriginResourceVersions.get(instance.getToscaComponentName()), instance.getComponentVersion())){
- LOGGER.info("The service with name {}, invariantUUID {}, version {}, contains instance {} from outdated version of origin {} {} , than the service should be upgraded. ", component.getName(), component.getInvariantUUID(), component.getVersion(), instance.getName(), instance.getComponentName(), instance.getComponentVersion());
- shouldUpgrade = true;
+ log.info("The service with name {}, invariantUUID {}, version {}, contains instance {} from outdated version of origin {} {} , than the service should be upgraded. ", component.getName(), component.getInvariantUUID(), component.getVersion(), instance.getName(), instance.getComponentName(), instance.getComponentVersion());
+ return true;
}
}
}
- return shouldUpgrade;
+ return false;
+ }
+
+ private boolean containsProxyOrAllottedVF(org.openecomp.sdc.be.model.Component component) {
+ return !component.getComponentInstances()
+ .stream()
+ .filter(i->isProxyOrAllottedVF(i, component.getUniqueId()))
+ .collect(Collectors.toList()).isEmpty();
+ }
+
+ private boolean isProxyOrAllottedVF(ComponentInstance instance, String uniqueId) {
+ if (instance.getOriginType() == OriginTypeEnum.ServiceProxy) {
+ keepProxyServiceContainerIfSupported(uniqueId);
+ return true;
+ }
+ if (isAllottedResource(instance.getActualComponentUid())) {
+ keepAllottedVfContainerIfSupported(uniqueId);
+ return true;
+ }
+ return false;
+ }
+
+ private void keepAllottedVfContainerIfSupported(final String uniqueId) {
+ if (isAllottedAndProxySupported && !allottedVfContainers.contains(uniqueId)) {
+ log.info("Add a service with uniqueId {} to allotted VF containers container list", uniqueId);
+ allottedVfContainers.add(uniqueId);
+ }
+ }
+
+ private void keepProxyServiceContainerIfSupported(final String uniqueId) {
+ if (isAllottedAndProxySupported && !proxyServiceContainers.contains(uniqueId)) {
+ log.info("Add a service with uniqueId {} to proxy service container list", uniqueId);
+ proxyServiceContainers.add(uniqueId);
+ }
}
private boolean upgradeService(org.openecomp.sdc.be.model.Component service) {
String serviceName = service.getName();
String serviceUuid = service.getUUID();
- LOGGER.info("Starting upgrade Service with name {}, invariantUUID {}, version {} upon upgrade migration 1710 process. ", serviceName, service.getInvariantUUID(), service.getVersion());
- LOGGER.info("Starting to perform check out of service {}. ", serviceName);
- Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = lifecycleBusinessLogic.changeComponentState(service.getComponentType(), service.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
+ log.info("Starting upgrade Service with name {}, invariantUUID {}, version {} upon upgrade migration 1710 process. ", serviceName, service.getInvariantUUID(), service.getVersion());
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = checkOutComponent(service);
if (checkouRes.isRight()) {
- LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "lifecycleBusinessLogic.changeComponentState", checkouRes.right().value().getFormattedMessage());
+ log.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "lifecycleBusinessLogic.changeComponentState", checkouRes.right().value().getFormattedMessage());
outputHandler.addRecord(service.getComponentType().name(), serviceName, serviceUuid, service.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkouRes.right().value().getFormattedMessage());
return false;
}
Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateCompositionRes = updateComposition(checkouRes.left().value());
if (updateCompositionRes.isRight()) {
- LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "updateComposition", updateCompositionRes.right().value().getFormattedMessage());
+ log.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "updateComposition", updateCompositionRes.right().value().getFormattedMessage());
outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateCompositionRes.right().value().getFormattedMessage());
return false;
}
Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> certifyRes = performFullCertification(checkouRes.left().value());
if (certifyRes.isRight()) {
- LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "performFullCertification", certifyRes.right().value().getFormattedMessage());
+ log.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "performFullCertification", certifyRes.right().value().getFormattedMessage());
outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage());
return false;
}
@@ -341,20 +459,22 @@ public class UpgradeMigration1710 implements PostMigration {
}
private Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateComposition(org.openecomp.sdc.be.model.Component component) {
- Either<ComponentInstance, ResponseFormat> upgradeInstanceRes;
- for (ComponentInstance instance : component.getComponentInstances()) {
- upgradeInstanceRes = upgradeInstance(component, instance);
- if (upgradeInstanceRes.isRight()) {
- LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "upgradeInstance", upgradeInstanceRes.right().value().getFormattedMessage());
- outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), upgradeInstanceRes.right().value().getFormattedMessage());
- return Either.right(upgradeInstanceRes.right().value());
+ if (component != null && component.getComponentInstances() != null) {
+ Either<ComponentInstance, ResponseFormat> upgradeInstanceRes;
+ for (ComponentInstance instance : component.getComponentInstances()) {
+ upgradeInstanceRes = upgradeInstance(component, instance);
+ if (upgradeInstanceRes.isRight()) {
+ log.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "upgradeInstance", upgradeInstanceRes.right().value().getFormattedMessage());
+ outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), upgradeInstanceRes.right().value().getFormattedMessage());
+ return Either.right(upgradeInstanceRes.right().value());
+ }
}
}
return Either.left(component);
}
private Either<ComponentInstance, ResponseFormat> upgradeInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) {
- LOGGER.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
+ log.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
ComponentInstance newComponentInstance = new ComponentInstance(instance);
if (instance.getOriginType() == OriginTypeEnum.ServiceProxy) {
return upgradeServiceProxyInstance(component, instance, newComponentInstance);
@@ -364,92 +484,98 @@ public class UpgradeMigration1710 implements PostMigration {
private Either<ComponentInstance, ResponseFormat> upgradeResourceInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
- LOGGER.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
+ log.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
Either<ComponentInstance, ResponseFormat> upgradeInstanceRes = null;
VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name());
Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata);
if(getOriginRes.isRight()){
- LOGGER.info("Upgrade of {} instance {} upon upgrade migration 1710 process failed due to a reason {}. ",
+ log.info("Upgrade of {} instance {} upon upgrade migration 1710 process failed due to a reason {}. ",
component.getComponentType().getValue(), instance.getName(), getOriginRes.right().value());
upgradeInstanceRes = Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType())));
}
- if(upgradeInstanceRes == null){
- newComponentInstance.setComponentName(getOriginRes.left().value().getName());
- newComponentInstance.setComponentUid(getOriginRes.left().value().getUniqueId());
- newComponentInstance.setComponentVersion(getOriginRes.left().value().getVersion());
- newComponentInstance.setToscaComponentName(((Resource)getOriginRes.left().value()).getToscaResourceName());
- if(isGreater(getOriginRes.left().value().getVersion(), instance.getComponentVersion())){
- upgradeInstanceRes = changeAssetVersion(component, instance, newComponentInstance);
- }
- if((upgradeInstanceRes == null || upgradeInstanceRes.isLeft()) && isAllottedResource(instance.getComponentUid()) && MapUtils.isNotEmpty(component.getComponentInstancesProperties())){
- ComponentInstance instanceToUpdate = upgradeInstanceRes == null ? instance : upgradeInstanceRes.left().value();
- upgradeInstanceRes = Either.left(updateServiceUuidProperty(component, instanceToUpdate, component.getComponentInstancesProperties().get(instance.getUniqueId())));
- }
+ if(upgradeInstanceRes == null) {
+ copyComponentNameAndVersionToNewInstance(newComponentInstance, getOriginRes.left().value());
+
+ if(isGreater(getOriginRes.left().value().getVersion(), instance.getComponentVersion())){
+ upgradeInstanceRes = changeAssetVersion(component, instance, newComponentInstance);
+ }
+ if((upgradeInstanceRes == null || upgradeInstanceRes.isLeft()) && isAllottedResource(instance.getComponentUid()) && MapUtils.isNotEmpty(component.getComponentInstancesProperties())){
+ ComponentInstance instanceToUpdate = upgradeInstanceRes == null ? instance : upgradeInstanceRes.left().value();
+ upgradeInstanceRes = Either.left(updateServiceUuidProperty(component, instanceToUpdate, component.getComponentInstancesProperties().get(instance.getUniqueId())));
+ }
}
//upgrade nodes contained by CVFC
if(upgradeInstanceRes == null && isVfcUpgradeRequired && newComponentInstance.getOriginType() == OriginTypeEnum.CVFC &&
- !upgradeVf(getOriginRes.left().value().getUniqueId(), false)) {
- upgradeInstanceRes = Either.right(componentsUtils.getResponseFormat(ActionStatus.GENERAL_ERROR));
+ !upgradeVf(getOriginRes.left().value().getUniqueId(), false, true)) {
+ upgradeInstanceRes = Either.right(componentsUtils.getResponseFormat(ActionStatus.GENERAL_ERROR));
}
if(upgradeInstanceRes == null){
- upgradeInstanceRes = Either.left(instance);
+ upgradeInstanceRes = Either.left(instance);
}
- LOGGER.info("Upgrade of {} instance {} upon upgrade migration 1710 process finished successfully. ",
- component.getComponentType().getValue(), instance.getName());
+ log.info("Upgrade of {} instance {} upon upgrade migration 1710 process finished successfully. ",
+ component.getComponentType().getValue(), instance.getName());
return upgradeInstanceRes;
}
+ private void copyComponentNameAndVersionToNewInstance(ComponentInstance newComponentInstance, Resource originResource) {
+ newComponentInstance.setComponentName(originResource.getName());
+ newComponentInstance.setComponentUid(originResource.getUniqueId());
+ newComponentInstance.setComponentVersion(originResource.getVersion());
+ newComponentInstance.setToscaComponentName(originResource.getToscaResourceName());
+ }
+
private ComponentInstance updateServiceUuidProperty(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, List<ComponentInstanceProperty> instanceProperties){
- if(isAllottedResource(instance.getComponentUid()) && instanceProperties != null){
- Optional<ComponentInstanceProperty> propertyUuid = instanceProperties.stream().filter(p->p.getName().equals(SERVICE_UUID_RPOPERTY)).findFirst();
- Optional<ComponentInstanceProperty> propertyInvariantUuid = instanceProperties.stream().filter(p->p.getName().equals(SERVICE_INVARIANT_UUID_RPOPERTY)).findFirst();
- if(propertyUuid.isPresent() && propertyInvariantUuid.isPresent()){
- String serviceInvariantUUID = propertyInvariantUuid.get().getValue();
- Either<List<GraphVertex>, TitanOperationStatus> getLatestOriginServiceRes = getLatestCertifiedService(serviceInvariantUUID);
- if (getLatestOriginServiceRes.isRight()) {
- return instance;
- }
- propertyUuid.get().setValue((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UUID));
- componentInstanceBusinessLogic.createOrUpdatePropertiesValues(component.getComponentType(), component.getUniqueId(), instance.getUniqueId(), Lists.newArrayList(propertyUuid.get()), user.getUserId())
- .right()
- .forEach(e -> LOGGER.debug("Failed to update property {} of the instance {} of the component {}. ", SERVICE_UUID_RPOPERTY, instance.getUniqueId(), component.getName()));
- }
- }
- return instance;
- }
-
+ if(isAllottedResource(instance.getComponentUid()) && instanceProperties != null){
+ Optional<ComponentInstanceProperty> propertyUuid = instanceProperties.stream().filter(p->p.getName().equals(SERVICE_UUID_RPOPERTY)).findFirst();
+ Optional<ComponentInstanceProperty> propertyInvariantUuid = instanceProperties.stream().filter(p->p.getName().equals(SERVICE_INVARIANT_UUID_RPOPERTY)).findFirst();
+ if(propertyUuid.isPresent() && propertyInvariantUuid.isPresent()){
+ String serviceInvariantUUID = propertyInvariantUuid.get().getValue();
+ Either<List<GraphVertex>, TitanOperationStatus> getLatestOriginServiceRes = getLatestCertifiedService(serviceInvariantUUID);
+ if (getLatestOriginServiceRes.isRight()) {
+ return instance;
+ }
+ propertyUuid.get().setValue((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UUID));
+ componentInstanceBusinessLogic.createOrUpdatePropertiesValues(component.getComponentType(), component.getUniqueId(), instance.getUniqueId(), Lists.newArrayList(propertyUuid.get()), user.getUserId())
+ .right()
+ .forEach(e -> log.debug("Failed to update property {} of the instance {} of the component {}. ", SERVICE_UUID_RPOPERTY, instance.getUniqueId(), component.getName()));
+ }
+ }
+ return instance;
+ }
+
private boolean isAllottedResource(String uniqueId){
- ComponentParametersView filters = new ComponentParametersView(true);
- filters.setIgnoreCategories(false);
- Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getResourceRes = toscaOperationFacade.getToscaElement(uniqueId, filters);
- if(getResourceRes.isRight()){
- return false;
- }
- if(getResourceRes.left().value().getCategories() != null && getResourceRes.left().value().getCategories().get(0)!= null){
- return "Allotted Resource".equals(getResourceRes.left().value().getCategories().get(0).getName());
- }
- return false;
- }
-
+ ComponentParametersView filters = new ComponentParametersView(true);
+ filters.setIgnoreCategories(false);
+ Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getResourceRes = toscaOperationFacade.getToscaElement(uniqueId, filters);
+ if(getResourceRes.isRight()){
+ return false;
+ }
+ if(getResourceRes.left().value().getCategories() != null && getResourceRes.left().value().getCategories().get(0)!= null){
+ return ALLOTTED_RESOURCE_NAME.equals(getResourceRes.left().value().getCategories().get(0).getName());
+ }
+ return false;
+ }
+
private boolean isAllottedVf(org.openecomp.sdc.be.model.Component component){
- if(component.getComponentType() != ComponentTypeEnum.RESOURCE){
- return false;
- }
- if(((Resource)component).getResourceType() != ResourceTypeEnum.VF){
- return false;
- }
- return isAllottedResource(component.getUniqueId());
- }
-
+ if(component.getComponentType() != ComponentTypeEnum.RESOURCE || ((Resource)component).getResourceType() != ResourceTypeEnum.VF){
+ return false;
+ }
+ return isAllottedResource(component.getUniqueId());
+ }
+
private Either<ComponentInstance, ResponseFormat> upgradeServiceProxyInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
Either<List<GraphVertex>, TitanOperationStatus> getLatestOriginServiceRes = getLatestCertifiedService(instance.getSourceModelInvariant());
if (getLatestOriginServiceRes.isRight()) {
return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(DaoStatusConverter.convertTitanStatusToStorageStatus(getLatestOriginServiceRes.right().value()), instance.getOriginType().getComponentType())));
}
- newComponentInstance.setComponentVersion((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.VERSION));
- newComponentInstance.setSourceModelUid((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UNIQUE_ID));
- newComponentInstance.setSourceModelName((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.NAME));
- newComponentInstance.setSourceModelUuid((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UUID));
+ ModelConverter.getVertexType(instance.getOriginType().name());
+ Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestByName(instance.getComponentName());
+ if(getOriginRes.isRight()){
+ log.info("Upgrade of {} instance {} upon upgrade migration 1710 process failed due to a reason {}. ",
+ component.getComponentType().getValue(), instance.getName(), getOriginRes.right().value());
+ return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType())));
+ }
+ newComponentInstance.setComponentUid((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UNIQUE_ID));
return changeAssetVersion(component, instance, newComponentInstance);
}
@@ -470,271 +596,319 @@ public class UpgradeMigration1710 implements PostMigration {
}
private boolean upgradeNodeTypes() {
- LOGGER.info("Starting upgrade node types upon upgrade migration 1710 process. ");
- String toscaConformanceLevel = ConfigurationManager.getConfigurationManager().getConfiguration().getToscaConformanceLevel();
- Map<String, List<String>> resourcesForUpgrade = ConfigurationManager.getConfigurationManager().getConfiguration().getResourcesForUpgrade();
- Map<String, org.openecomp.sdc.be.model.Component> upgradedNodeTypesMap = new HashMap<>();
- List<String> nodeTypes;
- if (resourcesForUpgrade.containsKey(toscaConformanceLevel)) {
- nodeTypes = resourcesForUpgrade.get(toscaConformanceLevel);
- if (nodeTypes != null && !nodeTypes.isEmpty()) {
- Either<List<String>, TitanOperationStatus> getRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.NODE_TYPE, ComponentTypeEnum.RESOURCE);
- if (getRes.isRight()) {
+ log.info("Starting upgrade node types upon upgrade migration 1710 process. ");
+ if (nodeTypes != null && !nodeTypes.isEmpty()) {
+ Either<List<String>, TitanOperationStatus> getRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.NODE_TYPE, ComponentTypeEnum.RESOURCE);
+ if (getRes.isRight()) {
+ return false;
+ }
+ for (String toscaResourceName : nodeTypes) {
+ if (!upgradeNodeType(toscaResourceName, getRes.left().value())) {
return false;
}
- List<String> allNodeTypes = getRes.left().value();
-
- for (String toscaResourceName : nodeTypes) {
- Either<List<GraphVertex>, StorageOperationStatus> status = getLatestByName(GraphPropertyEnum.TOSCA_RESOURCE_NAME, toscaResourceName);
- if (status.isRight()) {
- LOGGER.error("Failed to find node type {} ", toscaResourceName);
- return false;
- }
- List<GraphVertex> vList = status.left().value();
- for (GraphVertex vertex : vList) {
- StorageOperationStatus updateRes = upgradeNodeType(vertex, upgradedNodeTypesMap, allNodeTypes, nodeTypes);
- if (updateRes != StorageOperationStatus.OK) {
- return false;
- }
- }
- }
}
}
+ else {
+ log.info("No node types for upgrade are configured");
+ }
return true;
}
- private boolean upgradeVFs() {
- return upgradeVFs(false);
- }
-
- private boolean upgradeAllottedVFs() {
- LOGGER.info("Starting upgrade {} allotted Vfs with upon upgrade migration 1710 process. ", vfAllottedResources.size());
- return upgradeVFs(true);
+ private boolean upgradeNodeType(String toscaResourceName, List<String> allNodeTypes) {
+ Either<List<GraphVertex>, StorageOperationStatus> status = getLatestByName(GraphPropertyEnum.TOSCA_RESOURCE_NAME, toscaResourceName);
+ if (status.isRight()) {
+ log.error("Failed to find node type {} ", toscaResourceName);
+ return false;
+ }
+ List<GraphVertex> vList = status.left().value();
+ for (GraphVertex vertex : vList) {
+ StorageOperationStatus updateRes = upgradeNodeType(vertex, allNodeTypes);
+ if (updateRes != StorageOperationStatus.OK) {
+ return false;
+ }
+ }
+ return true;
}
-
- private boolean upgradeVFs(boolean allottedVfsUpgrade) {
- LOGGER.info("Starting upgrade VFs upon upgrade migration 1710 process. ");
+
+ private boolean upgradeVFs() {
+ log.info("Starting upgrade VFs upon upgrade migration 1710 process. ");
Either<List<String>, TitanOperationStatus> getVfsRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.TOPOLOGY_TEMPLATE, ComponentTypeEnum.RESOURCE);
if (getVfsRes.isRight()) {
- LOGGER.info(UPGRADE_VFS_FAILED);
+ log.info(UPGRADE_VFS_FAILED);
return false;
}
- for (String currUid : getVfsRes.left().value()) {
+ return upgradeVFs(getVfsRes.left().value(), false);
+ }
+
+ private boolean upgradeAllottedVFs() {
+ log.info("Starting upgrade {} allotted Vfs with upon upgrade migration 1710 process. ", vfAllottedResources.size());
+ return upgradeVFs(vfAllottedResources, true);
+ }
+
+ boolean upgradeVFs(List<String> resourceList, boolean isAllottedVfsUpgrade) {
+ for (String currUid : resourceList) {
boolean result = true;
try {
- result = upgradeVf(currUid, allottedVfsUpgrade);
+ result = upgradeVf(currUid, isAllottedVfsUpgrade, false);
if (!result && !skipIfUpgradeVfFailed) {
return false;
}
} catch (Exception e) {
- LOGGER.error("The exception {} occured upon upgrade VFs. ", e);
+ log.error("The exception {} occurred upon upgrade VFs. ", e.getMessage());
+ log.debug("The exception occurred upon upgrade VFs:", e);
result = false;
if (!skipIfUpgradeVfFailed) {
- return false;
+ return false;
}
}
finally {
- if (!result) {
- LOGGER.error("Failed to upgrade RESOURCE with uniqueId {} ", currUid);
- titanDao.rollback();
+ if (result) {
+ log.info("Resource upgrade finished successfully: uniqueId {} ", currUid);
+ titanDao.commit();
}
else {
- LOGGER.info("RESOURCE upgrade finished successfully: uniqueId {} ", currUid);
- titanDao.commit();
+ log.error("Failed to upgrade resource with uniqueId {} ", currUid);
+ titanDao.rollback();
}
+ markCheckedOutResourceAsDeletedIfUpgradeFailed(currUid, result);
}
}
- LOGGER.info("Upgrade VFs upon upgrade migration 1710 process finished successfully. ");
+ log.info("Upgrade VFs upon upgrade migration 1710 process finished successfully. ");
return true;
}
- private boolean upgradeVf(String uniqueId, boolean allottedVfsUpgrade) {
- LOGGER.info("Starting upgrade VF with uniqueId {} upon upgrade migration 1710 process. ", uniqueId);
- Either<String, StorageOperationStatus> latestVersionRes;
+ private boolean upgradeVf(String uniqueId, boolean allottedVfsUpgrade, boolean isInstance) {
+ log.info("Starting upgrade VF with uniqueId {} upon upgrade migration 1710 process. ", uniqueId);
Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getRes = toscaOperationFacade.getToscaElement(uniqueId);
if (getRes.isRight()) {
- LOGGER.debug("Failed to fetch VF with uniqueId {} upon upgrade migration 1710 process. ", uniqueId);
+ log.debug("Failed to fetch VF with uniqueId {} upon upgrade migration 1710 process. ", uniqueId);
outputHandler.addRecord(ComponentTypeEnum.RESOURCE.name(), UNKNOWN, UNKNOWN, uniqueId, MigrationResult.MigrationStatus.FAILED.name(), getRes.right().value());
return false;
}
if(!allottedVfsUpgrade && isAllottedVf(getRes.left().value())){
- vfAllottedResources.add(uniqueId);
- return true;
+ keepAllottedResourceIfSupported(uniqueId);
+ return true;
}
if (StringUtils.isNotEmpty(getRes.left().value().getCsarUUID())) {
- LOGGER.info("Going to fetch the latest version of VSP with csarUUID {} upon upgrade migration 1710 process. ", getRes.left().value().getCsarUUID());
- latestVersionRes = csarOperation.getCsarLatestVersion(getRes.left().value().getCsarUUID(), user);
+ log.info("Going to fetch the latest version of VSP with csarUUID {} upon upgrade migration 1710 process. ", getRes.left().value().getCsarUUID());
+ Either<String, StorageOperationStatus> latestVersionRes = csarOperation.getCsarLatestVersion(getRes.left().value().getCsarUUID(), user);
if (latestVersionRes.isRight()) {
- LOGGER.debug("Failed to fetch the latest version of VSP with csarUUID {} upon upgrade migration 1710 process. ", getRes.left().value().getCsarUUID());
+ log.debug("Failed to fetch the latest version of VSP with csarUUID {} upon upgrade migration 1710 process. ", getRes.left().value().getCsarUUID());
outputHandler.addRecord(getRes.left().value().getComponentType().name(), getRes.left().value().getName(), getRes.left().value().getUUID(), getRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), latestVersionRes.right().value());
return false;
}
if (isGreater(latestVersionRes.left().value(), getRes.left().value().getCsarVersion())) {
- return upgradeVfWithLatestVsp(getRes.left().value(), latestVersionRes);
+ return upgradeVfWithLatestVsp(getRes.left().value(), latestVersionRes.left().value(), isInstance);
}
- if (!isVfcUpgradeRequired) {
- LOGGER.warn("Warning: No need to upgrade VF with name {}, invariantUUID {}, version {} and VSP version {}. No new version of VSP. ", getRes.left().value().getName(), getRes.left().value().getInvariantUUID(), getRes.left().value().getVersion(), getRes.left().value().getCsarVersion());
+ if (isVfcUpgradeRequired) {
+ return upgradeComponentWithLatestGeneric(getRes.left().value(), isInstance);
}
+ log.warn("Warning: No need to upgrade VF with name {}, invariantUUID {}, version {} and VSP version {}. No new version of VSP. ", getRes.left().value().getName(), getRes.left().value().getInvariantUUID(), getRes.left().value().getVersion(), getRes.left().value().getCsarVersion());
+ return true;
+ }
+ else {
+ return upgradeComponentWithLatestGeneric(getRes.left().value(), isInstance);
+ }
+ }
+
+ private void keepAllottedResourceIfSupported(final String uniqueId) {
+ if (isAllottedAndProxySupported && !vfAllottedResources.contains(uniqueId)) {
+ log.info("Add a resource with uniqueId {} to allotted resource list", uniqueId);
+ vfAllottedResources.add(uniqueId);
}
- return upgradeComponentWithLatestGeneric(getRes.left().value());
}
- private boolean upgradeVfWithLatestVsp(org.openecomp.sdc.be.model.Component vf, Either<String, StorageOperationStatus> latestVersionRes) {
- LOGGER.info("Starting upgrade vf with name {}, invariantUUID {}, version {} and latest VSP version {} upon upgrade migration 1710 process. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value());
- LOGGER.info("Starting to perform check out of vf with name {}, invariantUUID {}, version {}. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion());
- Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = lifecycleBusinessLogic.changeComponentState(vf.getComponentType(), vf.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
+ private boolean upgradeVfWithLatestVsp(org.openecomp.sdc.be.model.Component vf, String latestVersion, boolean isInstance) {
+ log.info("Starting upgrade vf with name {}, invariantUUID {}, version {} and latest VSP version {} upon upgrade migration 1710 process. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersion);
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = checkOutComponent(vf);
if (checkouRes.isRight()) {
outputHandler.addRecord(vf.getComponentType().name(), vf.getName(), vf.getUUID(), vf.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkouRes.right().value().getFormattedMessage());
return false;
}
- LOGGER.info("Starting update vf with name {}, invariantUUID {}, version {} and latest VSP {}. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value());
Resource resourceToUpdate = new Resource(((Resource) checkouRes.left().value()).getComponentMetadataDefinition());
resourceToUpdate.setDerivedFromGenericType(((Resource) checkouRes.left().value()).getDerivedFromGenericType());
resourceToUpdate.setDerivedFromGenericVersion(((Resource) checkouRes.left().value()).getDerivedFromGenericVersion());
- resourceToUpdate.setCsarVersion(Double.toString(Double.parseDouble(latestVersionRes.left().value())));
- Either<Resource, ResponseFormat> updateResourceFromCsarRes = resourceBusinessLogic.validateAndUpdateResourceFromCsar(resourceToUpdate, user, null, null, resourceToUpdate.getUniqueId());
- if (updateResourceFromCsarRes.isRight()) {
- outputHandler.addRecord(resourceToUpdate.getComponentType().name(), resourceToUpdate.getName(), resourceToUpdate.getUUID(), resourceToUpdate.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateResourceFromCsarRes.right().value().getFormattedMessage());
- LOGGER.info("Failed to update vf with name {}, invariantUUID {}, version {} and latest VSP {}. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value());
+ resourceToUpdate.setCsarVersion(Double.toString(Double.parseDouble(latestVersion)));
+ resourceToUpdate.setCategories(((Resource)checkouRes.left().value()).getCategories());
+ try {
+ Resource updateResourceFromCsarRes = resourceBusinessLogic.validateAndUpdateResourceFromCsar(resourceToUpdate, user, null, null, resourceToUpdate.getUniqueId());
+ } catch(ComponentException e){
+ outputHandler.addRecord(resourceToUpdate.getComponentType().name(), resourceToUpdate.getName(), resourceToUpdate.getUUID(), resourceToUpdate.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), e.getResponseFormat().getFormattedMessage());
+ log.info("Failed to update vf with name {}, invariantUUID {}, version {} and latest VSP {}. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersion);
return false;
}
Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> certifyRes = performFullCertification(checkouRes.left().value());
if (certifyRes.isRight()) {
- LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getVersion(), LifeCycleTransitionEnum.CERTIFY);
+ log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getVersion(), LifeCycleTransitionEnum.CERTIFY);
outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage());
return false;
}
- LOGGER.info("Full certification of vf with name {}, invariantUUID {}, version {} finished . ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value());
- outputHandler.addRecord(certifyRes.left().value().getComponentType().name(), certifyRes.left().value().getName(), certifyRes.left().value().getUUID(), certifyRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(), UpgradeStatus.UPGRADED);
+ log.info("Full certification of vf with name {}, invariantUUID {}, version {} finished . ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersion);
+ outputHandler.addRecord(certifyRes.left().value().getComponentType().name(), certifyRes.left().value().getName(), certifyRes.left().value().getUUID(), certifyRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(), getVfUpgradeStatus(true, isInstance));
return true;
}
- private boolean upgradeComponentWithLatestGeneric(org.openecomp.sdc.be.model.Component component) {
+ private boolean upgradeComponentWithLatestGeneric(org.openecomp.sdc.be.model.Component component, boolean isInstance) {
String derivedFromGenericType = component.getDerivedFromGenericType();
String derivedFromGenericVersion = component.getDerivedFromGenericVersion();
org.openecomp.sdc.be.model.Component updatedComponent = component;
+ if (failedToFindDerivedResourcesOfNodeType(component, derivedFromGenericType, derivedFromGenericVersion)) {
+ return false;
+ }
+ if (StringUtils.isEmpty(derivedFromGenericType) ||
+ latestVersionExists(latestGenericTypes.get(derivedFromGenericType), derivedFromGenericVersion) ||
+ isVfcUpgradeRequired ||
+ isAllottedAndProxySupported) {
+ if (StringUtils.isNotEmpty(derivedFromGenericType)) {
+ log.info("Newer version {} of derived from generic type {} exists. ", latestGenericTypes.get(derivedFromGenericType).getJsonMetadataField(JsonPresentationFields.VERSION), derivedFromGenericType);
+ }
+ else {
+ log.info("The vf resource with name {}, invariantUUID {}, version {}, has an empty derivedFromGenericType field. ", component.getName(), component.getInvariantUUID(), component.getVersion());
+ }
+ updatedComponent = checkOutAndCertifyComponent(component);
+ } else {
+ log.info("The version {} of derived from generic type {} is up to date. No need to upgrade component with name {}, invariantUUID {} and version {}. ", latestGenericTypes.get(derivedFromGenericType), derivedFromGenericType, component.getName(), component.getInvariantUUID(), component.getVersion());
+ }
+ if (updatedComponent != null) {
+ log.info(UPGRADE_COMPONENT_SUCCEEDED, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion());
+ outputHandler.addRecord(updatedComponent.getComponentType().name(), updatedComponent.getName(), updatedComponent.getUUID(), updatedComponent.getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(),
+ getVfUpgradeStatus(!updatedComponent.equals(component), isInstance));
+ }
+ return true;
+ }
+
+ private org.openecomp.sdc.be.model.Component checkOutAndCertifyComponent(org.openecomp.sdc.be.model.Component component) {
+
+ log.info("Starting to perform check out of vf with name {}, invariantUUID {}, version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion());
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkoutRes = checkOutComponent(component);
+ if (checkoutRes.isRight()) {
+ log.error(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CHECKOUT);
+ outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkoutRes.right().value().getFormattedMessage());
+ return null;
+ }
+
+ if (updateCompositionFailed(component, checkoutRes.left().value())) {
+ return null;
+ }
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> certifyRes = performFullCertification(checkoutRes.left().value());
+ if (certifyRes.isRight()) {
+ log.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "performFullCertification", certifyRes.right().value());
+ outputHandler.addRecord(checkoutRes.left().value().getComponentType().name(), checkoutRes.left().value().getName(), checkoutRes.left().value().getInvariantUUID(), checkoutRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage());
+ return null;
+ }
+ return certifyRes.left().value();
+ }
+
+ private boolean failedToFindDerivedResourcesOfNodeType(org.openecomp.sdc.be.model.Component component, String derivedFromGenericType, String derivedFromGenericVersion) {
if (StringUtils.isNotEmpty(derivedFromGenericType) && !latestGenericTypes.containsKey(derivedFromGenericType)) {
- LOGGER.info("Starting upgrade vf with name {}, invariantUUID {}, version {}, latest derived from generic type {}, latest derived from generic version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion(), derivedFromGenericType, derivedFromGenericVersion);
- LOGGER.info("Starting to fetch latest generic node type {}. ", derivedFromGenericType);
+ log.info("Starting upgrade vf with name {}, invariantUUID {}, version {}, latest derived from generic type {}, latest derived from generic version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion(), derivedFromGenericType, derivedFromGenericVersion);
+ log.info("Starting to fetch latest generic node type {}. ", derivedFromGenericType);
Either<List<GraphVertex>, TitanOperationStatus> getDerivedRes = findDerivedResources(derivedFromGenericType);
if (getDerivedRes.isRight()) {
outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getDerivedRes.right().value());
- LOGGER.info("Failed to upgrade component with name {}, invariantUUID {}, version {} and latest generic. Status is {}. ", component.getName(), component.getInvariantUUID(), component.getVersion(), derivedFromGenericType);
- return false;
+ log.info("Failed to upgrade component with name {}, invariantUUID {}, version {} and latest generic. Status is {}. ", component.getName(), component.getInvariantUUID(), component.getVersion(), derivedFromGenericType);
+ return true;
}
latestGenericTypes.put(derivedFromGenericType, getDerivedRes.left().value().get(0));
}
- if (StringUtils.isEmpty(derivedFromGenericType) ||
- latestVersionExists(latestGenericTypes.get(derivedFromGenericType), derivedFromGenericVersion) ||
- isVfcUpgradeRequired) {
- if (StringUtils.isNotEmpty(derivedFromGenericType))
- LOGGER.info("Newer version {} of derived from generic type {} exists. ", latestGenericTypes.get(derivedFromGenericType).getJsonMetadataField(JsonPresentationFields.VERSION), derivedFromGenericType);
- else
- LOGGER.info("The vf resource with name {}, invariantUUID {}, version {}, has an empty derivedFromGenericType field. ", component.getName(), component.getInvariantUUID(), component.getVersion());
-
- LOGGER.info("Starting to perform check out of vf with name {}, invariantUUID {}, version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion());
- Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
- if (checkouRes.isRight()) {
- LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CHECKOUT);
- outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkouRes.right().value().getFormattedMessage());
- return false;
- }
- //update included VFCs, if it is required as per configuration
- if (CollectionUtils.isNotEmpty(checkouRes.left().value().getComponentInstances())) {
- LOGGER.info("VFC upgrade is required: updating components of vf with name {}, invariantUUID {}, version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion());
- Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateCompositionRes =
- updateComposition(checkouRes.left().value());
- if (updateCompositionRes.isRight()) {
- LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getVersion(), "updateComposition", updateCompositionRes.right().value().getFormattedMessage());
- outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateCompositionRes.right().value().getFormattedMessage());
- return false;
+ return false;
+ }
+
+ private boolean updateCompositionFailed(org.openecomp.sdc.be.model.Component component, org.openecomp.sdc.be.model.Component checkoutResource) {
+ //try to update included VFCs, if it is either required as per configuration or an allotted resource
+ if ((isVfcUpgradeRequired && CollectionUtils.isNotEmpty(checkoutResource.getComponentInstances())) || isAllottedAndProxySupported) {
+ log.info("VFC upgrade is required: updating components of vf with name {}, invariantUUID {}, version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion());
+ Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateCompositionRes = updateComposition(checkoutResource);
+ if (updateCompositionRes.isRight()) {
+ if (log.isErrorEnabled()) {
+ log.error(FAILED_TO_UPGRADE_COMPONENT, checkoutResource.getComponentType().name(), checkoutResource.getName(), checkoutResource.getInvariantUUID(), checkoutResource.getVersion(), "updateComposition", updateCompositionRes.right().value().getFormattedMessage());
}
+ outputHandler.addRecord(checkoutResource.getComponentType().name(), checkoutResource.getName(), checkoutResource.getUUID(), checkoutResource.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateCompositionRes.right().value().getFormattedMessage());
+ return true;
}
- Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> certifyRes = performFullCertification(checkouRes.left().value());
- if (certifyRes.isRight()) {
- LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFY);
- outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage());
- return false;
- }
- updatedComponent = certifyRes.left().value();
- } else {
- LOGGER.info("The version {} of derived from generic type {} is up to date. No need to upgrade component with name {}, invariantUUID {} and version {}. ", latestGenericTypes.get(derivedFromGenericType), derivedFromGenericType, component.getName(), component.getInvariantUUID(), component.getVersion());
}
- LOGGER.info(UPGRADE_COMPONENT_SUCCEEDED, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion());
- outputHandler.addRecord(updatedComponent.getComponentType().name(), updatedComponent.getName(), updatedComponent.getUUID(), updatedComponent.getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(), updatedComponent.equals(component) ? UpgradeStatus.NOT_UPGRADED : UpgradeStatus.UPGRADED);
- return true;
+ return false;
}
- private StorageOperationStatus upgradeNodeType(GraphVertex nodeTypeV, Map<String, org.openecomp.sdc.be.model.Component> upgradedNodeTypesMap, List<String> allCertifiedUids, List<String> nodeTypes) {
+ private StorageOperationStatus upgradeNodeType(GraphVertex nodeTypeV, List<String> allCertifiedUids) {
StorageOperationStatus result = StorageOperationStatus.OK;
- LOGGER.info("Starting upgrade node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
- LOGGER.info("Starting to find derived to for node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
+ log.info("Starting upgrade node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
+ log.info("Starting to find derived to for node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
Either<List<GraphVertex>, TitanOperationStatus> parentResourceRes = titanDao.getParentVertecies(nodeTypeV, EdgeLabelEnum.DERIVED_FROM, JsonParseFlagEnum.ParseMetadata);
if (parentResourceRes.isRight() && parentResourceRes.right().value() != TitanOperationStatus.NOT_FOUND) {
return DaoStatusConverter.convertTitanStatusToStorageStatus(parentResourceRes.right().value());
}
- List<GraphVertex> derivedResourcesUid = new ArrayList<>();
- if (parentResourceRes.isLeft()) {
- for (GraphVertex chV : parentResourceRes.left().value()) {
- Optional<String> op = allCertifiedUids.stream().filter(id -> id.equals((String) chV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID))).findAny();
- if (op.isPresent()) {
- derivedResourcesUid.add(chV);
- }
- }
- }
+ List<GraphVertex> derivedResourcesUid = getAllDerivedGraphVertices(allCertifiedUids, parentResourceRes);
String uniqueId = (String) nodeTypeV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID);
+
Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getRes = toscaOperationFacade.getToscaElement(uniqueId);
if (getRes.isRight()) {
- LOGGER.info("failed to fetch element with uniqueId {} ", uniqueId);
+ log.info("failed to fetch element with uniqueId {} ", uniqueId);
return getRes.right().value();
}
- org.openecomp.sdc.be.model.Resource nt = (Resource) getRes.left().value();
- boolean isNeedToUpgrade = true;
- if (upgradedNodeTypesMap.containsKey(nt.getToscaResourceName()) || nodeTypes.stream().anyMatch(p -> p.equals(nt.getToscaResourceName()))) {
- isNeedToUpgrade = false;
- }
- if (isNeedToUpgrade) {
- LOGGER.info("Starting to perform check out of node type with name {}, invariantUUID {}, version {}. ", nt.getName(), nt.getInvariantUUID(), nt.getVersion());
- Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = lifecycleBusinessLogic.changeComponentState(nt.getComponentType(), nt.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
- if (checkouRes.isRight()) {
- return StorageOperationStatus.GENERAL_ERROR;
- }
- org.openecomp.sdc.be.model.Component upgradetComp = checkouRes.left().value();
- boolean res = performFullCertification(upgradetComp).isLeft();
- if (!res) {
+ Resource nodeType = (Resource)getRes.left().value();
+ if (!upgradedNodeTypesMap.containsKey(nodeType.getToscaResourceName()) && nodeTypes.stream().noneMatch(p -> p.equals(nodeType.getToscaResourceName()))
+ && !isNodeTypeUpgradeSucceeded(nodeType)) {
return StorageOperationStatus.GENERAL_ERROR;
- }
- upgradedNodeTypesMap.put(nt.getToscaResourceName(), upgradetComp);
- titanDao.commit();
}
for (GraphVertex chV : derivedResourcesUid) {
- result = upgradeNodeType(chV, upgradedNodeTypesMap, allCertifiedUids, nodeTypes);
- LOGGER.info("Upgrade node type with name {}, invariantUUID {}, version {} has been finished with the status {}", chV.getMetadataProperty(GraphPropertyEnum.NAME), chV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), chV.getMetadataProperty(GraphPropertyEnum.VERSION), result);
+ result = upgradeNodeType(chV, allCertifiedUids);
+ log.info("Upgrade node type with name {}, invariantUUID {}, version {} has been finished with the status {}", chV.getMetadataProperty(GraphPropertyEnum.NAME), chV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), chV.getMetadataProperty(GraphPropertyEnum.VERSION), result);
}
return result;
}
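
The reworked upgradeNodeType(GraphVertex, List) keeps the original recursive walk: the current node type is upgraded once per TOSCA resource name, then every certified resource that derives from it (reached over the DERIVED_FROM edge) is visited in turn, and the walk stops at the first failure. A simplified, self-contained version of that traversal, where Node and upgrade() are illustrative placeholders:

import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

class DerivedFromWalkSketch {
    interface Node {
        String id();
        List<Node> derivedChildren();   // stands in for the certified DERIVED_FROM vertices
    }

    // Upgrade the node once, remember it, then recurse into its children;
    // the walk aborts on the first failed upgrade, as upgradeNodeType does.
    static boolean upgradeTree(Node node, Map<String, Boolean> upgraded, Predicate<Node> upgrade) {
        if (!upgraded.containsKey(node.id())) {
            if (!upgrade.test(node)) {
                return false;
            }
            upgraded.put(node.id(), true);
        }
        for (Node child : node.derivedChildren()) {
            if (!upgradeTree(child, upgraded, upgrade)) {
                return false;
            }
        }
        return true;
    }
}
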
+ private boolean isNodeTypeUpgradeSucceeded(Resource nodeType) {
+ log.info("Starting to perform check out of node type with name {}, invariantUUID {}, version {}. ", nodeType.getName(), nodeType.getInvariantUUID(), nodeType.getVersion());
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes =
+ lifecycleBusinessLogic.changeComponentState(nodeType.getComponentType(), nodeType.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
+ if (checkouRes.isRight()) {
+ log.info("Failed to check out node type with name {}, invariantUUID {} due to {}", nodeType.getName(), nodeType.getInvariantUUID(), checkouRes.right().value());
+ return false;
+ }
+ if (performFullCertification(checkouRes.left().value()).isLeft()) {
+ upgradedNodeTypesMap.put(nodeType.getToscaResourceName(), checkouRes.left().value());
+ titanDao.commit();
+ return true;
+ }
+ return false;
+ }
+
+ private List<GraphVertex> getAllDerivedGraphVertices(List<String> allCertifiedUids, Either<List<GraphVertex>, TitanOperationStatus> parentResources) {
+ List<GraphVertex> derivedResourcesUid = new ArrayList<>();
+
+ if (parentResources.isLeft()) {
+ for (GraphVertex chV : parentResources.left().value()) {
+ Optional<String> op = allCertifiedUids.stream().filter(id -> id.equals((String) chV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID))).findAny();
+ if (op.isPresent()) {
+ derivedResourcesUid.add(chV);
+ }
+ }
+ }
+ return derivedResourcesUid;
+ }
+
private Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> performFullCertification(org.openecomp.sdc.be.model.Component component) {
- LOGGER.info("Starting to perform full certification of {} with name {}, invariantUUID {}, version {}. ",
+ log.info("Starting to perform full certification of {} with name {}, invariantUUID {}, version {}. ",
component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion());
Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CERTIFICATION_REQUEST, changeInfo, true, false);
if (changeStateEither.isRight()) {
- LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFICATION_REQUEST);
+ log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFICATION_REQUEST);
return changeStateEither;
}
changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.START_CERTIFICATION, changeInfo, true, false);
if (changeStateEither.isRight()) {
- LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.START_CERTIFICATION);
+ log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.START_CERTIFICATION);
return changeStateEither;
}
changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.CERTIFY, changeInfo, true, false);
if (changeStateEither.isRight()) {
- LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFY);
+ log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFY);
} else {
- LOGGER.info("Full certification of {} with name {}, invariantUUID {}, version {} finished successfully",
+ log.info("Full certification of {} with name {}, invariantUUID {}, version {} finished successfully",
changeStateEither.left().value().getComponentType().getValue(), changeStateEither.left().value().getName(),
changeStateEither.left().value().getInvariantUUID(), changeStateEither.left().value().getVersion());
}
@@ -756,22 +930,24 @@ public class UpgradeMigration1710 implements PostMigration {
}
private boolean isGreater(String latestVersion, String currentVersion) {
- if (latestVersion != null && currentVersion == null)
+ if (latestVersion != null && currentVersion == null) {
return true;
- if (latestVersion == null)
+ }
+ if (latestVersion == null) {
return false;
+ }
return Double.parseDouble(latestVersion) > Double.parseDouble(currentVersion);
}
private Either<List<String>, TitanOperationStatus> getAllLatestCertifiedComponentUids(VertexTypeEnum vertexType, ComponentTypeEnum componentType) {
- LOGGER.info("Starting to fetch all latest certified not checked out components with type {} upon upgrade migration 1710 process", componentType);
+ log.info("Starting to fetch all latest certified not checked out components with type {} upon upgrade migration 1710 process", componentType);
Either<List<String>, TitanOperationStatus> result = null;
Map<String, String> latestCertifiedMap = new HashMap<>();
Map<String, String> latestNotCertifiedMap = new HashMap<>();
- Either<List<GraphVertex>, TitanOperationStatus> getComponentsRes = getAllLatestCertifiedComponents(vertexType, componentType);
+ Either<List<GraphVertex>, TitanOperationStatus> getComponentsRes = getAllLatestComponents(vertexType, componentType);
if (getComponentsRes.isRight() && getComponentsRes.right().value() != TitanOperationStatus.NOT_FOUND) {
- LOGGER.error("Failed to fetch all latest certified not checked out components with type {}. Status is {}. ", componentType, getComponentsRes.right().value());
+ log.error("Failed to fetch all latest certified not checked out components with type {}. Status is {}. ", componentType, getComponentsRes.right().value());
result = Either.right(getComponentsRes.right().value());
}
if (getComponentsRes.isRight()) {
@@ -791,7 +967,7 @@ public class UpgradeMigration1710 implements PostMigration {
return result;
}
- private Either<List<GraphVertex>, TitanOperationStatus> getAllLatestCertifiedComponents(VertexTypeEnum vertexType, ComponentTypeEnum componentType) {
+ private Either<List<GraphVertex>, TitanOperationStatus> getAllLatestComponents(VertexTypeEnum vertexType, ComponentTypeEnum componentType) {
Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, componentType.name());
@@ -799,8 +975,9 @@ public class UpgradeMigration1710 implements PostMigration {
Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
- if (vertexType == VertexTypeEnum.TOPOLOGY_TEMPLATE && componentType == ComponentTypeEnum.RESOURCE)
+ if (vertexType == VertexTypeEnum.TOPOLOGY_TEMPLATE && componentType == ComponentTypeEnum.RESOURCE) {
propertiesNotToMatch.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.CVFC.name());
+ }
return titanDao.getByCriteria(vertexType, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseMetadata);
}
@@ -815,7 +992,7 @@ public class UpgradeMigration1710 implements PostMigration {
Either<List<GraphVertex>, TitanOperationStatus> highestResources = titanDao.getByCriteria(null, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseMetadata);
if (highestResources.isRight()) {
TitanOperationStatus status = highestResources.right().value();
- LOGGER.debug("Failed to fetch resource with name {}. Status is {} ", nodeName, status);
+ log.debug("Failed to fetch resource with name {}. Status is {} ", nodeName, status);
return Either.right(DaoStatusConverter.convertTitanStatusToStorageStatus(status));
}
List<GraphVertex> resources = highestResources.left().value();
@@ -828,4 +1005,147 @@ public class UpgradeMigration1710 implements PostMigration {
return Either.left(result);
}
+ private void deleteMarkedComponents(NodeTypeEnum componentType, int toBeDeleted) {
+ Map<NodeTypeEnum, Either<List<String>, ResponseFormat>> cleanComponentsResult;
+ List<NodeTypeEnum> cleanComponents = new ArrayList<>();
+ cleanComponents.add(componentType);
+ try {
+ log.info("Trying to delete {} components of type {} marked as deleted", toBeDeleted, componentType);
+ cleanComponentsResult = componentsCleanBusinessLogic.cleanComponents(cleanComponents, true);
+ logDeleteResult(componentType, cleanComponentsResult.get(componentType));
+ }
+ catch (Exception e) {
+ log.error("Exception occurred {}", e.getMessage());
+ log.debug("Exception occurred", e);
+ }
+ }
+
+ private void logDeleteResult(NodeTypeEnum type, Either<List<String>, ResponseFormat> deleteResult) {
+ if (deleteResult == null) {
+ return;
+ }
+ if (deleteResult.isLeft()) {
+ log.info("Checked out {} versions are deleted successfully", type.getName());
+ }
+ else {
+ log.info("Cleanup of checked out {} versions failed due to the error: {}", type.getName(), deleteResult.right().value().getFormattedMessage());
+ }
+ }
+
+ private void markCheckedOutResourceAsDeletedIfUpgradeFailed(String certUid, boolean isNotFailed) {
+ String checkedOutUniqueId = certifiedToNextCheckedOutUniqueId.remove(certUid);
+ if (!isNotFailed && checkedOutUniqueId != null) {
+ try {
+ //mark as deleted the checked out resource as this upgrade failed
+ ResponseFormat respFormat = resourceBusinessLogic.deleteResource(checkedOutUniqueId.toLowerCase(), user);
+ log.info("Checked out resource uniqueId = {} is marked as deleted, status: {}", checkedOutUniqueId, respFormat.getFormattedMessage());
+ deleteResourcesIfLimitIsReached();
+ }
+ catch (Exception e) {
+ log.error("Error occurred:", e);
+ }
+ }
+ }
+
+ private void markCheckedOutServiceAsDeletedIfUpgradeFailed(String certUid, boolean isNotFailed) {
+ String checkedOutUniqueId = certifiedToNextCheckedOutUniqueId.remove(certUid);
+ if (!isNotFailed && checkedOutUniqueId != null) {
+ try {
+ //delete the checked out resource as this upgrade failed
+ ResponseFormat respFormat = serviceBusinessLogic.deleteService(checkedOutUniqueId.toLowerCase(), user);
+ log.info("Checked out service uniqueId = {} is marked as deleted, status: {}", checkedOutUniqueId, respFormat.getFormattedMessage());
+ deleteServicesIfLimitIsReached();
+ } catch (Exception e) {
+ log.error("Error occurred:", e);
+ }
+ }
+
+ }
+
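+ // Batch cleanup: once the number of components marked as deleted reaches maxDeleteComponents, they are physically removed and the counter is reset.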
+ void deleteResourcesIfLimitIsReached() {
+ markedAsDeletedResourcesCnt++;
+ if (markedAsDeletedResourcesCnt >= maxDeleteComponents) {
+ deleteMarkedComponents(NodeTypeEnum.Resource, markedAsDeletedResourcesCnt);
+ markedAsDeletedResourcesCnt = 0;
+ }
+ }
+
+ void deleteServicesIfLimitIsReached() {
+ markedAsDeletedServicesCnt++;
+ if (markedAsDeletedServicesCnt >= maxDeleteComponents) {
+ deleteMarkedComponents(NodeTypeEnum.Service, markedAsDeletedServicesCnt);
+ markedAsDeletedServicesCnt = 0;
+ }
+ }
+
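+ // The delete operation is locked before batch cleanup; if another node already holds the lock, acquisition is retried for up to deleteLockTimeoutInSeconds.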
+ boolean isLockDeleteOperationSucceeded() {
+ StorageOperationStatus status = componentsCleanBusinessLogic.lockDeleteOperation();
+
+ switch(status) {
+ case OK:
+ log.info("Lock delete operation succeeded");
+ isCleanupLocked = true;
+ break;
+ case FAILED_TO_LOCK_ELEMENT:
+ log.info("Delete operation node is already locked");
+ isCleanupLocked = isLockRetrySucceeded();
+ break;
+ default:
+ log.error("Lock delete operation failed due to the error: {}", status);
+ isCleanupLocked = false;
+ break;
+ }
+ return isCleanupLocked;
+ }
+
+ private boolean isLockRetrySucceeded() {
+ long startTime = System.currentTimeMillis();
+ //try to lock the cleanup resource until configurable time interval is finished
+ while (System.currentTimeMillis() - startTime <= deleteLockTimeoutInSeconds * 1000) {
+ try {
+ //sleep one second and try lock again
+ Thread.sleep(1000);
+ if (componentsCleanBusinessLogic.lockDeleteOperation() == StorageOperationStatus.OK) {
+ return true;
+ }
+ } catch (InterruptedException e) {
+ log.error("Error occurred: {}", e.getMessage());
+ }
+ }
+ return false;
+ }
+
+ void unlockDeleteOperation() {
+ if (isCleanupLocked) {
+ try {
+ componentsCleanBusinessLogic.unlockDeleteOperation();
+ log.info("Lock delete operation is canceled");
+ isCleanupLocked = false;
+ }
+ catch (Exception e) {
+ log.debug("Failed to unlock delete operation", e);
+ log.error("Failed to unlock delete operation due to the error {}", e.getMessage());
+ }
+ }
+ }
+
+ private Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkOutComponent(org.openecomp.sdc.be.model.Component component) {
+ log.info("Starting to perform check out of {} {}, uniqueId = {}", component.getComponentType(), component.getName(), component.getUniqueId());
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkoutRes =
+ lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
+ if (checkoutRes.isLeft()) {
+ //map the certified component uniqueId (received from upgradeVf) to the uniqueId of its newly checked out version
+ certifiedToNextCheckedOutUniqueId.put(component.getUniqueId(), checkoutRes.left().value().getUniqueId());
+ log.debug("Add checked out component uniqueId = {} produced from certified component uniqueId = {} to the checked out map", checkoutRes.left().value().getUniqueId(), component.getUniqueId());
+ }
+ return checkoutRes;
+ }
+
+ UpgradeStatus getVfUpgradeStatus(boolean isUpgraded, boolean isInstance) {
+ if (isUpgraded) {
+ return isInstance ? UpgradeStatus.UPGRADED_AS_INSTANCE : UpgradeStatus.UPGRADED;
+ }
+ return UpgradeStatus.NOT_UPGRADED;
+ }
+
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigration.java
index 542db17425..d520eee1b8 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigration.java
@@ -1,10 +1,6 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1802;
-import java.math.BigInteger;
-import java.util.Arrays;
-import java.util.List;
-import java.util.stream.Collectors;
-
+import fj.data.Either;
import org.apache.commons.collections.ListUtils;
import org.apache.tinkerpop.gremlin.structure.Direction;
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
@@ -21,15 +17,18 @@ import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
import org.openecomp.sdc.be.model.jsontitan.operations.ToscaElementOperation;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.stereotype.Component;
-import fj.data.Either;
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
@Component
public class SdcCatalogMigration implements Migration {
- private static final Logger LOGGER = LoggerFactory.getLogger(SdcCatalogMigration.class);
+ private static final Logger LOGGER = Logger.getLogger(SdcCatalogMigration.class);
private static final List<ResourceTypeEnum> EXCLUDE_TYPES = Arrays.asList(ResourceTypeEnum.VFCMT, ResourceTypeEnum.Configuration);
private ToscaElementOperation toscaElementOperation;
@@ -93,15 +92,23 @@ public class SdcCatalogMigration implements Migration {
private Either<List<GraphVertex>, TitanOperationStatus> getAllCatalogVertices() {
LOGGER.info("fetching all catalog resources");
return toscaElementOperation.getListOfHighestComponents(ComponentTypeEnum.RESOURCE, EXCLUDE_TYPES, JsonParseFlagEnum.ParseMetadata)
+ .right()
+ .bind(this::errOrEmptyListIfNotFound)
.left()
.bind(this::getAllCatalogVertices);
}
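+ // A NOT_FOUND status from the catalog query is mapped to an empty list so the migration proceeds instead of failing.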
+ private Either<List<GraphVertex>, TitanOperationStatus> errOrEmptyListIfNotFound(TitanOperationStatus err) {
+ return TitanOperationStatus.NOT_FOUND.equals(err) ? Either.left(new ArrayList<>()) : Either.right(err);
+ }
+
@SuppressWarnings("unchecked")
private Either<List<GraphVertex>, TitanOperationStatus> getAllCatalogVertices(List<GraphVertex> allResourceCatalogVertices) {
LOGGER.info("number of resources: {}", allResourceCatalogVertices.size());
LOGGER.info("fetching all catalog services");
return toscaElementOperation.getListOfHighestComponents(ComponentTypeEnum.SERVICE, EXCLUDE_TYPES, JsonParseFlagEnum.ParseMetadata)
+ .right()
+ .bind(this::errOrEmptyListIfNotFound)
.left()
.map(allServiceVertices -> ListUtils.union(allServiceVertices, allResourceCatalogVertices));
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java
new file mode 100644
index 0000000000..9634025867
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java
@@ -0,0 +1,253 @@
+/*
+ * Copyright © 2016-2018 European Support Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
+
+import com.google.common.collect.ImmutableSet;
+import com.thinkaurelius.titan.core.TitanVertex;
+import fj.data.Either;
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.jsongraph.utils.IdBuilderUtils;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.elements.ForwardingPathDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.tosca.ToscaDataDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.ComponentParametersView;
+import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.jsontitan.operations.ForwardingPathOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
+
+@org.springframework.stereotype.Component
+public class ForwardPathMigration implements Migration {
+
+ private TitanDao titanDao;
+ private UserAdminOperation userAdminOperation;
+ private ToscaOperationFacade toscaOperationFacade;
+ private User user = null;
+
+ public ForwardPathMigration(TitanDao titanDao,
+ UserAdminOperation userAdminOperation, ToscaOperationFacade toscaOperationFacade) {
+ this.titanDao = titanDao;
+ this.userAdminOperation = userAdminOperation;
+ this.toscaOperationFacade = toscaOperationFacade;
+ }
+
+ @Override
+ public String description() {
+ return "remove corrupted forwarding paths ";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(1806), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ final String userId = ConfigurationManager.getConfigurationManager().getConfiguration().getAutoHealingOwner();
+
+ Either<User, ActionStatus> userData = (Either<User, ActionStatus>) userAdminOperation
+ .getUserData(userId, false);
+ if (userData.isRight()) {
+ return MigrationResult.error(
+ "failed to delete unused forwarding paths. Failed to resolve user : " + userId + " error " + userData
+ .right().value());
+ } else {
+ user = userData.left().value();
+ }
+ StorageOperationStatus status = cleanAllServices();
+
+ return status == StorageOperationStatus.OK ? MigrationResult.success()
+ : MigrationResult.error("failed to remove corrupted forwarding paths . Error : " + status);
+
+ }
+
+ private StorageOperationStatus cleanAllServices() {
+ StorageOperationStatus status;
+
+ Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
+ hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+ Map<GraphPropertyEnum, Object> hasNotProps = new HashMap<>();
+ hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
+ status = titanDao
+ .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll)
+ .either(this::cleanServices, this::handleError);
+ return status;
+ }
+
+ private StorageOperationStatus cleanServices(List<GraphVertex> containersV) {
+ StorageOperationStatus status = StorageOperationStatus.OK;
+ for (GraphVertex container : containersV) {
+ ComponentParametersView componentParametersView = new ComponentParametersView();
+ componentParametersView.setIgnoreComponentInstances(false);
+ componentParametersView.setIgnoreCapabilities(false);
+ componentParametersView.setIgnoreRequirements(false);
+ componentParametersView.setIgnoreForwardingPath(false);
+ Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
+ .getToscaElement(container.getUniqueId(), componentParametersView);
+ if (toscaElement.isRight()) {
+ return toscaElement.right().value();
+ }
+ status = fixDataOnGraph(toscaElement.left().value());
+ if (status != StorageOperationStatus.OK) {
+ break;
+ }
+ }
+ return status;
+ }
+
+
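+ // NOT_FOUND is not treated as an error here: an empty result simply means there are no services to clean.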
+ private StorageOperationStatus handleError(TitanOperationStatus err) {
+ titanDao.rollback();
+ return DaoStatusConverter
+ .convertTitanStatusToStorageStatus(TitanOperationStatus.NOT_FOUND == err ? TitanOperationStatus.OK : err);
+ }
+
+ private StorageOperationStatus fixDataOnGraph(Component component) {
+ if (!(component instanceof Service)){
+ return StorageOperationStatus.OK;
+ }
+ Service service = (Service) component;
+ Either<GraphVertex, TitanOperationStatus> getResponse = titanDao.getVertexById(service.getUniqueId(),
+ JsonParseFlagEnum.NoParse);
+ if (getResponse.isRight()) {
+ return DaoStatusConverter.convertTitanStatusToStorageStatus(getResponse.right().value());
+
+ }
+ Set<String> ciNames = new HashSet<>();
+ if (service.getComponentInstances() != null && !service.getComponentInstances().isEmpty()) {
+ ciNames = service.getComponentInstances().stream().map(ci -> ci.getName())
+ .collect(Collectors.toSet());
+ }
+ GraphVertex componentVertex = getResponse.left().value();
+
+ GraphVertex toscaDataVertex;
+ Either<GraphVertex, TitanOperationStatus> groupVertexEither = titanDao.getChildVertex(componentVertex,
+ EdgeLabelEnum.FORWARDING_PATH, JsonParseFlagEnum.ParseJson);
+ if (groupVertexEither.isRight() && groupVertexEither.right().value() == TitanOperationStatus.NOT_FOUND) {
+ return StorageOperationStatus.OK;
+ }
+ if (groupVertexEither.isRight()) {
+ return DaoStatusConverter.convertTitanStatusToStorageStatus(groupVertexEither.right().value());
+ }
+ toscaDataVertex = groupVertexEither.left().value();
+ Map<String, ForwardingPathDataDefinition> forwardingPaths = new HashMap<>(
+ (Map<String, ForwardingPathDataDefinition>) toscaDataVertex.getJson());
+ List<String> toBeDeletedFP = new ArrayList<>();
+ for (Map.Entry<String, ForwardingPathDataDefinition> forwardingPathDataDefinition : forwardingPaths
+ .entrySet()) {
+ Set<String> nodeNames = forwardingPathDataDefinition.getValue().getPathElements()
+ .getListToscaDataDefinition()
+ .stream().map(element -> ImmutableSet.of(element.getFromNode(), element.getToNode()))
+ .flatMap(set -> set.stream()).collect(Collectors.toSet());
+ if (!ciNames.containsAll(nodeNames)) {
+ toBeDeletedFP.add(forwardingPathDataDefinition.getKey());
+ }
+ }
+ if (toBeDeletedFP.isEmpty()) {
+ titanDao.rollback();
+ return StorageOperationStatus.OK;
+ }
+ toBeDeletedFP.stream().forEach(fpKey -> forwardingPaths.remove(fpKey));
+ toscaDataVertex.setJson(forwardingPaths);
+ Either<GraphVertex, TitanOperationStatus> updatevertexEither = updateOrCopyOnUpdate(
+ toscaDataVertex, componentVertex);
+ if (updatevertexEither.isRight()) {
+ titanDao.rollback();
+ return DaoStatusConverter.convertTitanStatusToStorageStatus(updatevertexEither.right().value());
+ }
+ titanDao.commit();
+ return StorageOperationStatus.OK;
+ }
+
+ private Either<GraphVertex, TitanOperationStatus> cloneDataVertex(GraphVertex dataVertex, GraphVertex toscaElementVertex, Edge edgeToRemove) {
+ EdgeLabelEnum label = EdgeLabelEnum.FORWARDING_PATH;
+ GraphVertex newDataVertex = new GraphVertex(dataVertex.getLabel());
+ String id = IdBuilderUtils.generateChildId(toscaElementVertex.getUniqueId(), dataVertex.getLabel());
+ newDataVertex.cloneData(dataVertex);
+ newDataVertex.setUniqueId(id);
+
+ Either<GraphVertex, TitanOperationStatus> createVertex = titanDao.createVertex(newDataVertex);
+ if (createVertex.isRight()) {
+ return createVertex;
+ }
+ newDataVertex = createVertex.left().value();
+ TitanOperationStatus createEdge = titanDao.createEdge(toscaElementVertex, newDataVertex, label, titanDao.getEdgeProperties(edgeToRemove));
+ if (createEdge != TitanOperationStatus.OK) {
+ return Either.right(createEdge);
+ }
+ edgeToRemove.remove();
+ return Either.left(newDataVertex);
+ }
+
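+ // Copy-on-update: when the FORWARDING_PATH data vertex is shared by more than one component version, it is cloned for this component instead of being updated in place.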
+ private Either<GraphVertex, TitanOperationStatus> updateOrCopyOnUpdate(GraphVertex dataVertex, GraphVertex toscaElementVertex ) {
+ EdgeLabelEnum label = EdgeLabelEnum.FORWARDING_PATH;
+ Iterator<Edge> edges = dataVertex.getVertex().edges(Direction.IN, label.name());
+ int edgeCount = 0;
+ Edge edgeToRemove = null;
+ while (edges.hasNext()) {
+ Edge edge = edges.next();
+ ++edgeCount;
+ Vertex outVertex = edge.outVertex();
+ String outId = (String) titanDao.getProperty((TitanVertex) outVertex, GraphPropertyEnum.UNIQUE_ID.getProperty());
+ if (toscaElementVertex.getUniqueId().equals(outId)) {
+ edgeToRemove = edge;
+ }
+ }
+ if (edgeToRemove == null) {
+ return Either.right(TitanOperationStatus.GENERAL_ERROR);
+ }
+ switch (edgeCount) {
+ case 0:
+ // error
+ return Either.right(TitanOperationStatus.GENERAL_ERROR);
+ case 1:
+ // update
+ return titanDao.updateVertex(dataVertex);
+ default:
+ // copy on update
+ return cloneDataVertex(dataVertex, toscaElementVertex, edgeToRemove);
+ }
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java
new file mode 100644
index 0000000000..7d741624c2
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java
@@ -0,0 +1,138 @@
+package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
+import org.openecomp.sdc.be.components.lifecycle.LifecycleChangeInfoWithAction;
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.LifeCycleTransitionEnum;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.openecomp.sdc.exception.ResponseFormat;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class ResourceLifecycleMigration implements Migration {
+
+ private TitanDao titanDao;
+ private LifecycleBusinessLogic lifecycleBusinessLogic;
+ private UserAdminOperation userAdminOperation;
+
+ private User user = null;
+
+ private static final Logger log = Logger.getLogger(ResourceLifecycleMigration.class);
+
+ public ResourceLifecycleMigration(TitanDao titanDao, LifecycleBusinessLogic lifecycleBusinessLogic, UserAdminOperation userAdminOperation) {
+ this.titanDao = titanDao;
+ this.lifecycleBusinessLogic = lifecycleBusinessLogic;
+ this.userAdminOperation = userAdminOperation;
+ }
+
+ @Override
+ public String description() {
+ return "change resource lifecycle state from testing to certified";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(1806), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ log.info("start change resource lifecycle states migration");
+ final String userId = ConfigurationManager.getConfigurationManager().getConfiguration().getAutoHealingOwner();
+
+ Either<User, ActionStatus> userReq = userAdminOperation.getUserData(userId, false);
+ if (userReq.isRight()) {
+ log.error("Upgrade migration failed. User {} resolve failed: {} ", userId, userReq.right().value());
+ return MigrationResult.error("failed to change lifecycle state of resources. Failed to resolve user : " + userId + " error " + userReq.right().value());
+ } else {
+ user = userReq.left().value();
+ log.info("User {} will perform upgrade operation with role {}", user.getUserId(), user.getRole());
+ }
+
+ StorageOperationStatus status = changeResourceLifecycleState();
+
+ return status == StorageOperationStatus.OK ? MigrationResult.success() : MigrationResult.error("failed to change lifecycle state of resources. Error : " + status);
+ }
+
+ private StorageOperationStatus changeResourceLifecycleState() {
+ StorageOperationStatus status;
+ status = findResourcesAndChangeStatus(VertexTypeEnum.NODE_TYPE);
+ if (StorageOperationStatus.OK == status) {
+ status = findResourcesAndChangeStatus(VertexTypeEnum.TOPOLOGY_TEMPLATE);
+ }
+ titanDao.commit();
+ return status;
+ }
+
+ private StorageOperationStatus findResourcesAndChangeStatus(VertexTypeEnum type) {
+ StorageOperationStatus status;
+ Map<GraphPropertyEnum, Object> props = new EnumMap<>(GraphPropertyEnum.class);
+ props.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
+ props.put(GraphPropertyEnum.STATE, LifecycleStateEnum.READY_FOR_CERTIFICATION.name());
+ props.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
+
+ Map<GraphPropertyEnum, Object> hasNot = new EnumMap<>(GraphPropertyEnum.class);
+ hasNot.put(GraphPropertyEnum.IS_DELETED, true);
+
+ log.info("findResourcesAndChangeStatus for type {} and state {}", type ,LifecycleStateEnum.READY_FOR_CERTIFICATION);
+ status = titanDao.getByCriteria(type, props, hasNot, JsonParseFlagEnum.ParseAll).either(this::changeState, this::handleError);
+ log.info("status {} for type {} and state {}", status, type ,LifecycleStateEnum.READY_FOR_CERTIFICATION);
+
+ log.info("findResourcesAndChangeStatus for type {} and state {}", type ,LifecycleStateEnum.CERTIFICATION_IN_PROGRESS);
+ props.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFICATION_IN_PROGRESS.name());
+ status = titanDao.getByCriteria(type, props, hasNot, JsonParseFlagEnum.ParseAll).either(this::changeState, this::handleError);
+ log.info("status {} for type {} and state {}", status, type ,LifecycleStateEnum.CERTIFICATION_IN_PROGRESS);
+
+
+ return status;
+ }
+
+ private StorageOperationStatus changeState(List<GraphVertex> resourcesV) {
+ StorageOperationStatus status = StorageOperationStatus.OK;
+
+ for (GraphVertex resourceV : resourcesV) {
+ status = changeResourceState(resourceV);
+ if (status != StorageOperationStatus.OK) {
+ log.info("Failed to change state to certified of resource with id {} , continue to next, reset status", resourceV.getUniqueId() );
+ status = StorageOperationStatus.OK;
+ }
+ }
+ return status;
+ }
+
+ private StorageOperationStatus changeResourceState(GraphVertex resourceV) {
+ log.debug("Change state to certified to resource with id {} ", resourceV.getUniqueId() );
+
+ LifecycleChangeInfoWithAction changeInfo = new LifecycleChangeInfoWithAction("change resource state by migration");
+ final Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> changeComponentState = lifecycleBusinessLogic.changeComponentState(ComponentTypeEnum.RESOURCE, resourceV.getUniqueId(), user, LifeCycleTransitionEnum.CERTIFY, changeInfo, false, true);
+ return changeComponentState.isLeft() ? StorageOperationStatus.OK : StorageOperationStatus.GENERAL_ERROR;
+ }
+
+ private StorageOperationStatus handleError(TitanOperationStatus err) {
+ log.debug("receive titan error {}", err);
+ return DaoStatusConverter.convertTitanStatusToStorageStatus(TitanOperationStatus.NOT_FOUND == err ? TitanOperationStatus.OK : err);
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java
new file mode 100644
index 0000000000..4786839250
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java
@@ -0,0 +1,237 @@
+package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.CompositionDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.MapPropertiesDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.PropertyDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.jsontitan.enums.JsonConstantKeysEnum;
+import org.openecomp.sdc.be.model.jsontitan.operations.NodeTemplateOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.*;
+import java.util.Map.Entry;
+
+@Component
+public class SDCInstancesMigration implements Migration {
+
+ private TitanDao titanDao;
+ private NodeTemplateOperation nodeTemplateOperation;
+
+ private static final Logger log = Logger.getLogger(SDCInstancesMigration.class);
+
+ private static final String ALLOTTED_CATEGORY = "Allotted Resource";
+
+ private static final List<String> UUID_PROPS_NAMES = Arrays.asList("providing_service_uuid", "providing_service_uuid");
+
+
+ public SDCInstancesMigration(TitanDao titanDao, NodeTemplateOperation nodeTemplateOperation) {
+ this.titanDao = titanDao;
+ this.nodeTemplateOperation = nodeTemplateOperation;
+ }
+
+ @Override
+ public String description() {
+ return "connect instances in container to its origins";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(1806), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ StorageOperationStatus status = connectAllContainers();
+
+ return status == StorageOperationStatus.OK ? MigrationResult.success() : MigrationResult.error("failed to create connection between instances and origins. Error : " + status);
+ }
+
+ private StorageOperationStatus connectAllContainers() {
+ StorageOperationStatus status;
+ Map<GraphPropertyEnum, Object> hasNotProps = new EnumMap<>(GraphPropertyEnum.class);
+ hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
+ hasNotProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.CVFC);
+
+ status = titanDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, null, hasNotProps, JsonParseFlagEnum.ParseAll)
+ .either(this::connectAll, this::handleError);
+ return status;
+ }
+
+ private StorageOperationStatus handleError(TitanOperationStatus err) {
+ return DaoStatusConverter.convertTitanStatusToStorageStatus(TitanOperationStatus.NOT_FOUND == err ? TitanOperationStatus.OK : err);
+ }
+
+ private StorageOperationStatus connectAll(List<GraphVertex> containersV) {
+ StorageOperationStatus status = StorageOperationStatus.OK;
+ for (GraphVertex container : containersV) {
+ status = handleOneContainer(container);
+ if (status != StorageOperationStatus.OK) {
+ break;
+ }
+ }
+ return status;
+ }
+
+ private StorageOperationStatus handleOneContainer(GraphVertex containerV) {
+ StorageOperationStatus status = StorageOperationStatus.OK;
+
+ boolean needConnectAllotted = false;
+ ComponentTypeEnum componentType = containerV.getType();
+ Map<String, MapPropertiesDataDefinition> instanceProperties = null;
+ if (componentType == ComponentTypeEnum.RESOURCE) {
+ Either<GraphVertex, TitanOperationStatus> subcategoryV = titanDao.getChildVertex(containerV, EdgeLabelEnum.CATEGORY, JsonParseFlagEnum.NoParse);
+ if (subcategoryV.isRight()) {
+ log.debug("Failed to fetch category vertex for resource {} error {} ", containerV.getUniqueId(), subcategoryV.right().value());
+ return StorageOperationStatus.GENERAL_ERROR;
+ }
+ GraphVertex catV = subcategoryV.left().value();
+ Map<GraphPropertyEnum, Object> metadataProperties = catV.getMetadataProperties();
+
+ String name = (String) metadataProperties.get(GraphPropertyEnum.NAME);
+ if (name.equals(ALLOTTED_CATEGORY)) {
+ log.debug("Find allotted resource {}.", containerV.getUniqueId());
+ needConnectAllotted = true;
+ Either<Map<String, MapPropertiesDataDefinition>, StorageOperationStatus> instProperties = getInstProperties(containerV);
+ if ( instProperties.isRight() ){
+ return instProperties.right().value();
+ }
+ instanceProperties = instProperties.left().value();
+ }
+ }
+ Map<String, CompositionDataDefinition> jsonComposition = (Map<String, CompositionDataDefinition>) containerV.getJson();
+ if (jsonComposition != null && !jsonComposition.isEmpty()) {
+ try {
+ status = connectInstances(containerV, needConnectAllotted, instanceProperties, jsonComposition);
+
+ } finally {
+ if (status == StorageOperationStatus.OK) {
+ titanDao.commit();
+ } else {
+ titanDao.rollback();
+ }
+ }
+ }
+ return status;
+ }
+
+ private Either<Map<String, MapPropertiesDataDefinition>, StorageOperationStatus> getInstProperties(GraphVertex containerV) {
+ Map<String, MapPropertiesDataDefinition> instanceProperties;
+ Either<GraphVertex, TitanOperationStatus> instProps = titanDao.getChildVertex(containerV, EdgeLabelEnum.INST_PROPERTIES, JsonParseFlagEnum.ParseAll);
+
+ if (instProps.isRight()) {
+ if (instProps.right().value() == TitanOperationStatus.NOT_FOUND) {
+ instanceProperties = new HashMap<>();
+ } else {
+ log.debug("Failed to fetch instance properties vertex for resource {} error {} ", containerV.getUniqueId(), instProps.right().value());
+ return Either.right(StorageOperationStatus.GENERAL_ERROR);
+ }
+ } else {
+ instanceProperties = (Map<String, MapPropertiesDataDefinition>) instProps.left().value().getJson();
+ }
+ return Either.left(instanceProperties);
+ }
+
+ private StorageOperationStatus connectInstances(GraphVertex containerV, boolean needConnectAllotted, Map<String, MapPropertiesDataDefinition> instanceProperties,
+ Map<String, CompositionDataDefinition> jsonComposition) {
+ StorageOperationStatus status = StorageOperationStatus.OK;
+ CompositionDataDefinition compositionDataDefinition = jsonComposition.get(JsonConstantKeysEnum.COMPOSITION.getValue());
+ Map<String, ComponentInstanceDataDefinition> componentInstances = compositionDataDefinition.getComponentInstances();
+ for (Map.Entry<String, ComponentInstanceDataDefinition> entry : componentInstances.entrySet()) {
+ status = handleInstance(containerV, needConnectAllotted, instanceProperties, entry);
+ if ( status != StorageOperationStatus.OK){
+ if ( status == StorageOperationStatus.NOT_FOUND ){
+ log.debug("reset status and continue");
+ status = StorageOperationStatus.OK;
+ }else{
+ log.debug("Failed handle instance. exit");
+ break;
+ }
+ }
+ }
+ return status;
+ }
+
+ private StorageOperationStatus handleInstance(GraphVertex containerV, boolean needConnectAllotted, Map<String, MapPropertiesDataDefinition> instanceProperties, Map.Entry<String, ComponentInstanceDataDefinition> entry) {
+ ComponentInstanceDataDefinition instance = entry.getValue();
+ StorageOperationStatus status = nodeTemplateOperation.createInstanceEdge(containerV, instance);
+ if (status != StorageOperationStatus.OK) {
+ if ( status == StorageOperationStatus.NOT_FOUND ){
+ Boolean highest = (Boolean) containerV.getMetadataProperties().get(GraphPropertyEnum.IS_HIGHEST_VERSION);
+ log.debug("No origin for instance {} with ID {}. The component is highest ={}, Reset status and continue.. ", instance.getUniqueId(), instance.getComponentUid(), highest);
+ status = StorageOperationStatus.OK;
+ }else{
+ log.debug("Failed to connect in container {} instance {} to origin {} error {} ", containerV.getUniqueId(), instance.getUniqueId(), instance.getComponentUid(), status);
+ return status;
+ }
+ }
+ if (needConnectAllotted) {
+ status = connectAllotedInstances(containerV, instanceProperties, instance);
+ }
+ return status;
+ }
+
+ private StorageOperationStatus connectAllotedInstances(GraphVertex containerV, Map<String, MapPropertiesDataDefinition> instanceProperties, ComponentInstanceDataDefinition instance) {
+ StorageOperationStatus status = StorageOperationStatus.OK;
+ if ( instanceProperties != null ){
+ MapPropertiesDataDefinition mapPropertiesDataDefinition = instanceProperties.get(instance.getUniqueId());
+ if ( mapPropertiesDataDefinition != null ){
+ status = checkAllottedPropertyAndConnect(containerV, instance, mapPropertiesDataDefinition);
+ }else{
+ log.debug("No isntances properties for instance {}", instance.getUniqueId());
+ }
+ }
+ return status;
+ }
+
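+ // Looks up the providing service UUID property on the allotted instance and, when present, creates an allotted-of edge from the container to that service.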
+ private StorageOperationStatus checkAllottedPropertyAndConnect(GraphVertex containerV, ComponentInstanceDataDefinition instance, MapPropertiesDataDefinition mapPropertiesDataDefinition) {
+ Map<String, PropertyDataDefinition> mapToscaDataDefinition = mapPropertiesDataDefinition.getMapToscaDataDefinition();
+ StorageOperationStatus status = StorageOperationStatus.OK;
+ Optional<Entry<String, PropertyDataDefinition>> findFirst = mapToscaDataDefinition
+ .entrySet()
+ .stream()
+ .filter(e -> UUID_PROPS_NAMES.contains(e.getKey()))
+ .findFirst();
+
+ if ( findFirst.isPresent() ){
+ PropertyDataDefinition property = findFirst.get().getValue();
+ String serviceUUID = property.getValue();
+ if ( serviceUUID != null ){
+ log.debug("Defined reference service on property {} value {} on instance {}", property.getName(), property.getValue(), instance.getUniqueId() );
+ status = nodeTemplateOperation.createAllottedOfEdge(containerV.getUniqueId(), instance.getUniqueId(), serviceUUID);
+ if ( status != StorageOperationStatus.OK ){
+ if ( status == StorageOperationStatus.NOT_FOUND ){
+ Boolean highest = (Boolean) containerV.getMetadataProperties().get(GraphPropertyEnum.IS_HIGHEST_VERSION);
+ log.debug("No origin for allotted reference {} with UUID {}. the component highest = {}, Reset status and continue.. ", instance.getUniqueId(), serviceUUID, highest);
+ status = StorageOperationStatus.OK;
+ }else{
+ log.debug("Failed to connect in container {} instance {} to allotted service {} error {} ", containerV.getUniqueId(), instance.getUniqueId(), instance.getComponentUid(), status);
+ return status;
+ }
+ }
+ }else{
+ log.debug("No value for property {} on instance {}", property.getName(),instance.getUniqueId() );
+ }
+ }else{
+ log.debug("No sercific properties of dependencies for instance {}", instance.getUniqueId());
+ }
+ return status;
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SdcArchiveMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SdcArchiveMigration.java
new file mode 100644
index 0000000000..bdab2cee2b
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SdcArchiveMigration.java
@@ -0,0 +1,76 @@
+package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.jsongraph.utils.IdBuilderUtils;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+
+@Component
+public class SdcArchiveMigration implements Migration {
+ private static final Logger logger = Logger.getLogger(SdcArchiveMigration.class);
+
+ private TitanDao titanDao;
+
+ public SdcArchiveMigration(TitanDao titanDao) {
+ this.titanDao = titanDao;
+ }
+
+ @Override
+ public String description() {
+ return "add archive node for archiving/restoring components ";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(1806), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ TitanOperationStatus status = null;
+ try {
+ status = getOrCreateArchiveRoot();
+ return status == TitanOperationStatus.OK ? MigrationResult.success() : MigrationResult.error("failed to create archive root node. error: " + status);
+ } finally {
+ commitOrRollBack(status);
+ }
+ }
+
+ private void commitOrRollBack(TitanOperationStatus status) {
+ if (status == TitanOperationStatus.OK) {
+ titanDao.commit();
+ } else {
+ titanDao.rollback();
+ }
+ }
+
+ private TitanOperationStatus getOrCreateArchiveRoot() {
+ logger.info("creating or getting catalog archive vertex");
+ return titanDao.getVertexByLabel(VertexTypeEnum.ARCHIVE_ROOT)
+ .either(v -> TitanOperationStatus.OK, s -> this.createRootArchiveVertex());
+ }
+
+ private TitanOperationStatus createRootArchiveVertex() {
+ GraphVertex archiveRootVertex = new GraphVertex(VertexTypeEnum.ARCHIVE_ROOT);
+ archiveRootVertex.setUniqueId(IdBuilderUtils.generateUniqueId());
+ archiveRootVertex.addMetadataProperty(GraphPropertyEnum.LABEL, VertexTypeEnum.ARCHIVE_ROOT);
+ archiveRootVertex.addMetadataProperty(GraphPropertyEnum.UNIQUE_ID, archiveRootVertex.getUniqueId());
+
+ logger.info("Creating root archive vertex {}", archiveRootVertex.getUniqueId());
+
+ final Either<GraphVertex, TitanOperationStatus> vertexE = titanDao.createVertex(archiveRootVertex);
+
+ return vertexE.isLeft() ? TitanOperationStatus.OK : vertexE.right().value();
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/EntryPoint.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/EntryPoint.java
index 8899aa1e5e..e5ea76ad56 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/EntryPoint.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/EntryPoint.java
@@ -20,18 +20,17 @@
package org.openecomp.sdc.asdctool.servlets;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
@Path("/entrypoint")
public class EntryPoint {
- private static Logger log = LoggerFactory.getLogger(EntryPoint.class.getName());
+ private static Logger log = Logger.getLogger(EntryPoint.class.getName());
@GET
@Path("test")
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java
index 62b59f13f3..c1f9335d59 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java
@@ -20,15 +20,13 @@
package org.openecomp.sdc.asdctool.servlets;
-import java.io.BufferedOutputStream;
-import java.io.BufferedReader;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.Map.Entry;
-import java.util.Properties;
+import com.thinkaurelius.titan.core.TitanGraph;
+import org.apache.commons.configuration.BaseConfiguration;
+import org.apache.commons.configuration.Configuration;
+import org.apache.tinkerpop.gremlin.structure.io.graphml.GraphMLWriter;
+import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.openecomp.sdc.asdctool.Utils;
+import org.openecomp.sdc.common.log.wrappers.Logger;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
@@ -36,22 +34,15 @@ import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
-
-import org.apache.commons.configuration.BaseConfiguration;
-import org.apache.commons.configuration.Configuration;
-import org.apache.tinkerpop.gremlin.structure.io.graphml.GraphMLWriter;
-import org.glassfish.jersey.media.multipart.FormDataParam;
-import org.openecomp.sdc.asdctool.Utils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.thinkaurelius.titan.core.TitanGraph;
+import java.io.*;
+import java.util.Map.Entry;
+import java.util.Properties;
//import com.tinkerpop.blueprints.util.io.graphml.GraphMLWriter;
@Path("/titan")
public class ExportImportTitanServlet {
- private static Logger log = LoggerFactory.getLogger(ExportImportTitanServlet.class.getName());
+ private static Logger log = Logger.getLogger(ExportImportTitanServlet.class.getName());
@GET
@Path("export")
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportCassandraTableTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportCassandraTableTool.java
index 9ea2d5ad49..045ac5cafd 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportCassandraTableTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportCassandraTableTool.java
@@ -1,9 +1,5 @@
package org.openecomp.sdc.asdctool.simulator.tenant;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.function.Consumer;
-
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
@@ -11,6 +7,10 @@ import org.openecomp.sdc.common.impl.FSConfigurationSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Consumer;
+
/**
* Main class of the utility that imports a CSV file into the specified table.
* The existing contents of the table are removed first.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEnvironment.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEnvironment.java
index 1caf073c79..bf192f352b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEnvironment.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEnvironment.java
@@ -1,8 +1,7 @@
package org.openecomp.sdc.asdctool.simulator.tenant;
-import org.openecomp.sdc.be.datatypes.enums.EnvironmentStatusEnum;
-
import com.opencsv.bean.CsvBindByPosition;
+import org.openecomp.sdc.be.datatypes.enums.EnvironmentStatusEnum;
/**
* Represents a line in the CSV file to be imported into the "operationalenvironment" table.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEvnironmentImportHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEvnironmentImportHandler.java
index d4dbddde3f..baee8aa060 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEvnironmentImportHandler.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEvnironmentImportHandler.java
@@ -1,12 +1,6 @@
package org.openecomp.sdc.asdctool.simulator.tenant;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.List;
-import java.util.stream.Collectors;
-
+import com.opencsv.bean.CsvToBeanBuilder;
import org.openecomp.sdc.be.dao.cassandra.OperationalEnvironmentDao;
import org.openecomp.sdc.be.dao.cassandra.schema.Table;
import org.openecomp.sdc.be.resources.data.OperationalEnvironmentEntry;
@@ -14,7 +8,12 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
-import com.opencsv.bean.CsvToBeanBuilder;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.List;
+import java.util.stream.Collectors;
/**
* Imports CSV file into
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ConsoleWriter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ConsoleWriter.java
new file mode 100644
index 0000000000..934f462ca8
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ConsoleWriter.java
@@ -0,0 +1,37 @@
+package org.openecomp.sdc.asdctool.utils;
+
+public class ConsoleWriter {
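+ // Pads the given string with tab characters up to roughly 'min' 8-character tab stops so console columns line up.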
+ private static String tabbedData(String data, int min) {
+ // System.out.println(); //for debug
+
+ int tabcount = 0;
+ int len = 8 * min;
+
+ while (data.length() < len) {
+ tabcount++;
+ len = len - 8;
+ }
+
+ // System.out.println("debug: tabcount=" + tabcount);
+ // System.out.print("debug adding tabs... ");
+ for (int x = 0; x < tabcount; x++) {
+ // System.out.print("tab ");
+ data = data + "\t";
+ }
+ // System.out.println(); //for debug
+
+ return data;
+ }
+
+ public static void dataLine(String name) {
+ dataLine(name, null);
+ }
+
+ public static void dataLine(String name, Object data) {
+ if (data != null) {
+ System.out.println(tabbedData(name, 3) + data);
+ } else {
+ System.out.println(tabbedData(name, 3));
+ }
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ReportWriter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ReportWriter.java
new file mode 100644
index 0000000000..64e9290f72
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ReportWriter.java
@@ -0,0 +1,50 @@
+package org.openecomp.sdc.asdctool.utils;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import org.openecomp.sdc.be.dao.jsongraph.utils.JsonParserUtils;
+
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+public class ReportWriter {
+ FileWriter file;
+ public ReportWriter(String reportName) {
+
+ StringBuilder sb = new StringBuilder();
+ Path path = Paths.get("/var/tmp/");
+ if ( path.toFile().exists() ) {
+ sb.append("/var/tmp/");
+ }
+ sb.append("report_").append(reportName).append("_").append(System.currentTimeMillis()).append(".json");
+ String fileName = sb.toString();
+ try {
+ file = new FileWriter(fileName);
+ } catch (IOException e) {
+ System.out.println("Failed to create report file. " + e.getMessage());
+ }
+ }
+
+ public void report(Object objectToWrite) throws IOException {
+ if (file != null) {
+ JsonParser parser = new JsonParser();
+ JsonObject json = parser.parse(JsonParserUtils.toJson(objectToWrite)).getAsJsonObject();
+
+ Gson gson = new GsonBuilder().setPrettyPrinting().create();
+ String prettyJson = gson.toJson(json);
+
+ file.write(prettyJson);
+ file.flush();
+ }
+ }
+
+ public void close() throws IOException {
+ if (file != null) {
+ file.close();
+ }
+ }
+}
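
A short usage sketch for the new ReportWriter (again, not part of the patch), assuming the payload is an object that JsonParserUtils.toJson can serialize; the report name and values are illustrative:

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;
    import org.openecomp.sdc.asdctool.utils.ReportWriter;

    public class ReportWriterExample {
        public static void main(String[] args) throws IOException {
            // Writes report_deleteComponents_<timestamp>.json under /var/tmp when that
            // directory exists, otherwise into the current working directory.
            ReportWriter writer = new ReportWriter("deleteComponents");
            Map<String, Object> summary = new HashMap<>();
            summary.put("scanned", 42);  // illustrative values
            summary.put("deleted", 3);
            writer.report(summary);      // pretty-printed JSON is flushed to the report file
            writer.close();
        }
    }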
diff --git a/asdctool/src/main/resources/application-context.xml b/asdctool/src/main/resources/application-context.xml
index cf5ae5a767..c9a13df44a 100644
--- a/asdctool/src/main/resources/application-context.xml
+++ b/asdctool/src/main/resources/application-context.xml
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xmlns:aop="http://www.springframework.org/schema/aop" xmlns:util="http://www.springframework.org/schema/util"
- xsi:schemaLocation="
+ xmlns:util="http://www.springframework.org/schema/util"
+ xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-3.0.xsd">
diff --git a/asdctool/src/main/resources/config/configuration.yaml b/asdctool/src/main/resources/config/configuration.yaml
index c92827e558..14fd1c5123 100644
--- a/asdctool/src/main/resources/config/configuration.yaml
+++ b/asdctool/src/main/resources/config/configuration.yaml
@@ -6,7 +6,7 @@ identificationHeaderFields:
- HTTP_CSP_WSTYPE
# catalog backend hostname
-beFqdn: localhost
+beFqdn: 192.168.33.10
# sdccatalog.att.com
# catalog backend http port
@@ -23,18 +23,25 @@ beSslPort: 8443
version: 1.0
released: 2012-11-30
-toscaConformanceLevel: 5.0
+toscaConformanceLevel: 8.0
minToscaConformanceLevel: 3.0
# These values are necessary for running upgrade migration 1710.0 process
+enableAutoHealing: false
appVersion: 1.1.0
artifactGeneratorConfig: Artifact-Generator.properties
resourcesForUpgrade:
- 5.0:
- - tosca.nodes.Root
+ 8.0:
+ - org.openecomp.resource.cp.extCP
+ - tosca.nodes.network.Network
+ - tosca.nodes.network.Port
+ - org.openecomp.resource.cp.nodes.network.SubInterface
skipUpgradeFailedVfs: true
skipUpgradeVSPs: true
autoHealingOwner: jh0003
+supportAllottedResourcesAndProxy: false
+deleteLockTimeoutInSeconds: 60
+maxDeleteComponents: 10
titanCfgFile: src\main\resources\config\titan.properties
titanMigrationKeySpaceCfgFile: src\main\resources\config\titan-migration.properties
@@ -84,20 +91,22 @@ neo4j:
password: "12345"
cassandraConfig:
- cassandraHosts: ['localhost']
- localDataCenter: datacenter1
+ cassandraHosts: [192.168.33.10]
+ localDataCenter: DC-AIO-Ubuntu1
reconnectTimeout : 30000
- authenticate: false
- username: koko
- password: bobo
- ssl: false
- truststorePath : /path/path
- truststorePassword : 123123
+ authenticate: true
+ username: asdc_user
+ password: Aa1234%^!
+ ssl: true
+ truststorePath : C:/Users/im453s/git/vagrant-sdc-all-in-one/tmp.trust
+ truststorePassword : Aa123456
keySpaces:
- - { name: sdcaudit, replicationStrategy: SimpleStrategy, replicationInfo: ['1']}
- - { name: sdcartifact, replicationStrategy: SimpleStrategy, replicationInfo: ['1']}
- - { name: sdccomponent, replicationStrategy: SimpleStrategy, replicationInfo: ['1']}
- - { name: sdcrepository, replicationStrategy: SimpleStrategy, replicationInfo: ['1']}
+ - { name: dox, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['DC-AIO-Ubuntu1', '1']}
+ - { name: sdcaudit, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['DC-AIO-Ubuntu1', '1']}
+ - { name: sdcartifact, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['DC-AIO-Ubuntu1', '1']}
+ - { name: sdccomponent, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['DC-AIO-Ubuntu1', '1']}
+ - { name: sdcrepository, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['DC-AIO-Ubuntu1', '1']}
+
#Application-specific settings of ES
elasticSearch:
@@ -203,6 +212,7 @@ toscaArtifacts:
type: TOSCA_CSAR
description: TOSCA definition package of the asset
+
#Informational artifacts placeHolder
excludeResourceCategory:
- Generic
@@ -329,7 +339,7 @@ serviceDeploymentArtifacts:
- xml
AAI_VF_INSTANCE_MODEL:
acceptedTypes:
- - xml
+ - xml
OTHER:
acceptedTypes:
diff --git a/asdctool/src/main/resources/config/titan.properties b/asdctool/src/main/resources/config/titan.properties
index 5411a44224..5f22a08837 100644
--- a/asdctool/src/main/resources/config/titan.properties
+++ b/asdctool/src/main/resources/config/titan.properties
@@ -1,11 +1,28 @@
-storage.backend=cassandra
-storage.hostname=localhost
+storage.backend=cassandrathrift
+storage.hostname=192.168.33.10
storage.port=9160
+storage.username=asdc_user
+storage.password=Aa1234%^!
+storage.connection-timeout=10000
+storage.cassandra.keyspace=sdctitan
+
+storage.cassandra.ssl.enabled=true
+storage.cassandra.ssl.truststore.location=C:\\gitWork\\vagrant-sdc-all-in-one\\mytmp.trust
+storage.cassandra.ssl.truststore.password=Aa123456
+
+storage.cassandra.read-consistency-level=LOCAL_QUORUM
+storage.cassandra.write-consistency-level=LOCAL_QUORUM
+storage.cassandra.replication-strategy-class=org.apache.cassandra.locator.NetworkTopologyStrategy
+storage.cassandra.replication-strategy-options=DC-sdc-iltlv633,1
+storage.cassandra.astyanax.local-datacenter=DC-sdc-iltlv633
+
cache.db-cache = false
cache.db-cache-clean-wait = 20
cache.db-cache-time = 180000
cache.db-cache-size = 0.5
-cache.tx-cache-size = 500000
-storage.cassandra.keyspace=sdctitan
+cache.tx-cache-size = 1000000
+
+storage.lock.retries=5
+storage.lock.wait-time=500
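
A hedged sketch of how a Titan properties file like the one above is typically consumed; in the tool the path comes from titanCfgFile in configuration.yaml, so the literal path below is illustrative only:

    import com.thinkaurelius.titan.core.TitanFactory;
    import com.thinkaurelius.titan.core.TitanGraph;

    public class TitanConnectionCheck {
        public static void main(String[] args) throws Exception {
            // Opens the graph with the cassandrathrift backend, credentials, SSL truststore
            // and consistency settings defined in the properties file.
            TitanGraph graph = TitanFactory.open("src/main/resources/config/titan.properties");
            System.out.println("Titan graph open: " + graph.isOpen());
            graph.close();
        }
    }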
diff --git a/asdctool/src/main/resources/scripts/deleteComponentTool.sh b/asdctool/src/main/resources/scripts/deleteComponentTool.sh
new file mode 100644
index 0000000000..afdf8166f5
--- /dev/null
+++ b/asdctool/src/main/resources/scripts/deleteComponentTool.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+##############################
+# Delete Component Tool
+##############################
+
+CURRENT_DIR=`pwd`
+BASEDIR=$(dirname $0)
+
+if [ ${BASEDIR:0:1} = "/" ]
+then
+ FULL_PATH=$BASEDIR
+else
+ FULL_PATH=$CURRENT_DIR/$BASEDIR
+fi
+
+source ${FULL_PATH}/baseOperation.sh
+
+mainClass="org.openecomp.sdc.asdctool.main.DeleteComponentTool"
+
+command="java $JVM_LOG_FILE -cp $JARS $mainClass $@"
+echo $command
+
+$command
+result=$?
+
+
+
+echo "***********************************"
+echo "***** $result *********************"
+echo "***********************************"
+
+exit $result
+
+
diff --git a/asdctool/src/main/resources/scripts/generateCsar.sh b/asdctool/src/main/resources/scripts/generateCsar.sh
new file mode 100644
index 0000000000..c74168823c
--- /dev/null
+++ b/asdctool/src/main/resources/scripts/generateCsar.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+##############################
+# CSAR Generator Tool
+##############################
+
+CURRENT_DIR=`pwd`
+BASEDIR=$(dirname $0)
+
+if [ ${BASEDIR:0:1} = "/" ]
+then
+ FULL_PATH=$BASEDIR
+else
+ FULL_PATH=$CURRENT_DIR/$BASEDIR
+fi
+
+source ${FULL_PATH}/baseOperation.sh
+
+mainClass="org.openecomp.sdc.asdctool.main.CsarGeneratorTool"
+
+command="java $JVM_LOG_FILE -cp $JARS $mainClass $@"
+echo $command
+
+$command
+result=$?
+
+
+
+echo "***********************************"
+echo "***** $result *********************"
+echo "***********************************"
+
+exit $result
+
+
diff --git a/asdctool/src/main/resources/scripts/vrfObjectFix.sh b/asdctool/src/main/resources/scripts/vrfObjectFix.sh
new file mode 100644
index 0000000000..3f875fa136
--- /dev/null
+++ b/asdctool/src/main/resources/scripts/vrfObjectFix.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+##############################
+# VRF Object Fix
+##############################
+
+CURRENT_DIR=`pwd`
+BASEDIR=$(dirname $0)
+
+if [ ${BASEDIR:0:1} = "/" ]
+then
+ FULL_PATH=$BASEDIR
+else
+ FULL_PATH=$CURRENT_DIR/$BASEDIR
+fi
+
+source ${FULL_PATH}/baseOperation.sh
+
+mainClass="org.openecomp.sdc.asdctool.main.VrfObjectFixMenu"
+
+command="java $JVM_LOG_FILE -Xmx6000M -cp $JARS $mainClass $@"
+echo $command
+
+$command
+result=$?
+
+
+
+echo "***********************************"
+echo "***** $result *********************"
+echo "***********************************"
+
+exit $result
+
+
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/AppTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/AppTest.java
new file mode 100644
index 0000000000..36f5e076b0
--- /dev/null
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/AppTest.java
@@ -0,0 +1,56 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+
+
+/**
+ * Unit test for simple App.
+ */
+public class AppTest extends TestCase {
+ /**
+ * Create the test case
+ *
+ * @param testName
+ * name of the test case
+ */
+ public AppTest(String testName) {
+ super(testName);
+ }
+
+ /**
+ * @return the suite of tests being tested
+ */
+ public static Test suite() {
+ return new TestSuite(AppTest.class);
+ }
+
+ /**
+ * Rigorous Test :-)
+ */
+ public void testApp() {
+ assertTrue(true);
+ }
+}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java
index ac93b92fd1..020696c42d 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java
@@ -1,16 +1,14 @@
package org.openecomp.sdc.asdctool;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.ws.rs.core.Response;
-
+import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.commons.configuration.Configuration;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.junit.Assert;
import org.junit.Test;
-import com.thinkaurelius.titan.core.TitanGraph;
+import javax.ws.rs.core.Response;
+import java.util.HashMap;
+import java.util.Map;
public class UtilsTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMockTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMockTest.java
index 86ee638c72..f069f6cf97 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMockTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMockTest.java
@@ -1,12 +1,11 @@
package org.openecomp.sdc.asdctool.configuration.mocks.es;
-import java.util.List;
-
+import fj.data.Either;
import org.junit.Test;
import org.openecomp.sdc.be.dao.api.ResourceUploadStatus;
import org.openecomp.sdc.be.resources.data.ESArtifactData;
-import fj.data.Either;
+import java.util.List;
public class ESCatalogDAOMockTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnumTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnumTest.java
index 2d1c6a8e57..5638c4581d 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnumTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnumTest.java
@@ -1,8 +1,5 @@
package org.openecomp.sdc.asdctool.enums;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -23,44 +20,6 @@ public class SchemaZipFileEnumTest {
result = null;
}
- @Test
- public void setGetFileName_shouldSetCustomFileName() {
- String fileName = "customFileName";
- testSubject.setFileName(fileName);
- assertEquals(fileName, testSubject.getFileName());
- }
-
- @Test
- public void setGetSourceFolderName_shouldSetCustomSourceFolderName() {
- String sourceFolderName = "customSourceFolderName";
- testSubject.setSourceFolderName(sourceFolderName);
- assertEquals(sourceFolderName, testSubject.getSourceFolderName());
- }
-
- @Test
- public void setGetSourceFileName_shouldSetCustomSourceFileName() {
- String sourceFileName = "customSourceFileName";
- testSubject.setSourceFileName(sourceFileName);
- assertEquals(sourceFileName, testSubject.getSourceFileName());
- }
-
- @Test
- public void setGetCollectionTitle_shouldSetCustomCollectionTitle() {
- String collectionTitle = "customCollectionTitle";
- testSubject.setCollectionTitle(collectionTitle);
- assertEquals(collectionTitle, testSubject.getCollectionTitle());
- }
-
- @Test
- public void setGetImportFileList_shouldSetGetFile1File2() {
- String[] importFileList = new String[] { "File1", "File2" };
- String[] receivedImportFileList;
- testSubject.setImportFileList(importFileList);
- receivedImportFileList = testSubject.getImportFileList();
- assertNotNull(receivedImportFileList);
- assertEquals("File1", receivedImportFileList[0]);
- assertEquals("File2", receivedImportFileList[1]);
- }
private SchemaZipFileEnum createTestSubject() {
return SchemaZipFileEnum.DATA;
@@ -77,16 +36,6 @@ public class SchemaZipFileEnumTest {
}
@Test
- public void testSetFileName() throws Exception {
- SchemaZipFileEnum testSubject;
- String fileName = "";
-
- // default test
- testSubject = createTestSubject();
- testSubject.setFileName(fileName);
- }
-
- @Test
public void testGetSourceFolderName() throws Exception {
SchemaZipFileEnum testSubject;
String result;
@@ -96,15 +45,6 @@ public class SchemaZipFileEnumTest {
result = testSubject.getSourceFolderName();
}
- @Test
- public void testSetSourceFolderName() throws Exception {
- SchemaZipFileEnum testSubject;
- String sourceFolderName = "";
-
- // default test
- testSubject = createTestSubject();
- testSubject.setSourceFolderName(sourceFolderName);
- }
@Test
public void testGetSourceFileName() throws Exception {
@@ -117,16 +57,6 @@ public class SchemaZipFileEnumTest {
}
@Test
- public void testSetSourceFileName() throws Exception {
- SchemaZipFileEnum testSubject;
- String sourceFileName = "";
-
- // default test
- testSubject = createTestSubject();
- testSubject.setSourceFileName(sourceFileName);
- }
-
- @Test
public void testGetCollectionTitle() throws Exception {
SchemaZipFileEnum testSubject;
String result;
@@ -137,16 +67,6 @@ public class SchemaZipFileEnumTest {
}
@Test
- public void testSetCollectionTitle() throws Exception {
- SchemaZipFileEnum testSubject;
- String collectionTitle = "";
-
- // default test
- testSubject = createTestSubject();
- testSubject.setCollectionTitle(collectionTitle);
- }
-
- @Test
public void testGetImportFileList() throws Exception {
SchemaZipFileEnum testSubject;
String[] result;
@@ -156,13 +76,4 @@ public class SchemaZipFileEnumTest {
result = testSubject.getImportFileList();
}
- @Test
- public void testSetImportFileList() throws Exception {
- SchemaZipFileEnum testSubject;
- String[] importFileList = new String[] { "" };
-
- // default test
- testSubject = createTestSubject();
- testSubject.setImportFileList(importFileList);
- }
}
\ No newline at end of file
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java
index d8fec34e30..3fce1b6d66 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java
@@ -1,14 +1,14 @@
package org.openecomp.sdc.asdctool.impl;
-import java.util.List;
-import java.util.Map;
-
import org.junit.Assert;
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.Component;
+import java.util.List;
+import java.util.Map;
+
public class ArtifactUuidFixTest {
private ArtifactUuidFix createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/DataMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/DataMigrationTest.java
index af93fc2414..c8d7c9953a 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/DataMigrationTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/DataMigrationTest.java
@@ -1,61 +1,250 @@
package org.openecomp.sdc.asdctool.impl;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+import org.openecomp.sdc.be.dao.cassandra.schema.Table;
+import org.openecomp.sdc.be.resources.data.auditing.*;
+import org.openecomp.sdc.common.datastructure.AuditingFieldsKey;
+
+import java.io.IOException;
import java.text.SimpleDateFormat;
-import java.util.EnumMap;
-import java.util.TimeZone;
+import java.util.Date;
+import java.util.HashMap;
-import org.elasticsearch.common.settings.SettingsException;
-import org.junit.Test;
-import org.openecomp.sdc.asdctool.impl.DataMigration.TypeToTableMapping;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingActionEnum;
-import org.openecomp.sdc.common.datastructure.AuditingFieldsKeysEnum;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.openecomp.sdc.common.datastructure.AuditingFieldsKey.*;
+@RunWith(MockitoJUnitRunner.class)
public class DataMigrationTest {
+ private final static String DESCRIPTION = "OK";
+ private final static String STATUS = "200";
+ private final static String SERVICE_INSTANCE_ID = "SERVICE_INSTANCE_ID";
+ private final static String MODIFIER = "MODIFIER";
+ private final static String REQUEST_ID = "REQUEST_ID";
+ private final static String USER = "USER";
+ private final static String USER_BEFORE = "USER_BEFORE";
+ private final static String USER_AFTER = "USER_AFTER";
+ private final static String ARTIFACT_UUID = "ARTIFACT_UUID";
+
+ private final static String PREV_STATE = "PREV_STATE";
+ private final static String CURR_STATE = "CURR_STATE";
+ private final static String PREV_VERSION = "PREV_VERSION";
+ private final static String CURR_VERSION = "CURR_VERSION";
+ private final static String DPREV_STATUS = "DPREV_STATUS";
+ private final static String DCURR_STATUS = "CURR_STATUS";
+ private final static String INVARIANT_UUID = "INVARIANT_UUID";
+ private final static String ARTIFACT_DATA = "ARTIFACT_DATA";
+ private final static String COMMENT = "COMMENT";
+ private final static String DISTRIBUTION_ID = "DISTRIBUTION_ID";
+ private final static String TOSCA_NODE_TYPE = "TOSCA_NODE_TYPE";
+ private final static String CONSUMER_ID = "CONSUMER_ID";
+ private final static String RESOURCE_URL = "RESOURCE_URL";
+ private final static String ENV_ID = "ENV_ID";
+ private final static String VNF_WORKLOAD_CONTEXT = "VNF_WORKLOAD_CONTEXT";
+ private final static String TENANT = "TENANT";
+ private final static String RESOURCE_NAME = "RESOURCE_NAME";
+ private final static String RESOURCE_TYPE = "RESOURCE_TYPE";
+ private final static String AUTH_URL = "AUTH_URL";
+ private final static String AUTH_RELM = "AUTH_RELM";
+ private final static String TOPIC_NAME = "TOPIC_NAME";
+
+ private final static String dateFormatPattern = "yyyy-MM-dd HH:mm:ss.SSS z";
+
+ private static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(dateFormatPattern);
+
+ private static DataMigration dataMigration = new DataMigration();
+
+ private final static String ES_STRING = "{\"" + AuditingFieldsKey.AUDIT_ACTION + "\":\"%s\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_NAME + "\":\"" + RESOURCE_NAME + "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_TOSCA_NODE_TYPE + "\":\"" + TOSCA_NODE_TYPE +
+ "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_PREV_VERSION + "\":\"" + PREV_VERSION + "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_PREV_STATE + "\":\"" + PREV_STATE +
+ "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_TYPE + "\":\"" + RESOURCE_TYPE + "\", \"" + AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID + "\":\"" + SERVICE_INSTANCE_ID +
+ "\", \"" + AuditingFieldsKey.AUDIT_INVARIANT_UUID + "\":\"" + INVARIANT_UUID + "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION + "\":\"" + CURR_VERSION +
+ "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_CURR_STATE + "\":\"" + CURR_STATE + "\", \"" + AuditingFieldsKey.AUDIT_MODIFIER_UID + "\":\"" + MODIFIER +
+ "\", \"" + AuditingFieldsKey.AUDIT_DESC + "\":\"" + DESCRIPTION + "\", \"" + AuditingFieldsKey.AUDIT_STATUS + "\":\"" + STATUS +
+ "\", \"" + AuditingFieldsKey.AUDIT_REQUEST_ID + "\":\"" + REQUEST_ID + "\", \"" + AuditingFieldsKey.AUDIT_CURR_ARTIFACT_UUID + "\":\"" + ARTIFACT_UUID +
+ "\", \"" + AuditingFieldsKey.AUDIT_PREV_ARTIFACT_UUID + "\":\"" + ARTIFACT_UUID + "\", \"" + AuditingFieldsKey.AUDIT_ARTIFACT_DATA + "\":\"" + ARTIFACT_DATA +
+ "\", \"" + AuditingFieldsKey.AUDIT_TIMESTAMP + "\":\"%s\"}";
+
+
+ private String timestampStr;
+
+ private HashMap<AuditingFieldsKey, String> dataMap = new HashMap<>();
+
+ @Before
+ public void setUp() {
+ dataMap.put(AUDIT_DESC, DESCRIPTION);
+ dataMap.put(AUDIT_STATUS, STATUS);
+ dataMap.put(AUDIT_REQUEST_ID, REQUEST_ID);
+ dataMap.put(AUDIT_SERVICE_INSTANCE_ID, SERVICE_INSTANCE_ID);
+ dataMap.put(AUDIT_MODIFIER_UID, MODIFIER);
+ dataMap.put(AUDIT_USER_BEFORE, USER_BEFORE);
+ dataMap.put(AUDIT_USER_UID, USER);
+ dataMap.put(AUDIT_USER_AFTER, USER_AFTER);
+ dataMap.put(AUDIT_AUTH_URL, AUTH_URL);
+ dataMap.put(AUDIT_AUTH_REALM, AUTH_RELM);
+ dataMap.put(AUDIT_PREV_ARTIFACT_UUID, ARTIFACT_UUID);
+ dataMap.put(AUDIT_CURR_ARTIFACT_UUID, ARTIFACT_UUID);
+ dataMap.put(AUDIT_RESOURCE_PREV_STATE, PREV_STATE);
+ dataMap.put(AUDIT_RESOURCE_PREV_VERSION, PREV_VERSION);
+ dataMap.put(AUDIT_RESOURCE_CURR_STATE, CURR_STATE);
+ dataMap.put(AUDIT_RESOURCE_CURR_VERSION, CURR_VERSION);
+ dataMap.put(AUDIT_RESOURCE_DPREV_STATUS, DPREV_STATUS);
+ dataMap.put(AUDIT_RESOURCE_DCURR_STATUS, DCURR_STATUS);
+ dataMap.put(AUDIT_INVARIANT_UUID, INVARIANT_UUID);
+ dataMap.put(AUDIT_ARTIFACT_DATA, ARTIFACT_DATA);
+ dataMap.put(AUDIT_RESOURCE_COMMENT, COMMENT);
+ dataMap.put(AUDIT_DISTRIBUTION_ID, DISTRIBUTION_ID);
+ dataMap.put(AUDIT_RESOURCE_TOSCA_NODE_TYPE, TOSCA_NODE_TYPE);
+ dataMap.put(AUDIT_DISTRIBUTION_CONSUMER_ID, CONSUMER_ID);
+ dataMap.put(AUDIT_RESOURCE_URL, RESOURCE_URL);
+ dataMap.put(AUDIT_DISTRIBUTION_ENVIRONMENT_ID, ENV_ID);
+ dataMap.put(AUDIT_DISTRIBUTION_VNF_WORKLOAD_CONTEXT, VNF_WORKLOAD_CONTEXT);
+ dataMap.put(AUDIT_DISTRIBUTION_TENANT, TENANT);
+ dataMap.put(AUDIT_RESOURCE_NAME, RESOURCE_NAME);
+ dataMap.put(AUDIT_RESOURCE_TYPE, RESOURCE_TYPE);
+ timestampStr = simpleDateFormat.format(new Date());
+ dataMap.put(AUDIT_TIMESTAMP, timestampStr);
+ dataMap.put(AUDIT_DISTRIBUTION_TOPIC_NAME, TOPIC_NAME);
+
+ }
+
+ @Test
+ public void createUserAdminEvent() {
+ dataMap.put(AUDIT_ACTION, AuditingActionEnum.ADD_USER.getName());
+ AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.USER_ADMIN_EVENT);
+ assertThat(AuditingActionEnum.ADD_USER.getName()).isEqualTo(event.getAction());
+ verifyCommonData(event, true);
+ verifyUserAdminEvent((UserAdminEvent) event);
+ }
+
+ @Test
+ public void createResourceAdminEvent() {
+ dataMap.put(AUDIT_ACTION, AuditingActionEnum.UPDATE_RESOURCE_METADATA.getName());
+ AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.RESOURCE_ADMIN_EVENT);
+ assertThat(AuditingActionEnum.UPDATE_RESOURCE_METADATA.getName()).isEqualTo(event.getAction());
+ verifyCommonData(event, true);
+ verifyResourceAdminEvent((ResourceAdminEvent)event);
+ }
+
+ @Test
+ public void createDistributionNotificationEvent() {
+ dataMap.put(AUDIT_ACTION, AuditingActionEnum.DISTRIBUTION_NOTIFY.getName());
+ AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.DISTRIBUTION_NOTIFICATION_EVENT);
+ assertThat(AuditingActionEnum.DISTRIBUTION_NOTIFY.getName()).isEqualTo(event.getAction());
+ verifyCommonData(event, true);
+ verifyDistributionNotificationEvent((DistributionNotificationEvent)event);
+ }
+
+
+ @Test
+ public void createEventForNoneAuditTable() {
+ assertThat(dataMigration.createAuditEvent(dataMap, Table.COMPONENT_CACHE)).isNull();
+
+ }
+
+ @Test
+ public void createEventWhenSomeFieldValuesNotSet() {
+ dataMap.clear();
+ dataMap.put(AUDIT_ACTION, AuditingActionEnum.AUTH_REQUEST.getName());
+ AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.AUTH_EVENT);
+ assertThat(AuditingActionEnum.AUTH_REQUEST.getName()).isEqualTo(event.getAction());
+ assertThat(event.getStatus()).isNull();
+ assertThat(event.getDesc()).isNull();
+ assertThat(event.getRequestId()).isNull();
+ }
+
+ @Test
+ public void createAuthEvent() {
+ dataMap.put(AUDIT_ACTION, AuditingActionEnum.AUTH_REQUEST.getName());
+ AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.AUTH_EVENT);
+ assertThat(AuditingActionEnum.AUTH_REQUEST.getName()).isEqualTo(event.getAction());
+ verifyCommonData(event, false);
+ verifyAuthEvent((AuthEvent) event);
+ }
+
+ @Test
+ public void createImportResourceEventFromEsObject() throws IOException{
+ AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr), Table.RESOURCE_ADMIN_EVENT);
+ assertThat(AuditingActionEnum.IMPORT_RESOURCE.getName()).isEqualTo(event.getAction());
+ verifyCommonData(event, true);
+ verifyResourceAdminEvent((ResourceAdminEvent)event);
+ }
+
+ @Test
+ public void createGetUserListEventFromEsObject() throws IOException{
+ AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.GET_USERS_LIST.getName(), timestampStr),
+ Table.GET_USERS_LIST_EVENT);
+ assertThat(AuditingActionEnum.GET_USERS_LIST.getName()).isEqualTo(event.getAction());
+ verifyCommonData(event, false);
+ assertThat(((GetUsersListEvent)event).getModifier()).isEqualTo(MODIFIER);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void createEventFromEsFailedWhenActionDoesNotExist() throws IOException {
+ dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, "WRONG", timestampStr),
+ Table.CONSUMER_EVENT);
+ }
+
+ @Test(expected = NullPointerException.class)
+ public void createRecordWhenJsonIsEmpty() throws IOException{
+ dataMigration.createAuditRecordForCassandra("{}",
+ Table.CONSUMER_EVENT);
+ }
+
+ private void verifyCommonData(AuditingGenericEvent event, boolean isServiceInstanceProvided) {
+ assertThat(STATUS).isEqualTo(event.getStatus());
+ if (isServiceInstanceProvided) {
+ assertThat(SERVICE_INSTANCE_ID).isEqualTo(event.getServiceInstanceId());
+ }
+ else {
+ assertThat(event.getServiceInstanceId()).isNull();
+ }
+ assertThat(DESCRIPTION).isEqualTo(event.getDesc());
+ assertThat(REQUEST_ID).isEqualTo(event.getRequestId());
+ }
+
+ private void verifyUserAdminEvent(UserAdminEvent event) {
+ assertThat(USER_AFTER).isEqualTo(event.getUserAfter());
+ assertThat(USER_BEFORE).isEqualTo(event.getUserBefore());
+ verifyTimestamp(event.getTimestamp1());
+ }
+
+ private void verifyAuthEvent(AuthEvent event) {
+ assertThat(USER).isEqualTo(event.getUser());
+ assertThat(AUTH_URL).isEqualTo(event.getUrl());
+ assertThat(event.getAuthStatus()).isNull();
+ assertThat(AUTH_RELM).isEqualTo(event.getRealm());
+ }
+
+ private void verifyTimestamp(Date date) {
+ assertThat(timestampStr).isEqualTo(simpleDateFormat.format(date));
+ }
+
+ private void verifyResourceAdminEvent(ResourceAdminEvent event) {
+ assertThat(CURR_STATE).isEqualTo(event.getCurrState());
+ assertThat(CURR_VERSION).isEqualTo(event.getCurrVersion());
+ assertThat(ARTIFACT_UUID).isEqualTo(event.getCurrArtifactUUID());
+ assertThat(PREV_STATE).isEqualTo(event.getPrevState());
+ assertThat(PREV_VERSION).isEqualTo(event.getPrevVersion());
+ assertThat(ARTIFACT_UUID).isEqualTo(event.getPrevArtifactUUID());
+ assertThat(INVARIANT_UUID).isEqualTo(event.getInvariantUUID());
+ assertThat(ARTIFACT_DATA).isEqualTo(event.getArtifactData());
+ assertThat(RESOURCE_NAME).isEqualTo(event.getResourceName());
+ assertThat(RESOURCE_TYPE).isEqualTo(event.getResourceType());
+ verifyTimestamp(event.getTimestamp1());
+ assertThat(TOSCA_NODE_TYPE).isEqualTo( event.getToscaNodeType());
+ }
+
+ private void verifyDistributionNotificationEvent(DistributionNotificationEvent event) {
+ assertThat(CURR_STATE).isEqualTo(event.getCurrState());
+ assertThat(CURR_VERSION).isEqualTo(event.getCurrVersion());
+ assertThat(TOPIC_NAME).isEqualTo(event.getTopicName());
+ assertThat(DISTRIBUTION_ID).isEqualTo(event.getDid());
+ assertThat(ENV_ID).isEqualTo(event.getEnvId());
+ assertThat(VNF_WORKLOAD_CONTEXT).isEqualTo(event.getVnfWorkloadContext());
+ assertThat(TENANT).isEqualTo(event.getTenant());
+ verifyTimestamp(event.getTimestamp1());
+ }
- private DataMigration createTestSubject() {
- return new DataMigration();
- }
-
- /*@Test(expected=IllegalArgumentException.class)
- public void testMigrateDataEsToCassandra() throws Exception {
- DataMigration testSubject;
- String appConfigDir = "src/main/resources/config/";
- boolean exportFromEs = false;
- boolean importToCassandra = false;
-
- // default test
- testSubject = createTestSubject();
- testSubject.migrateDataEsToCassandra(appConfigDir, exportFromEs, importToCassandra);
- }*/
-
- @Test(expected=NullPointerException.class)
- public void testCreateAuditRecord() throws Exception {
- DataMigration testSubject;
-
- EnumMap<AuditingFieldsKeysEnum, Object> auditingFields = new EnumMap<AuditingFieldsKeysEnum, Object>(AuditingFieldsKeysEnum.class);
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_ACTION, AuditingActionEnum.GET_CATEGORY_HIERARCHY.getName());
-
- String DATE_FORMAT_PATTERN = "yyyy-MM-dd HH:mm:ss.SSS z";
- SimpleDateFormat simpleDateFormat = new SimpleDateFormat(DATE_FORMAT_PATTERN);
- simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-
- auditingFields.put(AuditingFieldsKeysEnum.AUDIT_TIMESTAMP, "2018-05-02 06:06:18.294 UTC");
-
- // default test
- testSubject = createTestSubject();
- testSubject.createAuditRecord(auditingFields);
- }
-
- @Test
- public void testTypeToTableMapping() throws Exception {
- TypeToTableMapping[] values = TypeToTableMapping.values();
-
- for (TypeToTableMapping typeToTableMapping : values) {
- TypeToTableMapping.getTableByType(typeToTableMapping.getTypeName());
- typeToTableMapping.getTable();
-
- }
-
- TypeToTableMapping.getTableByType("stam");
- }
}
\ No newline at end of file
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java
index 51dfca33a2..ab6c49cdc9 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java
@@ -1,9 +1,9 @@
package org.openecomp.sdc.asdctool.impl;
-import java.nio.file.NoSuchFileException;
-
import org.junit.Test;
+import java.nio.file.NoSuchFileException;
+
public class GraphJsonValidatorTest {
private GraphJsonValidator createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java
index 3f5d3e144d..fb003e8012 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java
@@ -1,13 +1,12 @@
package org.openecomp.sdc.asdctool.impl;
-import java.util.List;
-import java.util.Map;
-
+import com.thinkaurelius.titan.core.TitanGraph;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.junit.Test;
-import com.thinkaurelius.titan.core.TitanGraph;
+import java.util.List;
+import java.util.Map;
public class GraphMLConverterTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertexTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertexTest.java
index 503f52a5af..4db5c06262 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertexTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertexTest.java
@@ -1,11 +1,10 @@
package org.openecomp.sdc.asdctool.impl;
-import java.util.List;
-import java.util.Map;
-
+import com.thinkaurelius.titan.core.TitanGraph;
import org.junit.Test;
-import com.thinkaurelius.titan.core.TitanGraph;
+import java.util.List;
+import java.util.Map;
public class UpdatePropertyOnVertexTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandlerTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandlerTest.java
new file mode 100644
index 0000000000..c0ae55a607
--- /dev/null
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandlerTest.java
@@ -0,0 +1,60 @@
+package org.openecomp.sdc.asdctool.impl;
+
+import fj.data.Either;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mockito;
+import org.mockito.junit.MockitoJUnitRunner;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.ArgumentMatchers.anyMap;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class VrfObjectFixHandlerTest {
+
+ private TitanDao titanDao;
+
+ private VrfObjectFixHandler vrfObjectFixHandler;
+
+ @Before
+ public void init(){
+ titanDao = Mockito.mock(TitanDao.class);
+ vrfObjectFixHandler = new VrfObjectFixHandler(titanDao);
+ }
+
+ @Test
+ public void handleInvalidModeTest(){
+ assertThat(vrfObjectFixHandler.handle("invalid mode", null)).isFalse();
+ }
+
+ @Test
+ public void handleDetectNotFoundTest(){
+ when(titanDao.getByCriteria(eq(VertexTypeEnum.NODE_TYPE), anyMap())).thenReturn(Either.right(TitanOperationStatus.NOT_FOUND));
+ assertThat(vrfObjectFixHandler.handle("detect", null)).isTrue();
+ }
+
+ @Test
+ public void handleDetectTitanNotConnectedTest(){
+ when(titanDao.getByCriteria(eq(VertexTypeEnum.NODE_TYPE), anyMap())).thenReturn(Either.right(TitanOperationStatus.NOT_CONNECTED));
+ assertThat(vrfObjectFixHandler.handle("detect", null)).isFalse();
+ }
+
+ @Test
+ public void handleFixNotFoundTest(){
+ when(titanDao.getByCriteria(eq(VertexTypeEnum.NODE_TYPE), anyMap())).thenReturn(Either.right(TitanOperationStatus.NOT_FOUND));
+ assertThat(vrfObjectFixHandler.handle("fix", null)).isTrue();
+ }
+
+ @Test
+ public void handleFixNotCreatedTest(){
+ when(titanDao.getByCriteria(eq(VertexTypeEnum.NODE_TYPE), anyMap())).thenReturn(Either.right(TitanOperationStatus.NOT_CREATED));
+ assertThat(vrfObjectFixHandler.handle("fix", null)).isFalse();
+ }
+
+}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java
index 32aec782c1..4f8d96491c 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.impl.validator;
-import java.util.LinkedList;
-
import org.junit.Test;
import org.openecomp.sdc.asdctool.impl.validator.executers.NodeToscaArtifactsValidatorExecuter;
+import java.util.LinkedList;
+
public class ArtifactToolBLTest {
private ArtifactToolBL createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java
index bb74c70423..69b77415a3 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.impl.validator;
-import java.util.LinkedList;
-
import org.junit.Test;
import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceValidatorExecuter;
+import java.util.LinkedList;
+
public class ValidationToolBLTest {
private ValidationToolBL createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManagerTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManagerTest.java
index f639c8a7de..ffdf306520 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManagerTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManagerTest.java
@@ -1,9 +1,9 @@
package org.openecomp.sdc.asdctool.impl.validator.config;
-import java.util.Properties;
-
import org.junit.Test;
+import java.util.Properties;
+
public class ValidationConfigManagerTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java
index da1ef90a40..ece89dbd21 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java
@@ -3,11 +3,7 @@ package org.openecomp.sdc.asdctool.impl.validator.config;
import org.junit.Test;
import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL;
import org.openecomp.sdc.asdctool.impl.validator.ValidationToolBL;
-import org.openecomp.sdc.asdctool.impl.validator.executers.NodeToscaArtifactsValidatorExecuter;
-import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceToscaArtifactsValidatorExecutor;
-import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceValidatorExecuter;
-import org.openecomp.sdc.asdctool.impl.validator.executers.VFToscaArtifactValidatorExecutor;
-import org.openecomp.sdc.asdctool.impl.validator.executers.VfValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.*;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ArtifactValidationUtils;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ServiceArtifactValidationTask;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.VfArtifactValidationTask;
@@ -18,14 +14,7 @@ import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
import org.openecomp.sdc.be.dao.titan.TitanGraphClient;
import org.openecomp.sdc.be.model.DerivedNodeTypeResolver;
-import org.openecomp.sdc.be.model.jsontitan.operations.ArtifactsOperations;
-import org.openecomp.sdc.be.model.jsontitan.operations.CategoryOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.GroupsOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.NodeTypeOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.ToscaDataOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.ToscaElementLifecycleOperation;
-import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.jsontitan.operations.*;
public class ValidationToolConfigurationTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java
index e14530212a..06f557dbc3 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java
@@ -1,16 +1,16 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.Resource;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
public class ArtifactValidatorExecuterTest {
private ArtifactValidatorExecuter createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java
index 27deb6bce8..50ee75abd9 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java
@@ -1,12 +1,12 @@
package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
-import java.util.List;
-import java.util.Map;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
+import java.util.List;
+import java.util.Map;
+
public class ArtifactValidationUtilsTest {
private ArtifactValidationUtils createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnumTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnumTest.java
index 163994e423..15cfc36fa6 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnumTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ElementTypeEnumTest.java
@@ -1,9 +1,9 @@
package org.openecomp.sdc.asdctool.impl.validator.utils;
-import java.util.List;
-
import org.junit.Test;
+import java.util.List;
+
public class ElementTypeEnumTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java
index a50af44a8c..1ad0906949 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.impl.validator.utils;
-import java.util.Set;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import java.util.Set;
+
public class ReportManagerTest {
@Test
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/VfModuleArtifactPayloadExTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/VfModuleArtifactPayloadExTest.java
index 50c01b074c..2e72c9e74d 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/VfModuleArtifactPayloadExTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/VfModuleArtifactPayloadExTest.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.impl.validator.utils;
+import org.junit.Test;
+
import java.util.List;
import java.util.Map;
-import org.junit.Test;
-
public class VfModuleArtifactPayloadExTest {
private VfModuleArtifactPayloadEx createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImportTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImportTest.java
index 791aeb922d..c3851a5b83 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImportTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImportTest.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.main;
-import java.nio.file.NoSuchFileException;
-
import org.junit.Test;
import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;
+import java.nio.file.NoSuchFileException;
+
public class SdcSchemaFileImportTest {
private SdcSchemaFileImport createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMockTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMockTest.java
index feea79fdd1..0d2c4711c6 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMockTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMockTest.java
@@ -1,12 +1,15 @@
package org.openecomp.sdc.asdctool.migration.config.mocks;
+import org.junit.Ignore;
import org.junit.Test;
import org.openecomp.sdc.be.components.distribution.engine.INotificationData;
import org.openecomp.sdc.be.dao.api.ActionStatus;
import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.User;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.resources.data.OperationalEnvironmentEntry;
+@Ignore ("This class does not test anything, there is not a single assertion and the code with reflection fails")
public class DistributionEngineMockTest {
private DistributionEngineMock createTestSubject() {
@@ -31,8 +34,8 @@ public class DistributionEngineMockTest {
INotificationData notificationData = null;
String envName = "";
String userId = "";
- String modifierName = "";
ActionStatus result;
+ User modifierName=new User();
// default test
testSubject = createTestSubject();
@@ -48,12 +51,12 @@ public class DistributionEngineMockTest {
String envId = "";
String envName = "";
String userId = "";
- String modifierName = "";
+ User modifierName = new User();
ActionStatus result;
// default test
testSubject = createTestSubject();
- result = testSubject.notifyService(distributionId, service, notificationData, envId, envName, userId,
+ result = testSubject.notifyService(distributionId, service, notificationData, envId, envName,
modifierName);
}
@@ -88,6 +91,7 @@ public class DistributionEngineMockTest {
testSubject.disableEnvironment(envName);
}
+ @Ignore
@Test
public void testIsReadyForDistribution() throws Exception {
DistributionEngineMock testSubject;
@@ -97,7 +101,7 @@ public class DistributionEngineMockTest {
// default test
testSubject = createTestSubject();
- result = testSubject.isReadyForDistribution(service, envName);
+ result = testSubject.isReadyForDistribution(envName);
}
@Test
@@ -114,17 +118,6 @@ public class DistributionEngineMockTest {
}
@Test
- public void testVerifyServiceHasDeploymentArtifacts() throws Exception {
- DistributionEngineMock testSubject;
- Service service = null;
- StorageOperationStatus result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.verifyServiceHasDeploymentArtifacts(service);
- }
-
- @Test
public void testGetEnvironmentById() throws Exception {
DistributionEngineMock testSubject;
String opEnvId = "";
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/DBVersionTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/DBVersionTest.java
index d5473cd121..830edd7fb2 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/DBVersionTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/DBVersionTest.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.migration.core;
-import static org.testng.Assert.assertEquals;
-
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
+import static org.testng.Assert.assertEquals;
+
public class DBVersionTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java
index da6122c5f0..3af664569f 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java
@@ -1,12 +1,5 @@
package org.openecomp.sdc.asdctool.migration.core;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import java.util.Arrays;
-import java.util.Collections;
-
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
@@ -19,6 +12,11 @@ import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
+import java.util.Arrays;
+import java.util.Collections;
+
+import static org.mockito.Mockito.*;
+
public class SdcMigrationToolTest {
@InjectMocks
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImplTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImplTest.java
index 3f9766d9c5..86ce9b2ccc 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImplTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImplTest.java
@@ -1,13 +1,13 @@
package org.openecomp.sdc.asdctool.migration.core.execution;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-
import org.openecomp.sdc.asdctool.migration.DummyMigrationFactory;
import org.openecomp.sdc.asdctool.migration.core.task.Migration;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
import org.testng.annotations.Test;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+
public class MigrationExecutorImplTest {
@Test
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDaoTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDaoTest.java
index 73f50f4ec6..a21356637a 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDaoTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDaoTest.java
@@ -1,10 +1,10 @@
package org.openecomp.sdc.asdctool.migration.dao;
-import java.math.BigInteger;
-
import org.junit.Test;
import org.openecomp.sdc.be.resources.data.MigrationTaskEntry;
+import java.math.BigInteger;
+
public class MigrationTasksDaoTest {
private MigrationTasksDao createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolverTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolverTest.java
index 47bf9c9c74..b6267d3950 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolverTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolverTest.java
@@ -1,13 +1,5 @@
package org.openecomp.sdc.asdctool.migration.resolver;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
@@ -20,6 +12,14 @@ import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
public class SpringBeansMigrationResolverTest {
@InjectMocks
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java
index c50cae0673..38cf068cbb 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java
@@ -1,16 +1,12 @@
package org.openecomp.sdc.asdctool.migration.scanner;
+import org.apache.commons.io.FileUtils;
+import org.openecomp.sdc.asdctool.migration.core.MigrationException;
+
import java.io.File;
import java.lang.reflect.Modifier;
import java.net.URL;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Optional;
-
-import org.apache.commons.io.FileUtils;
-import org.openecomp.sdc.asdctool.migration.core.MigrationException;
+import java.util.*;
/**
* scan and instantiate classes of given type in the class path
@@ -76,6 +72,4 @@ public class ClassScanner {
String classes = "classes.";
return asPackage.substring(asPackage.indexOf(classes) + classes.length());
}
-
-
}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java
index 2745f59006..dbaf443935 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java
@@ -1,11 +1,5 @@
package org.openecomp.sdc.asdctool.migration.service;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-
-import java.math.BigInteger;
-
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
@@ -16,6 +10,12 @@ import org.openecomp.sdc.be.resources.data.MigrationTaskEntry;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
+import java.math.BigInteger;
+
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+
public class SdcRepoServiceTest {
@InjectMocks
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java
index 52dc7c9fcd..8d48a5e9ad 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java
@@ -1,11 +1,5 @@
package org.openecomp.sdc.asdctool.migration.task;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
-
import org.apache.commons.lang.StringUtils;
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.asdctool.migration.core.task.Migration;
@@ -14,6 +8,12 @@ import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
public class MigrationTasksTest {
@@ -21,13 +21,13 @@ public class MigrationTasksTest {
private List<Migration> migrations;
@BeforeMethod
- public void setUp() {
+ public void setUp() throws Exception {
ClassScanner classScanner = new ClassScanner();
migrations = classScanner.getAllClassesOfType(MIGRATIONS_BASE_PACKAGE, Migration.class);
}
@Test
- public void testNoTasksWithSameVersion() {
+ public void testNoTasksWithSameVersion() throws Exception {
Map<DBVersion, List<Migration>> migrationsByVersion = migrations.stream().collect(Collectors.groupingBy(Migration::getVersion));
migrationsByVersion.forEach((version, migrations) -> {
if (migrations.size() > 1) {
@@ -38,14 +38,14 @@ public class MigrationTasksTest {
}
@Test
- public void testNoTaskWithVersionGreaterThanCurrentVersion() {
+ public void testNoTaskWithVersionGreaterThanCurrentVersion() throws Exception {
Set<Migration> migrationsWithVersionsGreaterThanCurrent = migrations.stream().filter(mig -> mig.getVersion().compareTo(DBVersion.CURRENT_VERSION) > 0)
.collect(Collectors.toSet());
if (!migrationsWithVersionsGreaterThanCurrent.isEmpty()) {
Assert.fail(String.format("migrations tasks %s have version which is greater than DBVersion.CURRENT_VERSION %s. did you forget to update current version?",
- getMigrationsNameAsString(migrationsWithVersionsGreaterThanCurrent),
- DBVersion.CURRENT_VERSION.toString()));
+ getMigrationsNameAsString(migrationsWithVersionsGreaterThanCurrent),
+ DBVersion.CURRENT_VERSION.toString()));
}
}
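
The duplicate-version check above groups the discovered migrations with Collectors.groupingBy and fails when any version bucket holds more than one task. A standalone sketch of the same grouping idea, using a hypothetical Task type rather than the SDC Migration/DBVersion classes:

    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class DuplicateVersionCheck {

        // Hypothetical stand-in for a migration task; only the version matters here.
        static class Task {
            final String name;
            final String version;
            Task(String name, String version) { this.name = name; this.version = version; }
            String getVersion() { return version; }
            @Override public String toString() { return name; }
        }

        public static void main(String[] args) {
            List<Task> tasks = Arrays.asList(
                    new Task("renameField", "1710.0"),
                    new Task("fixVfModules", "1710.0"),
                    new Task("addIndex", "1806.0"));

            // Group by version, the same way the test groups migrations by their DB version.
            Map<String, List<Task>> byVersion =
                    tasks.stream().collect(Collectors.groupingBy(Task::getVersion));

            // A bucket with more than one entry means two tasks claim the same version.
            byVersion.forEach((version, sameVersion) -> {
                if (sameVersion.size() > 1) {
                    throw new IllegalStateException("tasks " + sameVersion + " share version " + version);
                }
            });
        }
    }

Grouping first and then inspecting bucket sizes reports every colliding version in one pass, not just the first collision found.
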
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java
index 66d9b15e4b..19f6f26ac8 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java
@@ -1,40 +1,65 @@
package org.openecomp.sdc.asdctool.migration.tasks.handlers;
+import org.apache.poi.ss.usermodel.Workbook;
import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Spy;
+import org.mockito.junit.MockitoJUnitRunner;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.*;
+
+@RunWith(MockitoJUnitRunner.class)
public class XlsOutputHandlerTest {
- private XlsOutputHandler createTestSubject() {
- return new XlsOutputHandler(new Object());
- }
-
- @Test
- public void testInitiate() throws Exception {
- XlsOutputHandler testSubject;
- Object[] title = new Object[] { null };
-
- // default test
- testSubject = createTestSubject();
- testSubject.initiate(title);
- }
-
- @Test
- public void testAddRecord() throws Exception {
- XlsOutputHandler testSubject;
- Object[] record = new Object[] { null };
-
- // default test
- testSubject = createTestSubject();
- testSubject.addRecord(record);
- }
-
- @Test
- public void testWriteOutput() throws Exception {
- XlsOutputHandler testSubject;
- boolean result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.writeOutput();
- }
-}
\ No newline at end of file
+ @Spy
+ private XlsOutputHandler handler = new XlsOutputHandler(null, "mock");
+
+ @Mock
+ private Workbook workbook;
+ @Mock
+ private FileOutputStream xlsFile;
+
+ @Test
+ public void verifyThatFileIsNotCreatedIfNoRecordsAdded() throws IOException {
+ assertFalse(handler.writeOutputAndCloseFile());
+ verify(workbook, times(0)).write(any());
+ }
+
+ @Test
+ public void verifyThatFileIsCreatedIfSomeRecordsAdded() throws IOException {
+ handler.addRecord("mock");
+ doReturn(xlsFile).when(handler).getXlsFile();
+ assertTrue(handler.writeOutputAndCloseFile());
+ }
+
+
+ private XlsOutputHandler createTestSubject() {
+ return new XlsOutputHandler("mock", "mockPath", new Object());
+ }
+
+ @Test
+ public void testInitiate() throws Exception {
+ XlsOutputHandler testSubject;
+ Object[] title = new Object[] { null };
+ // default test
+ testSubject = createTestSubject();
+ testSubject.initiate("mock", title);
+ }
+
+ @Test
+ public void testAddRecord() throws Exception {
+ XlsOutputHandler testSubject;
+ Object[] record = new Object[] { null };
+
+ // default test
+ testSubject = createTestSubject();
+ testSubject.addRecord(record);
+ }
+}
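
The rewritten XlsOutputHandlerTest spies on the real handler and stubs only the file-opening call, so writeOutputAndCloseFile() can run its real logic without touching the filesystem. A hedged sketch of that partial-mocking pattern, with a hypothetical ReportWriter in place of XlsOutputHandler:

    import static org.junit.Assert.assertFalse;
    import static org.junit.Assert.assertTrue;
    import static org.mockito.Mockito.doReturn;

    import java.io.ByteArrayOutputStream;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;

    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.mockito.Spy;
    import org.mockito.junit.MockitoJUnitRunner;

    // Hypothetical class under test: buffers records and writes them out on demand.
    class ReportWriter {
        private final StringBuilder rows = new StringBuilder();

        void addRecord(String record) {
            rows.append(record).append('\n');
        }

        // Overridable seam: the test stubs this so no real file is ever created.
        protected OutputStream openFile() throws Exception {
            throw new UnsupportedOperationException("real file access is not exercised in tests");
        }

        boolean write() throws Exception {
            if (rows.length() == 0) {
                return false; // nothing recorded, nothing written
            }
            try (OutputStream out = openFile()) {
                out.write(rows.toString().getBytes(StandardCharsets.UTF_8));
            }
            return true;
        }
    }

    @RunWith(MockitoJUnitRunner.class)
    public class ReportWriterTest {

        @Spy
        private ReportWriter writer = new ReportWriter();

        @Test
        public void nothingIsWrittenWithoutRecords() throws Exception {
            assertFalse(writer.write());
        }

        @Test
        public void recordsAreWrittenToTheStubbedStream() throws Exception {
            writer.addRecord("row");
            // Partial mock: only openFile() is replaced; the rest of write() runs for real.
            doReturn(new ByteArrayOutputStream()).when(writer).openFile();
            assertTrue(writer.write());
        }
    }
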
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java
index f2a0a3c716..df5761d893 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java
@@ -1,21 +1,9 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1710;
-import static org.junit.Assert.assertEquals;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
+import com.google.common.collect.Lists;
+import fj.data.Either;
+import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -25,35 +13,56 @@ import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
import org.openecomp.sdc.asdctool.migration.tasks.handlers.XlsOutputHandler;
+import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.exceptions.ComponentException;
import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
+import org.openecomp.sdc.be.components.scheduledtasks.ComponentsCleanBusinessLogic;
import org.openecomp.sdc.be.config.Configuration;
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.be.dao.api.ActionStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.Component;
-import org.openecomp.sdc.be.model.ComponentInstance;
-import org.openecomp.sdc.be.model.LifecycleStateEnum;
-import org.openecomp.sdc.be.model.Resource;
-import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.*;
import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.CsarOperation;
import org.openecomp.sdc.common.api.ConfigurationSource;
+import org.openecomp.sdc.common.http.client.api.HttpRequestHandler;
import org.openecomp.sdc.exception.ResponseFormat;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.*;
-import fj.data.Either;
@RunWith(MockitoJUnitRunner.class)
public class UpgradeMigration1710Test {
- private final static String USER = "jh0003";
- private final static String CONF_LEVEL = "5.0";
+ private static final String USER = "jh0003";
+ private static final String CONF_LEVEL = "5.0";
+ private static final String COMPONENT_UNIQUE_ID = "12345";
+ private static final String OLD_VERSION = "1.0";
+ private static final String UPDATED_VERSION = "2.0";
+ private static final String CSAR_UUID = "1234578";
+ private static HttpRequestHandler originHandler;
private final User user = new User();
@@ -70,18 +79,40 @@ public class UpgradeMigration1710Test {
@Mock
private ComponentsUtils componentUtils;
@Mock
+ private CsarOperation csarOperation;
+ @Mock
private ConfigurationSource configurationSource;
+ //don't remove - it is intended to avoid generating the xls file
@Mock
private XlsOutputHandler outputHandler;
+ @Mock
+ private ResourceBusinessLogic resourceBusinessLogic;
+ @Mock
+ private ServiceBusinessLogic serviceBusinessLogic;
+ @Mock
+ private ResponseFormat responseFormat;
+ @Mock
+ private ComponentsCleanBusinessLogic componentsCleanBusinessLogic;
private static ConfigurationManager configurationManager;
private static List<String> resources = Stream.of("org.openecomp.resource.cp.extCP").collect(Collectors.toList());
private static Map<String, List<String>> resourcesForUpgrade;
+ private Resource resource;
+ private Service service;
+ private List<String> vfList = new ArrayList<>();
+
@BeforeClass
public static void setUpClass() {
resourcesForUpgrade = new HashMap<>();
resourcesForUpgrade.put(CONF_LEVEL, resources);
+ originHandler = HttpRequestHandler.get();
+ }
+
+ @AfterClass
+ public static void tearDownClass() {
+ //put the origin handler back
+ HttpRequestHandler.setTestInstance(originHandler);
}
@Before
@@ -92,43 +123,112 @@ public class UpgradeMigration1710Test {
configurationManager.getConfiguration().setSkipUpgradeVSPs(true);
configurationManager.getConfiguration().setSkipUpgradeFailedVfs(true);
configurationManager.getConfiguration().setAutoHealingOwner(USER);
+ configurationManager.getConfiguration().setSupportAllottedResourcesAndProxy(true);
+ configurationManager.getConfiguration().setDeleteLockTimeoutInSeconds(10);
+ configurationManager.getConfiguration().setMaxDeleteComponents(5);
+ configurationManager.getConfiguration().setEnableAutoHealing(true);
+ configurationManager.getConfiguration().setToscaConformanceLevel("5.0");
+ HashMap<String, List<String>> resourcesForUpgrade = new HashMap<>();
+ resourcesForUpgrade.put("5.0", Lists.newArrayList("port"));
+ configurationManager.getConfiguration().setResourcesForUpgrade(resourcesForUpgrade);
+
+ migration.init();
+ migration.setNodeTypesSupportOnly(false);
+ when(componentsCleanBusinessLogic.lockDeleteOperation()).thenReturn(StorageOperationStatus.OK);
+
+ resource = new Resource();
+ resource.setCsarUUID(CSAR_UUID);
+ resource.setVersion(OLD_VERSION);
+ resource.setUniqueId(COMPONENT_UNIQUE_ID);
+ service = new Service();
+ service.setVersion(OLD_VERSION);
+ service.setUniqueId(COMPONENT_UNIQUE_ID);
+
+ vfList.add(COMPONENT_UNIQUE_ID);
+
+ when(responseFormat.getFormattedMessage())
+ .thenReturn("");
+ when(componentUtils.getResponseFormat(any(ActionStatus.class), any()))
+ .thenReturn(responseFormat);
+ when(componentUtils.convertFromStorageResponse(any(), any())).thenCallRealMethod();
+ mockChangeComponentState();
}
@Test
public void nodeTypesUpgradeFailed() {
+ migration.setNodeTypesSupportOnly(true);
resolveUserAndDefineUpgradeLevel();
when(titanDao.getByCriteria(any(), any(), any(), any()))
- .thenReturn(Either.right(TitanOperationStatus.NOT_FOUND));
+ .thenReturn(Either.right(TitanOperationStatus.NOT_FOUND));
+ assertEquals(MigrationResult.MigrationStatus.FAILED, migration.migrate().getMigrationStatus());
+ }
+
+ @Test
+ public void migrationDisabled() {
+ configurationManager.getConfiguration().setEnableAutoHealing(false);
+ migration.init();
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
+ verify(titanDao, times(0)).commit();
+ verify(titanDao, times(0)).rollback();
+ }
+
+ @Test
+ public void migrationFailedIfDeleteNodeLockFailed() {
+ when(componentsCleanBusinessLogic.lockDeleteOperation())
+ .thenReturn(StorageOperationStatus.BAD_REQUEST);
+ assertEquals(MigrationResult.MigrationStatus.FAILED, migration.migrate().getMigrationStatus());
+ }
+
+ @Test
+ public void migrationFailedIfDeleteNodeLockRetryFailed() {
+ when(componentsCleanBusinessLogic.lockDeleteOperation())
+ .thenReturn(StorageOperationStatus.FAILED_TO_LOCK_ELEMENT)
+ .thenReturn(StorageOperationStatus.BAD_REQUEST);
assertEquals(MigrationResult.MigrationStatus.FAILED, migration.migrate().getMigrationStatus());
}
@Test
+ public void nodeTypesOnlyUpgradePassed() {
+ migration.setNodeTypesSupportOnly(true);
+ upgradeAllScenario(false);
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
+ verify(titanDao, times(2)).commit();
+ verify(titanDao, times(0)).rollback();
+ }
+
+ @Test
public void nodeTypesUpgradePassedAndVFsUpgradeFailedWhenSkipFailedVFsIsNotSupported() {
final boolean failOnVfUpgrade = true;
final boolean upgradeServices = false;
final boolean exceptionOnVfUpgrade = false;
final boolean upgradeVFC = false;
+ final boolean isFailed = true;
configurationManager.getConfiguration().setSkipUpgradeFailedVfs(false);
+ migration.init();
+ migration.setNodeTypesSupportOnly(false);
resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnVfUpgrade, upgradeServices, upgradeVFC);
+ upgradeRules(failOnVfUpgrade, exceptionOnVfUpgrade, upgradeServices, upgradeVFC, isFailed);
assertEquals(MigrationResult.MigrationStatus.FAILED, migration.migrate().getMigrationStatus());
verify(titanDao, times(1)).commit();
verify(titanDao, times(2)).rollback();
}
+
@Test
public void upgradeAllVFsUpgradeFailedOnExceptionWhenSkipFailedVFsIsNotSupported() {
final boolean failOnVfUpgrade = false;
final boolean upgradeServices = false;
final boolean exceptionOnVfUpgrade = true;
final boolean upgradeVFC = false;
+ final boolean isFailed = true;
configurationManager.getConfiguration().setSkipUpgradeFailedVfs(false);
resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnVfUpgrade, upgradeServices, upgradeVFC);
- assertEquals(MigrationResult.MigrationStatus.FAILED, migration.migrate().getMigrationStatus());
- verify(titanDao, times(1)).commit();
- verify(titanDao, times(2)).rollback();
+ upgradeRules(failOnVfUpgrade, exceptionOnVfUpgrade, upgradeServices, upgradeVFC, isFailed);
+ migration.init();
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
+ verify(titanDao, times(2)).commit();
+ verify(titanDao, times(0)).rollback();
}
@Test
@@ -137,26 +237,33 @@ public class UpgradeMigration1710Test {
final boolean upgradeServices = true;
final boolean exceptionOnFvUpgrade = true;
final boolean upgradeVFC = false;
+ final boolean isFailed = false;
configurationManager.getConfiguration().setSkipUpgradeFailedVfs(true);
resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeVFC);
+ upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeVFC, isFailed);
assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
- verify(titanDao, times(2)).commit();
- verify(titanDao, times(3)).rollback();
+ verify(titanDao, times(3)).commit();
+ verify(titanDao, times(1)).rollback();
}
@Test
public void upgradeAll() {
- final boolean failOnVfUpgrade = false;
- final boolean upgradeServices = true;
- final boolean exceptionOnFvUpgrade = false;
- final boolean upgradeVFC = false;
- resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeVFC);
+ upgradeAllScenario(true);
assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
- verify(titanDao, times(2)).commit();
- verify(titanDao, times(3)).rollback();
+ verify(titanDao, times(4)).commit();
+ verify(titanDao, times(0)).rollback();
+ }
+
+ @Test
+ public void upgradeAllWhenDeleteLockRetrySucceeded() {
+ when(componentsCleanBusinessLogic.lockDeleteOperation())
+ .thenReturn(StorageOperationStatus.FAILED_TO_LOCK_ELEMENT)
+ .thenReturn(StorageOperationStatus.OK);
+ upgradeAllScenario(true);
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
+ verify(titanDao, times(4)).commit();
+ verify(titanDao, times(0)).rollback();
}
@Test
@@ -165,10 +272,10 @@ public class UpgradeMigration1710Test {
final boolean upgradeServices = true;
final boolean exceptionOnFvUpgrade = false;
final boolean upgradeVFC = true;
+ final boolean isFailed = true;
resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeVFC);
+ upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeVFC, isFailed);
configurationManager.getConfiguration().setSkipUpgradeVSPs(false);
-// migration.setComponentsUtils(componentUtils);
assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
}
@@ -179,70 +286,306 @@ public class UpgradeMigration1710Test {
assertEquals(MigrationResult.MigrationStatus.FAILED, migration.migrate().getMigrationStatus());
}
+ @Test
+ public void verifyThatCheckedOutResourcesMarkedAsDeletedIfUpgradeFailed() {
+ mockCheckoutFlow();
+ when(resourceBusinessLogic.validateAndUpdateResourceFromCsar(any(Resource.class), any(), any(), any(),
+ any()))
+ .thenThrow(new ComponentException(responseFormat));
+ when(resourceBusinessLogic.deleteResource(anyString(), any()))
+ .thenReturn(responseFormat);
+ mockChangeComponentState();
+ migration.upgradeVFs(vfList, false);
+ verify(resourceBusinessLogic).deleteResource(anyString(), any());
+ }
+
+ @Test
+ public void verifyThatCheckedOutAllottedResourcesMarkedAsDeletedIfUpgradeFailed() {
+ mockCheckoutFlow();
+ when(resourceBusinessLogic.validateAndUpdateResourceFromCsar(any(Resource.class), any(), any(), any(),
+ any()))
+ .thenThrow(new ComponentException(responseFormat));
+ when(resourceBusinessLogic.deleteResource(anyString(), any()))
+ .thenReturn(responseFormat);
+ mockChangeComponentState();
+ migration.upgradeVFs(vfList, true);
+ verify(resourceBusinessLogic).deleteResource(anyString(), any());
+ }
+
+ @Test
+ public void verifyThatCheckedOutResourceIsNotMarkedAsDeletedIfUpgradeSucceeded() {
+ mockCheckoutFlow();
+ resource.setVersion(UPDATED_VERSION);
+ when(resourceBusinessLogic.validateAndUpdateResourceFromCsar(any(Resource.class), any(), any(), any(),
+ any()))
+ .thenReturn(resource);
+ mockChangeComponentState();
+ migration.upgradeVFs(vfList, true);
+ verify(resourceBusinessLogic, times(0)).deleteResource(anyString(), any());
+ }
+
+ @Test
+ public void verifyThatCheckedOutServicesMarkedAsDeletedIfUpgradeFailed() {
+ List<String> servicesForUpgrade = new ArrayList<>();
+ servicesForUpgrade.add(COMPONENT_UNIQUE_ID);
+
+ Either<Resource, StorageOperationStatus> foundServices = Either.left(resource);
+ mockCheckoutFlow();
+ when(toscaOperationFacade.getToscaElement(any(), any(ComponentParametersView.class)))
+ .thenReturn(Either.left(service));
+ when(toscaOperationFacade.getLatestCertifiedByToscaResourceName(any(), any(), any()))
+ .thenReturn(foundServices);
+ migration.upgradeServices(servicesForUpgrade, component -> true, "services");
+ verify(serviceBusinessLogic, times(0)).deleteService(anyString(), any());
+ }
+
+ @Test
+ public void verifyThatCheckedOutServicesIsNotMarkedAsDeletedIfUpgradeSucceeded() {
+ List<String> servicesForUpgrade = new ArrayList<>();
+ servicesForUpgrade.add(COMPONENT_UNIQUE_ID);
+
+ mockCheckoutFlow();
+ when(toscaOperationFacade.getLatestCertifiedByToscaResourceName(anyString(), any(VertexTypeEnum.class), any(JsonParseFlagEnum.class)))
+ .thenReturn(Either.right(StorageOperationStatus.NOT_FOUND));
+ when(serviceBusinessLogic.deleteService(anyString(), any()))
+ .thenReturn(responseFormat);
+ migration.upgradeServices(servicesForUpgrade, component -> true, "services");
+ verify(serviceBusinessLogic).deleteService(anyString(), any());
+ }
+
+
+ @Test
+ public void unlockDeleteOperationIsPerformedIfItWasLocked() {
+ migration.isLockDeleteOperationSucceeded();
+ migration.unlockDeleteOperation();
+ verify(componentsCleanBusinessLogic).unlockDeleteOperation();
+ }
+
+ @Test
+ public void unlockDeleteOperationIsNotPerformedIfItWasNotLocked() {
+ when(componentsCleanBusinessLogic.lockDeleteOperation()).thenReturn(StorageOperationStatus.GENERAL_ERROR);
+ migration.isLockDeleteOperationSucceeded();
+ migration.unlockDeleteOperation();
+ verify(componentsCleanBusinessLogic, times(0)).unlockDeleteOperation();
+ }
+
+ @Test
+ public void deleteLockSucceededAfterRetry() {
+ when(componentsCleanBusinessLogic.lockDeleteOperation())
+ .thenReturn(StorageOperationStatus.FAILED_TO_LOCK_ELEMENT)
+ .thenReturn(StorageOperationStatus.FAILED_TO_LOCK_ELEMENT)
+ .thenReturn(StorageOperationStatus.FAILED_TO_LOCK_ELEMENT)
+ .thenReturn(StorageOperationStatus.OK);
+ migration.isLockDeleteOperationSucceeded();
+ migration.unlockDeleteOperation();
+ verify(componentsCleanBusinessLogic).unlockDeleteOperation();
+ }
+
+ @Test
+ public void deleteLockFailedAfterRetry() {
+ when(componentsCleanBusinessLogic.lockDeleteOperation())
+ .thenReturn(StorageOperationStatus.FAILED_TO_LOCK_ELEMENT);
+ migration.isLockDeleteOperationSucceeded();
+ migration.unlockDeleteOperation();
+ verify(componentsCleanBusinessLogic, times(0)).unlockDeleteOperation();
+ }
+
+ @Test
+ public void deleteMarkedResourcesWhenLimitIsReached() {
+ ArrayList<NodeTypeEnum> componentsToClean = new ArrayList<>();
+ componentsToClean.add(NodeTypeEnum.Resource);
+ migration.setUser(user);
+ migration.setMarkedAsDeletedResourcesCnt(5);
+ migration.deleteResourcesIfLimitIsReached();
+ verify(componentsCleanBusinessLogic).cleanComponents(componentsToClean, true);
+ }
+
+ @Test
+ public void deleteMarkedResourcesNotCalledWhenLimitIsNotReached() {
+ ArrayList<NodeTypeEnum> componentsToClean = new ArrayList<>();
+ componentsToClean.add(NodeTypeEnum.Resource);
+ migration.setUser(user);
+ migration.setMarkedAsDeletedResourcesCnt(3);
+ migration.deleteResourcesIfLimitIsReached();
+ verify(componentsCleanBusinessLogic, times(0)).cleanComponents(componentsToClean, true);
+ }
+
+ @Test
+ public void deleteMarkedServicesWhenLimitIsReached() {
+ ArrayList<NodeTypeEnum> componentsToClean = new ArrayList<>();
+ componentsToClean.add(NodeTypeEnum.Service);
+ migration.setUser(user);
+ migration.setMarkedAsDeletedServicesCnt(5);
+ migration.deleteServicesIfLimitIsReached();
+ verify(componentsCleanBusinessLogic).cleanComponents(componentsToClean, true);
+ }
+
+ @Test
+ public void deleteMarkedServicesNotCalledWhenLimitIsNotReached() {
+ ArrayList<NodeTypeEnum> componentsToClean = new ArrayList<>();
+ componentsToClean.add(NodeTypeEnum.Service);
+ migration.setUser(user);
+ migration.setMarkedAsDeletedServicesCnt(2);
+ migration.deleteServicesIfLimitIsReached();
+ verify(componentsCleanBusinessLogic, times(0)).cleanComponents(componentsToClean, true);
+ }
+
+ @Test
+ public void getVfUpgradeStatusWhenUpgradeFailedAndItIsInstance() {
+ assertEquals(UpgradeMigration1710.UpgradeStatus.NOT_UPGRADED, migration.getVfUpgradeStatus(false, true));
+ }
+
+ @Test
+ public void getVfUpgradeStatusWhenUpgradeFailedAndItIsNotInstance() {
+ assertEquals(UpgradeMigration1710.UpgradeStatus.NOT_UPGRADED, migration.getVfUpgradeStatus(false, false));
+ }
+
+ @Test
+ public void getVfUpgradeStatusWhenUpgradeSucceededAndItIsInstance() {
+ assertEquals(UpgradeMigration1710.UpgradeStatus.UPGRADED_AS_INSTANCE, migration.getVfUpgradeStatus(true, true));
+ }
+
+ @Test
+ public void getVfUpgradeStatusWhenUpgradeSucceededAndItIsNotInstance() {
+ assertEquals(UpgradeMigration1710.UpgradeStatus.UPGRADED, migration.getVfUpgradeStatus(true, false));
+ }
+
private void resolveUserAndDefineUpgradeLevel() {
when(userAdminOperation.getUserData(anyString(), eq(false))).thenReturn(Either.left(user));
configurationManager.getConfiguration().setToscaConformanceLevel(CONF_LEVEL);
configurationManager.getConfiguration().setResourcesForUpgrade(resourcesForUpgrade);
}
- private void upgradeRules(boolean failedVfUpgrade, boolean exceptionOnVfUpgrade, boolean upgradeService, boolean upgradeVFCs) {
- GraphVertex component = new GraphVertex();
- component.setJsonMetadataField(JsonPresentationFields.LIFECYCLE_STATE, LifecycleStateEnum.CERTIFIED.name());
- component.setJsonMetadataField(JsonPresentationFields.UNIQUE_ID, "12345");
- List<GraphVertex> components = Lists.newArrayList();
- components.add(component);
+ private void upgradeAllScenario(boolean upgradeServices) {
+ final boolean failOnVfUpgrade = false;
+ final boolean exceptionOnFvUpgrade = false;
+ final boolean upgradeVFC = false;
+ final boolean isFailed = false;
+ final boolean isProxy = true;
+
+ resolveUserAndDefineUpgradeLevel();
+ mockCheckoutFlow();
+ when(resourceBusinessLogic.validateAndUpdateResourceFromCsar(any(Resource.class), any(), any(), any(),
+ any()))
+ .thenReturn(resource);
+ upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeVFC, isFailed, isProxy);
+ }
+
+ private void upgradeRules(boolean failedVfUpgrade, boolean exceptionOnVfUpgrade, boolean upgradeService,
+ boolean upgradeVFCs, boolean isFailed) {
+ upgradeRules(failedVfUpgrade, exceptionOnVfUpgrade, upgradeService, upgradeVFCs, isFailed, false);
+ }
- Resource resource = new Resource();
+ private void upgradeRules(boolean failedVfUpgrade, boolean exceptionOnVfUpgrade, boolean upgradeService,
+ boolean upgradeVFCs, boolean isFailed, boolean isProxy) {
+
+ mockNodeTypesUpgrade();
Either<Component, StorageOperationStatus> foundResource = Either.left(resource);
- when(titanDao.getByCriteria(any(), any(), any(), any()))
- .thenReturn(Either.left(components));
- when(titanDao.getParentVertecies(any(GraphVertex.class), any(), any()))
- //1th node to upgrade
- .thenReturn(Either.left(components))
- //parent of the 1th node - stop recursion
- .thenReturn(Either.right(TitanOperationStatus.NOT_FOUND));
if (failedVfUpgrade) {
- Either<Component, StorageOperationStatus> getToscaForVF = Either.right(StorageOperationStatus.NOT_FOUND);
- when(toscaOperationFacade.getToscaElement(anyString())).thenReturn(foundResource)
- .thenReturn(foundResource)
- .thenReturn(getToscaForVF);
- }
- else {
+ getToscaElementMockForVfUpgradeFailedScenario(foundResource);
+ } else {
if (exceptionOnVfUpgrade) {
- when(toscaOperationFacade.getToscaElement(anyString())).thenReturn(foundResource)
- .thenReturn(foundResource)
- .thenThrow(new RuntimeException());
- }
- else {
- when(toscaOperationFacade.getToscaElement(anyString())).thenReturn(foundResource);
- //happy flow
- if (upgradeService) {
- Either<Resource, StorageOperationStatus> service = Either.left(resource);
- if (upgradeVFCs) {
- when(componentUtils.convertFromStorageResponse(any(), any())).thenCallRealMethod();
- when(componentUtils.getResponseFormat(any(ActionStatus.class),any())).thenCallRealMethod();
- when(toscaOperationFacade.getLatestCertifiedByToscaResourceName(any(), any(), any()))
- .thenReturn(service)
- .thenReturn(Either.right(StorageOperationStatus.NOT_FOUND))
- .thenReturn(service)
- .thenReturn(Either.right(StorageOperationStatus.NOT_FOUND));
- }
- else {
- when(toscaOperationFacade.getLatestCertifiedByToscaResourceName(any(), any(), any()))
- .thenReturn(service);
- }
- }
+ getToscaElementMockForExceptionOnUpgradeScenario(foundResource, upgradeService);
+ } else {
+ when(toscaOperationFacade.getToscaElement(anyString()))
+ .thenReturn(foundResource);
}
}
+ //happy flow
+ if (upgradeService) {
+ mockForUpgradeServiceScenario(foundResource, upgradeVFCs, isFailed);
+ }
+ }
+
+ private void mockNodeTypesUpgrade() {
+ GraphVertex component = createComponent();
+ List<GraphVertex> components = Lists.newArrayList();
+ components.add(component);
+
+ when(titanDao.getByCriteria(any(), any(), any(), any()))
+ .thenReturn(Either.left(components));
+ when(titanDao.getParentVertecies(any(GraphVertex.class), any(EdgeLabelEnum.class), any(JsonParseFlagEnum.class)))
+ //1st node to upgrade
+ .thenReturn(Either.left(components))
+ //parent of the 1st node - stop recursion
+ .thenReturn(Either.right(TitanOperationStatus.NOT_FOUND));
+ }
+
+ private GraphVertex createComponent() {
+ GraphVertex component = new GraphVertex();
+ component.setJsonMetadataField(JsonPresentationFields.LIFECYCLE_STATE, LifecycleStateEnum.CERTIFIED.name());
+ component.setJsonMetadataField(JsonPresentationFields.UNIQUE_ID, COMPONENT_UNIQUE_ID);
+ component.setJsonMetadataField(JsonPresentationFields.CI_COMPONENT_VERSION, UPDATED_VERSION);
+ return component;
+ }
+
+ private void mockChangeComponentState() {
List<ComponentInstance> instances = Lists.newArrayList();
instances.add(createComponentInstance());
- resource.setComponentInstances(instances);
- Either<Resource, ResponseFormat> fromLifeCycle = Either.left(resource);
+
+ Resource checkedOutResource = new Resource();
+ checkedOutResource.setUniqueId("123400");
+ checkedOutResource.setComponentInstances(instances);
+ Either<Resource, ResponseFormat> fromLifeCycle = Either.left(checkedOutResource);
doReturn(fromLifeCycle).when(lifecycleBusinessLogic)
.changeComponentState(any(), any(), any(), any(), any(),eq(true), eq(false));
+ }
+
+ private void getToscaElementMockForVfUpgradeFailedScenario(Either<Component, StorageOperationStatus> foundResource) {
+ when(toscaOperationFacade.getToscaElement(anyString()))
+ .thenReturn(foundResource)
+ .thenReturn(foundResource)
+ .thenReturn(Either.right(StorageOperationStatus.NOT_FOUND));
+ }
+
+ private void mockForUpgradeServiceScenario(Either<Component, StorageOperationStatus> foundResource, boolean upgradeVFC, boolean isFailed) {
+ Either<Resource, StorageOperationStatus> foundService = Either.left(resource);
+ if (upgradeVFC) {
+ when(toscaOperationFacade.getToscaElement(anyString()))
+ .thenReturn(foundResource)
+ .thenReturn(foundResource)
+ .thenReturn(Either.right(StorageOperationStatus.NOT_FOUND));
+ }
+ else if (!isFailed) {
+ when(toscaOperationFacade.getToscaElement(any(), any(ComponentParametersView.class)))
+ .thenReturn(Either.left(resource));
+ when(toscaOperationFacade.getLatestCertifiedByToscaResourceName(any(), any(), any()))
+ .thenReturn(foundService);
+ }
+ }
+
+ private void getToscaElementMockForExceptionOnUpgradeScenario(Either<Component, StorageOperationStatus> foundResource, boolean upgradeService) {
+ if (upgradeService) {
+ service.setVersion(UPDATED_VERSION);
+ Either<Component, StorageOperationStatus> foundService = Either.left(service);
+ when(toscaOperationFacade.getToscaElement(anyString()))
+ .thenReturn(foundResource)
+ .thenReturn(foundResource)
+ .thenThrow(new RuntimeException())
+ .thenReturn(foundService);
+ }
+ else {
+ when(toscaOperationFacade.getToscaElement(anyString()))
+ .thenReturn(foundResource)
+ .thenReturn(foundResource)
+ .thenThrow(new RuntimeException());
+ }
+ }
- }
+ private void mockCheckoutFlow() {
+ GraphVertex component = new GraphVertex();
+ component.setJsonMetadataField(JsonPresentationFields.LIFECYCLE_STATE, LifecycleStateEnum.CERTIFIED.name());
+ component.setJsonMetadataField(JsonPresentationFields.UNIQUE_ID, COMPONENT_UNIQUE_ID);
+ List<GraphVertex> components = Lists.newArrayList();
+ components.add(component);
+
+ when(toscaOperationFacade.getToscaElement(anyString())).thenReturn(Either.left(resource));
+ when(titanDao.getByCriteria(any(), any(), any(), any()))
+ .thenReturn(Either.left(components));
+ when(csarOperation.getCsarLatestVersion(anyString(), any()))
+ .thenReturn(Either.left("2.0"));
+ }
private ComponentInstance createComponentInstance() {
ComponentInstance instance = new ComponentInstance();
@@ -260,7 +603,7 @@ public class UpgradeMigration1710Test {
instance.setOriginType(OriginTypeEnum.CVFC);
instance.setCustomizationUUID("");
instance.setComponentName("");
- instance.setComponentVersion("");
+ instance.setComponentVersion(OLD_VERSION);
instance.setToscaComponentName("");
instance.setInvariantName("");
instance.setSourceModelInvariant("");
@@ -269,7 +612,7 @@ public class UpgradeMigration1710Test {
instance.setSourceModelUid("");
instance.setIsProxy(false);
return instance;
- }
+ }
}
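
Several of the new lock tests rely on Mockito's consecutive stubbing (chained thenReturn calls) to make lockDeleteOperation() fail first and then succeed, which exercises the retry path. A minimal sketch of that technique against a hypothetical LockService, assuming a simple bounded retry loop:

    import static org.junit.Assert.assertTrue;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import org.junit.Test;

    public class RetryStubbingSketchTest {

        // Hypothetical lock service; stands in for a "lock delete operation" style call.
        interface LockService {
            boolean tryLock();
        }

        // Hypothetical retry loop, assumed similar in spirit to the migration's lock retry.
        static boolean lockWithRetry(LockService service, int maxAttempts) {
            for (int attempt = 0; attempt < maxAttempts; attempt++) {
                if (service.tryLock()) {
                    return true;
                }
            }
            return false;
        }

        @Test
        public void retrySucceedsOnSecondAttempt() {
            LockService service = mock(LockService.class);
            // Consecutive stubbing: the first call fails, the second call succeeds.
            when(service.tryLock()).thenReturn(false).thenReturn(true);
            assertTrue(lockWithRetry(service, 3));
        }
    }
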
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigrationTest.java
new file mode 100644
index 0000000000..d040d66496
--- /dev/null
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigrationTest.java
@@ -0,0 +1,69 @@
+package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
+
+import fj.data.Either;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult.MigrationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.CompositionDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.model.jsontitan.enums.JsonConstantKeysEnum;
+import org.openecomp.sdc.be.model.jsontitan.operations.NodeTemplateOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class SDCInstancesMigrationTest{
+ @Mock
+ private TitanDao titanDao;
+ @Mock
+ private NodeTemplateOperation nodeTemplateOperation;
+
+
+ @Test
+ public void testFailedMigration(){
+ SDCInstancesMigration instancesMigration = new SDCInstancesMigration(titanDao, nodeTemplateOperation);
+ when(titanDao.getByCriteria(any(), any(), any(), any() )).thenReturn(Either.right(TitanOperationStatus.GENERAL_ERROR));
+
+ MigrationResult migrate = instancesMigration.migrate();
+ MigrationStatus migrationStatus = migrate.getMigrationStatus();
+ assertEquals(MigrationStatus.FAILED, migrationStatus);
+ }
+ @Test
+ public void testSuccessMigration(){
+ SDCInstancesMigration instancesMigration = new SDCInstancesMigration(titanDao, nodeTemplateOperation);
+ List<GraphVertex> list = new ArrayList<>();
+ GraphVertex vertexOrig = new GraphVertex();
+ Map<String, CompositionDataDefinition> jsonComposition = new HashMap<>();
+ CompositionDataDefinition composition = new CompositionDataDefinition();
+ Map<String, ComponentInstanceDataDefinition> componentInstances = new HashMap<>();
+ ComponentInstanceDataDefinition instance = new ComponentInstanceDataDefinition();
+ componentInstances.put("instanceId", instance);
+ composition.setComponentInstances(componentInstances);
+ jsonComposition.put(JsonConstantKeysEnum.COMPOSITION.getValue(), composition);
+ vertexOrig.setJson(jsonComposition);
+ vertexOrig.setType(ComponentTypeEnum.SERVICE);
+ list.add(vertexOrig);
+
+ when(titanDao.getByCriteria(any(), any(), any(), any() )).thenReturn(Either.left(list));
+ when(nodeTemplateOperation.createInstanceEdge(vertexOrig, instance)).thenReturn(StorageOperationStatus.OK);
+
+ MigrationResult migrate = instancesMigration.migrate();
+ MigrationStatus migrationStatus = migrate.getMigrationStatus();
+ assertEquals(MigrationStatus.COMPLETED, migrationStatus);
+ }
+}
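
The new SDCInstancesMigrationTest drives both outcomes by stubbing the DAO to return either an error status (Either.right) or real data (Either.left). A small sketch of that Either-based success/failure shape, using hypothetical Dao and Status types instead of TitanDao and TitanOperationStatus:

    import fj.data.Either;

    import java.util.Collections;
    import java.util.List;

    public class EitherMigrationSketch {

        // Hypothetical status enum; stands in for DB-level error codes.
        enum Status { OK, GENERAL_ERROR }

        // Hypothetical DAO: either a list of rows (left) or an error status (right).
        interface Dao {
            Either<List<String>, Status> fetchAll();
        }

        // The migration-style decision: fail fast on right, process rows on left.
        static String migrate(Dao dao) {
            Either<List<String>, Status> result = dao.fetchAll();
            if (result.isRight()) {
                return "FAILED: " + result.right().value();
            }
            return "COMPLETED: " + result.left().value().size() + " row(s)";
        }

        public static void main(String[] args) {
            System.out.println(migrate(() -> Either.right(Status.GENERAL_ERROR)));
            System.out.println(migrate(() -> Either.left(Collections.singletonList("instanceId"))));
        }
    }
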
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServletTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServletTest.java
index 2733beeda7..9f104632dd 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServletTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServletTest.java
@@ -1,12 +1,10 @@
package org.openecomp.sdc.asdctool.servlets;
-import java.io.File;
-
-import javax.ws.rs.core.Response;
-
+import com.thinkaurelius.titan.core.TitanGraph;
import org.junit.Test;
-import com.thinkaurelius.titan.core.TitanGraph;
+import javax.ws.rs.core.Response;
+import java.io.File;
public class ExportImportTitanServletTest {