author    ys9693 <ys9693@att.com>  2020-01-19 13:50:02 +0200
committer Ofir Sonsino <ofir.sonsino@intl.att.com>  2020-01-22 12:33:31 +0000
commit    16a9fce0e104a38371a9e5a567ec611ae3fc7f33 (patch)
tree      03a2aff3060ddb5bc26a90115805a04becbaffc9 /asdctool
parent    aa83a2da4f911c3ac89318b8e9e8403b072942e1 (diff)
Catalog alignment
Issue-ID: SDC-2724
Signed-off-by: ys9693 <ys9693@att.com>
Change-Id: I52b4aacb58cbd432ca0e1ff7ff1f7dd52099c6fe
Diffstat (limited to 'asdctool')
-rw-r--r--  asdctool/pom.xml | 1510
-rw-r--r--  asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/attributes/version.rb | 3
-rw-r--r--  asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/recipes/03-schemaCreation.rb | 8
-rw-r--r--  asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb | 41
-rw-r--r--  asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/elasticsearch.yml.erb | 11
-rw-r--r--  asdctool/src/main/java/com/att/nsa/cambria/client/CambriaConsumer.java (renamed from asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMock.java) | 18
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java | 7
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java | 6
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java | 7
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java | 11
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java | 24
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java | 10
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java | 9
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/SdcSchemaFileImportConfiguration.java (renamed from asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfiguration.java) | 23
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java | 61
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/DistributionStatusEnum.java (renamed from asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfiguration.java) | 37
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifeCycleTransitionEnum.java | 83
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifecycleStateEnum.java (renamed from asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMockTest.java) | 41
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java | 4
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java | 176
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java | 813
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java | 56
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java | 2
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java | 43
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java | 13
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java | 10
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java | 7
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java | 6
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java | 6
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java | 6
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java | 26
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java | 2
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java | 7
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java | 19
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java | 17
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java | 2
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java | 4
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java | 2
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java | 9
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java | 2
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java | 3
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java | 13
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java | 10
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java | 20
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java | 4
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java | 89
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java | 9
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java | 105
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java | 7
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java | 77
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java | 3
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java | 36
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java | 23
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/CambriaHandlerMock.java | 87
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java | 7
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/PortalHealthCheckBuilderMock.java (renamed from asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java) | 30
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java | 2
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java | 2
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java | 3
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java | 22
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java | 6
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/InstanceMigrationBase.java | 178
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java | 2
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java | 98
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java | 27
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java | 2
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java | 49
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/InterfaceOperationMigration.java | 9
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcConsumerMigration.java | 108
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigration.java | 186
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigration.java | 187
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1911/SdcDeploymentArtTimeOutMigration.java | 137
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCERTIFIEDstateMigration.java | 139
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCIPstateMigration.java | 153
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesRFCstateMigration.java | 147
-rw-r--r--  asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java | 14
-rw-r--r--  asdctool/src/main/resources/application-context.xml | 8
-rw-r--r--  asdctool/src/main/resources/config/configuration.yaml | 100
-rw-r--r--  asdctool/src/main/resources/config/dataTypes.yml | 202
-rw-r--r--  asdctool/src/main/resources/config/elasticsearch.yml | 393
-rw-r--r--  asdctool/src/main/resources/config/groupTypes.yml | 6
-rw-r--r--  asdctool/src/main/resources/config/janusgraph.properties | 6
-rw-r--r--  asdctool/src/main/resources/config/tmp.trust | bin 0 -> 192298 bytes
-rw-r--r--  asdctool/src/main/resources/elasticsearch.yml | 399
-rw-r--r--  asdctool/src/main/resources/es-resources/README.txt | 43
-rw-r--r--  asdctool/src/main/resources/es-resources/auditMappings.txt | 169
-rw-r--r--  asdctool/src/main/resources/es-resources/audit_migration_1602.py | 132
-rw-r--r--  asdctool/src/main/resources/es-resources/config_properties.py | 11
-rw-r--r--  asdctool/src/main/resources/es-resources/file_utils.py | 21
-rw-r--r--  asdctool/src/main/resources/es-resources/index_ops.py | 151
-rw-r--r--  asdctool/src/main/resources/es-resources/types/auditinggetuebclusterevent.txt | 8
-rw-r--r--  asdctool/src/main/resources/es-resources/types/distributiondeployevent.txt | 14
-rw-r--r--  asdctool/src/main/resources/es-resources/types/distributiondownloadevent.txt | 9
-rw-r--r--  asdctool/src/main/resources/es-resources/types/distributionengineevent.txt | 13
-rw-r--r--  asdctool/src/main/resources/es-resources/types/distributionnotificationevent.txt | 16
-rw-r--r--  asdctool/src/main/resources/es-resources/types/distributionstatusevent.txt | 12
-rw-r--r--  asdctool/src/main/resources/es-resources/types/resourceadminevent.txt | 21
-rw-r--r--  asdctool/src/main/resources/es-resources/types/useraccessevent.txt | 10
-rw-r--r--  asdctool/src/main/resources/es-resources/types/useradminevent.txt | 20
-rw-r--r--  asdctool/src/main/resources/scripts/esToCassandraMigration.sh | 29
-rw-r--r--  asdctool/src/main/resources/scripts/esToCassandraMigrationExportOnly.sh | 29
-rw-r--r--  asdctool/src/main/resources/scripts/esToCassandraMigrationImportOnly.sh | 29
-rw-r--r--  asdctool/src/main/resources/scripts/getConsumers.sh | 37
-rw-r--r--  asdctool/src/main/resources/scripts/python/user/exportUsers.py | 3
-rw-r--r--  asdctool/src/main/resources/scripts/python/user/importUsers.py | 6
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java | 2
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/cli/CLIToolTest.java | 10
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/cli/SpringCLIToolTest.java | 61
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfigurationTest.java | 19
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfigurationTest.java | 43
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMockTest.java | 97
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMockTest.java | 69
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfigurationTest.java | 63
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java | 28
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentInstanceRowTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentRowTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/DataMigrationTest.java | 371
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfigTest.java | 90
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java | 6
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java | 2
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ProductLogicTest.java | 3
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandlerTest.java | 2
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGeneratorTest.java | 37
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandlerTest.java | 3
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java | 10
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java | 9
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java | 18
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java | 8
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuterTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutorTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuterTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuterTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutorTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuterTest.java | 9
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java | 36
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ServiceArtifactValidationTaskTest.java | 5
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTaskTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerHelper.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java | 22
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/main/ExportImportMenuTest.java | 272
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfigTest.java | 10
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java | 6
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java | 15
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java | 6
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java | 4
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java | 24
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigrationTest.java | 2
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigrationTest.java | 6
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigrationTest.java | 6
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigrationTest.java | 9
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SdcArchiveMigrationTest.java | 6
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigrationTest.java | 231
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigrationTest.java | 213
-rw-r--r--  asdctool/src/test/java/org/openecomp/sdc/asdctool/utils/ReportWriterTest.java | 1
155 files changed, 3554 insertions, 5353 deletions
diff --git a/asdctool/pom.xml b/asdctool/pom.xml
index c74c4d6c8a..8c388252d2 100644
--- a/asdctool/pom.xml
+++ b/asdctool/pom.xml
@@ -1,752 +1,760 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <artifactId>asdctool</artifactId>
- <packaging>jar</packaging>
-
- <parent>
- <groupId>org.openecomp.sdc</groupId>
- <artifactId>sdc-main</artifactId>
- <version>1.6.0-SNAPSHOT</version>
- </parent>
-
- <dependencies>
-
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- <version>${guava.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- ASDC dependencies -->
- <dependency>
- <groupId>org.openecomp.sdc.be</groupId>
- <artifactId>common-be</artifactId>
- <version>${project.version}</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.openecomp.sdc</groupId>
- <artifactId>common-app-api</artifactId>
- <version>${project.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.openecomp.sdc.be</groupId>
- <artifactId>catalog-dao</artifactId>
- <version>${project.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.openecomp.sdc.be</groupId>
- <artifactId>catalog-model</artifactId>
- <version>${project.version}</version>
- <scope>compile</scope>
- </dependency>
-
-
- <dependency>
- <groupId>org.openecomp.sdc</groupId>
- <artifactId>catalog-be</artifactId>
- <version>${project.version}</version>
-
- <!-- Comment Out in order to debug in eclipse -->
- <classifier>classes</classifier>
-
- <exclusions>
- <exclusion>
- <groupId>org.openecomp.ecompsdkos</groupId>
- <artifactId>epsdk-fw</artifactId>
- </exclusion>
-
- <exclusion>
- <groupId>org.onap.sdc.common</groupId>
- <artifactId>onap-common-lib</artifactId>
- </exclusion>
-
- <exclusion>
- <groupId>com.att.nsa</groupId>
- <artifactId>cambriaClient</artifactId>
- </exclusion>
- <exclusion>
- <groupId>com.att.nsa</groupId>
- <artifactId>dmaapClient</artifactId>
- </exclusion>
- <exclusion>
- <artifactId>slf4j-log4j12</artifactId>
- <groupId>org.slf4j</groupId>
- </exclusion>
-
- </exclusions>
-
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.onap.portal.sdk</groupId>
- <artifactId>epsdk-fw</artifactId>
- <version>${ecomp.version}</version>
- <scope>compile</scope>
- <exclusions>
- <exclusion>
- <groupId>com.att.nsa</groupId>
- <artifactId>cambriaClient</artifactId>
- </exclusion>
- <exclusion>
- <groupId>com.att.nsa</groupId>
- <artifactId>dmaapClient</artifactId>
- </exclusion>
- <exclusion>
- <artifactId>slf4j-log4j12</artifactId>
- <groupId>org.slf4j</groupId>
- </exclusion>
- </exclusions>
- </dependency>
-
-
-
- <!-- ASDC dependencies end -->
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-server</artifactId>
- <version>${jetty.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-util</artifactId>
- <version>${jetty.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- listen to file changes -->
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-jci-core</artifactId>
- <version>${commons-jci-core.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>commons-cli</groupId>
- <artifactId>commons-cli</artifactId>
- <version>1.4</version>
- </dependency>
-
- <dependency>
- <groupId>org.eclipse.jetty</groupId>
- <artifactId>jetty-servlet</artifactId>
- <version>${jetty.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.glassfish.jersey.core</groupId>
- <artifactId>jersey-server</artifactId>
- <version>${jersey-bom.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.glassfish.jersey.containers</groupId>
- <artifactId>jersey-container-servlet-core</artifactId>
- <version>${jersey-bom.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.glassfish.jersey.containers</groupId>
- <artifactId>jersey-container-jetty-http</artifactId>
- <version>${jersey-bom.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.glassfish.jersey.media</groupId>
- <artifactId>jersey-media-moxy</artifactId>
- <version>${jersey-bom.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.glassfish.jersey.media</groupId>
- <artifactId>jersey-media-multipart</artifactId>
- <version>2.14</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- slf4j + logback -->
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- <version>${slf4j-api.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
- <version>${logback.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-core</artifactId>
- <version>${logback.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- groovy -->
- <dependency>
- <groupId>org.codehaus.groovy</groupId>
- <artifactId>groovy</artifactId>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-configuration2</artifactId>
- <version>${commons-configuration}</version>
- </dependency>
-
- <dependency>
- <groupId>org.janusgraph</groupId>
- <artifactId>janusgraph-core</artifactId>
- <version>${janusgraph.version}</version>
- <scope>compile</scope>
- <exclusions>
- <exclusion>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </exclusion>
- <exclusion>
- <artifactId>commons-collections</artifactId>
- <groupId>commons-collections</groupId>
- </exclusion>
- <exclusion>
- <artifactId>groovy</artifactId>
- <groupId>org.codehaus.groovy</groupId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.thrift</groupId>
- <artifactId>libthrift</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>com.googlecode.json-simple</groupId>
- <artifactId>json-simple</artifactId>
- <version>${json-simple.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.janusgraph</groupId>
- <artifactId>janusgraph-cassandra</artifactId>
- <version>${janusgraph.version}</version>
- <scope>compile</scope>
- <exclusions>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
- <version>${commons-logging}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>commons-codec</groupId>
- <artifactId>commons-codec</artifactId>
- <version>${commons-codec}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>com.fasterxml.jackson.core</groupId>
- <artifactId>jackson-databind</artifactId>
- <version>${jackson.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- Explicitly specified in order to override older version included by epsdk-fw -->
- <dependency>
- <groupId>com.fasterxml.jackson.core</groupId>
- <artifactId>jackson-annotations</artifactId>
- <version>${jackson-annotations.version}</version>
- </dependency>
-
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>${gson.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
- <version>${httpclient.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpcore</artifactId>
- <version>${httpcore.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- Spring 4 dependencies -->
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-core</artifactId>
- <version>${spring.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-context</artifactId>
- <version>${spring.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-expression</artifactId>
- <version>${spring.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-beans</artifactId>
- <version>${spring.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-aop</artifactId>
- <version>${spring.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- Spring 4 dependencies end -->
- <!-- JavaConfig need this library -->
- <dependency>
- <groupId>cglib</groupId>
- <artifactId>cglib</artifactId>
- <version>3.2.4</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.yaml</groupId>
- <artifactId>snakeyaml</artifactId>
- <version>${snakeyaml.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.functionaljava</groupId>
- <artifactId>functionaljava</artifactId>
- <version>${functionaljava.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.elasticsearch</groupId>
- <artifactId>elasticsearch</artifactId>
- <version>${elastic-search.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>com.fasterxml.jackson.dataformat</groupId>
- <artifactId>jackson-dataformat-yaml</artifactId>
- <version>${jackson.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- CASSANDRA -->
- <dependency>
- <groupId>com.datastax.cassandra</groupId>
- <artifactId>cassandra-driver-core</artifactId>
- <version>${cassandra.driver.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>com.datastax.cassandra</groupId>
- <artifactId>cassandra-driver-mapping</artifactId>
- <version>${cassandra.driver.version}</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.thrift</groupId>
- <artifactId>libthrift</artifactId>
- <version>${libthrift.version}</version>
- </dependency>
-
- <!-- CASSANDRA END -->
-
- <!-- OPEN CSV -->
- <dependency>
- <groupId>com.opencsv</groupId>
- <artifactId>opencsv</artifactId>
- <version>4.0</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.poi</groupId>
- <artifactId>poi</artifactId>
- <version>${apache-poi.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.jdom</groupId>
- <artifactId>jdom</artifactId>
- <version>2.0.2</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- Temporary, till building the populate task which adding all components
- to cache. We will use Serialization Utils. -->
- <dependency>
- <groupId>de.ruedigermoeller</groupId>
- <artifactId>fst</artifactId>
- <version>2.47</version>
- <scope>compile</scope>
- </dependency>
-
- <!-- testing -->
- <dependency>
- <groupId>org.hamcrest</groupId>
- <artifactId>hamcrest-all</artifactId>
- <version>${hamcrest-all.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>${junit.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.testng</groupId>
- <artifactId>testng</artifactId>
- <version>${testng.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-core</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.assertj</groupId>
- <artifactId>assertj-core</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>com.google.code.bean-matchers</groupId>
- <artifactId>bean-matchers</artifactId>
- <version>${bean-matchers.version}</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>com.github.stefanbirkner</groupId>
- <artifactId>system-rules</artifactId>
- <version>1.19.0</version>
- <scope>test</scope>
- </dependency>
-
- <!-- testing end -->
-
- <dependency>
- <groupId>io.netty</groupId>
- <artifactId>netty-all</artifactId>
- </dependency>
-
- <dependency>
- <groupId>io.netty</groupId>
- <artifactId>netty-handler</artifactId>
- </dependency>
- <dependency>
- <groupId>org.powermock</groupId>
- <artifactId>powermock-module-junit4</artifactId>
- <version>2.0.2</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <finalName>${project.artifactId}-${project.version}-jar-with-dependencies</finalName>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-deploy-plugin</artifactId>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-assembly-plugin</artifactId>
- <executions>
- <execution>
- <configuration>
- <finalName>sdctool</finalName>
- <appendAssemblyId>false</appendAssemblyId>
- <descriptors>
- <descriptor>${project.basedir}/tarball.xml</descriptor>
- </descriptors>
- </configuration>
- <id>assemble-file</id>
- <phase>verify</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-shade-plugin</artifactId>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>shade</goal>
- </goals>
- <configuration>
- <filters>
- <filter>
- <artifact>org.openecomp.sdc:*</artifact>
- <excludes>
- <exclude>**/elasticsearch.yml</exclude>
- </excludes>
- </filter>
- <filter>
- <artifact>*:*</artifact>
- <excludes>
- <exclude>META-INF/*.SF</exclude>
- <exclude>META-INF/*.DSA</exclude>
- <exclude>META-INF/*.RSA</exclude>
- </excludes>
- </filter>
- </filters>
- <transformers>
- <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
- </transformers>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>ru.yaal.maven</groupId>
- <artifactId>write-text-files-maven-plugin</artifactId>
- <configuration>
- <charset>UTF-8</charset>
- <files>
- <file>
- <path>
- ${project.basedir}\sdc-cassandra-init\chef-repo\cookbooks\cassandra-actions\attributes\version.rb
- </path>
- <lines>
- <line>normal['version'] ="${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion}"</line>
- </lines>
- </file>
- </files>
- </configuration>
- <executions>
- <execution>
- <id>write-text-files</id>
- <phase>prepare-package</phase>
- <goals>
- <goal>write-text-files</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-resources-plugin</artifactId>
- <executions>
- <execution>
- <id>copy-tosca-folder</id>
- <!-- here the phase you need -->
- <phase>compile</phase>
- <goals>
- <goal>copy-resources</goal>
- </goals>
- <configuration>
- <outputDirectory>${project.parent.basedir}/asdctool/tosca</outputDirectory>
- <resources>
- <resource>
- <directory>${project.parent.basedir}/catalog-be/src/main/resources/import/tosca</directory>
- <filtering>true</filtering>
- </resource>
- </resources>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>com.github.sylvainlaurent.maven</groupId>
- <artifactId>yaml-json-validator-maven-plugin</artifactId>
- <executions>
- <execution>
- <id>validate</id>
- <phase>validate</phase>
- <goals>
- <goal>validate</goal>
- </goals>
- <configuration>
- <validationSets>
- <validationSet>
- <includes>
- <include>src/main/resources/**/*.y*ml</include>
- <include>src/test/resources/**/*.y*ml</include>
- </includes>
- </validationSet>
- <validationSet>
- <includes>
- <include>src/main/resources/**/*.json</include>
- <include>src/test/resources/**/*.json</include>
- </includes>
- <excludes>
- <exclude>src/test/resources/graphError.json</exclude>
- </excludes>
- </validationSet>
- </validationSets>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- <profiles>
- <profile>
- <id>docker</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-resources-plugin</artifactId>
- <executions>
- <execution>
- <id>copy-sdctool</id>
- <phase>verify</phase>
- <goals>
- <goal>copy-resources</goal>
- </goals>
- <configuration>
- <outputDirectory>${basedir}/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/files/default</outputDirectory>
- <resources>
- <resource>
- <directory>${project.parent.basedir}/asdctool/target</directory>
- <includes>
- <include>sdctool.tar</include>
- </includes>
- </resource>
- </resources>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>io.fabric8</groupId>
- <artifactId>docker-maven-plugin</artifactId>
- <configuration>
- <apiVersion>1.23</apiVersion>
- <registry>nexus3.onap.org:10001</registry>
- <authConfig>
- <pull>
- <username>docker</username>
- <password>docker</password>
- </pull>
- </authConfig>
- <images>
- <!-- Build cassandra-init image -->
- <image>
- <name>onap/sdc-cassandra-init</name>
- <alias>sdc-cassandra-init</alias>
- <build>
- <cleanup>try</cleanup>
- <dockerFileDir>${project.basedir}/sdc-cassandra-init</dockerFileDir>
- <tags>
- <tag>${docker.tag}</tag>
- <tag>${parsedVersion.majorVersion}.${parsedVersion.minorVersion}-STAGING-latest</tag>
- </tags>
- </build>
- </image>
- </images>
- </configuration>
- <executions>
- <execution>
- <id>clean-images</id>
- <phase>pre-clean</phase>
- <goals>
- <goal>remove</goal>
- </goals>
- <configuration>
- <removeAll>true</removeAll>
- <image>onap/sdc-cassandra-init</image>
- </configuration>
- </execution>
-
- <execution>
- <id>generate-images</id>
- <phase>install</phase>
- <goals>
- <goal>build</goal>
- </goals>
- </execution>
-
- <execution>
- <id>push-images</id>
- <phase>deploy</phase>
- <goals>
- <goal>push</goal>
- </goals>
- <configuration>
- <image>onap/sdc-cassandra-init</image>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
- </profiles>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <artifactId>asdctool</artifactId>
+ <packaging>jar</packaging>
+
+ <parent>
+ <groupId>org.openecomp.sdc</groupId>
+ <artifactId>sdc-main</artifactId>
+ <version>1.6.0-SNAPSHOT</version>
+ </parent>
+
+ <dependencies>
+
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>${guava.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- ASDC dependencies -->
+ <dependency>
+ <groupId>org.openecomp.sdc.be</groupId>
+ <artifactId>common-be</artifactId>
+ <version>${project.version}</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.openecomp.sdc</groupId>
+ <artifactId>common-app-api</artifactId>
+ <version>${project.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.openecomp.sdc.be</groupId>
+ <artifactId>catalog-dao</artifactId>
+ <version>${project.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.openecomp.sdc.be</groupId>
+ <artifactId>catalog-model</artifactId>
+ <version>${project.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+
+ <dependency>
+ <groupId>org.openecomp.sdc</groupId>
+ <artifactId>catalog-be</artifactId>
+ <version>${project.version}</version>
+
+ <!-- Comment Out in order to debug in eclipse -->
+ <classifier>classes</classifier>
+
+ <exclusions>
+ <exclusion>
+ <groupId>org.openecomp.ecompsdkos</groupId>
+ <artifactId>epsdk-fw</artifactId>
+ </exclusion>
+
+ <exclusion>
+ <groupId>org.onap.sdc.common</groupId>
+ <artifactId>onap-common-lib</artifactId>
+ </exclusion>
+
+ <exclusion>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>cambriaClient</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>dmaapClient</artifactId>
+ </exclusion>
+ <exclusion>
+ <artifactId>slf4j-log4j12</artifactId>
+ <groupId>org.slf4j</groupId>
+ </exclusion>
+
+ </exclusions>
+
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.onap.portal.sdk</groupId>
+ <artifactId>epsdk-fw</artifactId>
+ <version>${ecomp.version}</version>
+ <scope>compile</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>cambriaClient</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.att.nsa</groupId>
+ <artifactId>dmaapClient</artifactId>
+ </exclusion>
+ <exclusion>
+ <artifactId>slf4j-log4j12</artifactId>
+ <groupId>org.slf4j</groupId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+
+ <!-- ASDC dependencies end -->
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-server</artifactId>
+ <version>${jetty.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ <version>${jetty.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- listen to file changes -->
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-jci-core</artifactId>
+ <version>${commons-jci-core.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>1.4</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-servlet</artifactId>
+ <version>${jetty.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.glassfish.jersey.core</groupId>
+ <artifactId>jersey-server</artifactId>
+ <version>${jersey-bom.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.glassfish.jersey.containers</groupId>
+ <artifactId>jersey-container-servlet-core</artifactId>
+ <version>${jersey-bom.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.glassfish.jersey.containers</groupId>
+ <artifactId>jersey-container-jetty-http</artifactId>
+ <version>${jersey-bom.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.glassfish.jersey.media</groupId>
+ <artifactId>jersey-media-moxy</artifactId>
+ <version>${jersey-bom.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.glassfish.jersey.media</groupId>
+ <artifactId>jersey-media-multipart</artifactId>
+ <version>${jersey-bom.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- slf4j + logback -->
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>${slf4j-api.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>ch.qos.logback</groupId>
+ <artifactId>logback-classic</artifactId>
+ <version>${logback.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>ch.qos.logback</groupId>
+ <artifactId>logback-core</artifactId>
+ <version>${logback.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- groovy -->
+ <dependency>
+ <groupId>org.codehaus.groovy</groupId>
+ <artifactId>groovy</artifactId>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-configuration2</artifactId>
+ <version>${commons-configuration}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.janusgraph</groupId>
+ <artifactId>janusgraph-core</artifactId>
+ <version>${janusgraph.version}</version>
+ <scope>compile</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <artifactId>commons-collections</artifactId>
+ <groupId>commons-collections</groupId>
+ </exclusion>
+ <exclusion>
+ <artifactId>groovy</artifactId>
+ <groupId>org.codehaus.groovy</groupId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>com.googlecode.json-simple</groupId>
+ <artifactId>json-simple</artifactId>
+ <version>${json-simple.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.janusgraph</groupId>
+ <artifactId>janusgraph-cassandra</artifactId>
+ <version>${janusgraph.version}</version>
+ <scope>compile</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>${commons-codec}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ <version>${jackson.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- Explicitly specified in order to override older version included by epsdk-fw -->
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-annotations</artifactId>
+ <version>${jackson-annotations.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.google.code.gson</groupId>
+ <artifactId>gson</artifactId>
+ <version>${gson.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ <version>${httpclient.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpcore</artifactId>
+ <version>${httpcore.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- Spring 4 dependencies -->
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-core</artifactId>
+ <version>${spring.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-context</artifactId>
+ <version>${spring.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-expression</artifactId>
+ <version>${spring.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-beans</artifactId>
+ <version>${spring.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-aop</artifactId>
+ <version>${spring.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- Spring 4 dependencies end -->
+ <!-- JavaConfig need this library -->
+ <dependency>
+ <groupId>cglib</groupId>
+ <artifactId>cglib</artifactId>
+ <version>3.2.4</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.yaml</groupId>
+ <artifactId>snakeyaml</artifactId>
+ <version>${snakeyaml.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.functionaljava</groupId>
+ <artifactId>functionaljava</artifactId>
+ <version>${functionaljava.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.elasticsearch</groupId>
+ <artifactId>elasticsearch</artifactId>
+ <version>${elastic-search.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.fasterxml.jackson.dataformat</groupId>
+ <artifactId>jackson-dataformat-yaml</artifactId>
+ <version>${jackson.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- CASSANDRA -->
+ <dependency>
+ <groupId>com.datastax.cassandra</groupId>
+ <artifactId>cassandra-driver-core</artifactId>
+ <version>${cassandra.driver.version}</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.datastax.cassandra</groupId>
+ <artifactId>cassandra-driver-mapping</artifactId>
+ <version>${cassandra.driver.version}</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>${libthrift.version}</version>
+ </dependency>
+
+ <!-- CASSANDRA END -->
+
+ <!-- OPEN CSV -->
+ <dependency>
+ <groupId>com.opencsv</groupId>
+ <artifactId>opencsv</artifactId>
+ <version>4.0</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.poi</groupId>
+ <artifactId>poi</artifactId>
+ <version>${apache-poi.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.jdom</groupId>
+ <artifactId>jdom</artifactId>
+ <version>2.0.2</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- Temporary, till building the populate task which adding all components
+ to cache. We will use Serialization Utils. -->
+ <dependency>
+ <groupId>de.ruedigermoeller</groupId>
+ <artifactId>fst</artifactId>
+ <version>2.47</version>
+ <scope>compile</scope>
+ </dependency>
+
+ <!-- testing -->
+ <dependency>
+ <groupId>org.hamcrest</groupId>
+ <artifactId>hamcrest-all</artifactId>
+ <version>${hamcrest-all.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>${junit.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ <version>${testng.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-core</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.google.code.bean-matchers</groupId>
+ <artifactId>bean-matchers</artifactId>
+ <version>${bean-matchers.version}</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.github.stefanbirkner</groupId>
+ <artifactId>system-rules</artifactId>
+ <version>1.19.0</version>
+ <scope>test</scope>
+ </dependency>
+
+ <!-- testing end -->
+
+ <dependency>
+ <groupId>io.netty</groupId>
+ <artifactId>netty-all</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>io.netty</groupId>
+ <artifactId>netty-handler</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.powermock</groupId>
+ <artifactId>powermock-module-junit4</artifactId>
+ <version>2.0.2</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>${project.artifactId}-${project.version}-jar-with-dependencies</finalName>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-deploy-plugin</artifactId>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <executions>
+ <execution>
+ <configuration>
+ <finalName>sdctool</finalName>
+ <appendAssemblyId>false</appendAssemblyId>
+ <descriptors>
+ <descriptor>${project.basedir}/tarball.xml</descriptor>
+ </descriptors>
+ </configuration>
+ <id>assemble-file</id>
+ <phase>verify</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <filters>
+ <filter>
+ <artifact>org.openecomp.sdc:*</artifact>
+ <excludes>
+ <exclude>**/elasticsearch.yml</exclude>
+ </excludes>
+ </filter>
+ <filter>
+ <artifact>*:*</artifact>
+ <excludes>
+ <exclude>META-INF/*.SF</exclude>
+ <exclude>META-INF/*.DSA</exclude>
+ <exclude>META-INF/*.RSA</exclude>
+ </excludes>
+ </filter>
+ </filters>
+ <transformers>
+ <transformer
+ implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+ </transformers>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>ru.yaal.maven</groupId>
+ <artifactId>write-text-files-maven-plugin</artifactId>
+ <configuration>
+ <charset>UTF-8</charset>
+ <files>
+ <file>
+ <path>
+ ${project.basedir}\sdc-cassandra-init\chef-repo\cookbooks\cassandra-actions\attributes\version.rb
+ </path>
+ <lines>
+ <line>normal['version']
+ ="${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.incrementalVersion}"
+ </line>
+ </lines>
+ </file>
+ </files>
+ </configuration>
+ <executions>
+ <execution>
+ <id>write-text-files</id>
+ <phase>prepare-package</phase>
+ <goals>
+ <goal>write-text-files</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy-tosca-folder</id>
+ <!-- here the phase you need -->
+ <phase>compile</phase>
+ <goals>
+ <goal>copy-resources</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.parent.basedir}/asdctool/tosca</outputDirectory>
+ <resources>
+ <resource>
+ <directory>${project.parent.basedir}/catalog-be/src/main/resources/import/tosca
+ </directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>com.github.sylvainlaurent.maven</groupId>
+ <artifactId>yaml-json-validator-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>validate</id>
+ <phase>validate</phase>
+ <goals>
+ <goal>validate</goal>
+ </goals>
+ <configuration>
+ <validationSets>
+ <validationSet>
+ <includes>
+ <include>src/main/resources/**/*.y*ml</include>
+ <include>src/test/resources/**/*.y*ml</include>
+ </includes>
+ </validationSet>
+ <validationSet>
+ <includes>
+ <include>src/main/resources/**/*.json</include>
+ <include>src/test/resources/**/*.json</include>
+ </includes>
+ <excludes>
+ <exclude>src/test/resources/graphError.json</exclude>
+ </excludes>
+ </validationSet>
+ </validationSets>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <profiles>
+ <profile>
+ <id>docker</id>
+ <activation>
+ <activeByDefault>false</activeByDefault>
+ </activation>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-resources-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy-sdctool</id>
+ <phase>verify</phase>
+ <goals>
+ <goal>copy-resources</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>
+ ${basedir}/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/files/default
+ </outputDirectory>
+ <resources>
+ <resource>
+ <directory>${project.parent.basedir}/asdctool/target</directory>
+ <includes>
+ <include>sdctool.tar</include>
+ </includes>
+ </resource>
+ </resources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>io.fabric8</groupId>
+ <artifactId>docker-maven-plugin</artifactId>
+ <configuration>
+ <apiVersion>1.23</apiVersion>
+ <registry>nexus3.onap.org:10001</registry>
+ <authConfig>
+ <pull>
+ <username>docker</username>
+ <password>docker</password>
+ </pull>
+ </authConfig>
+ <images>
+ <!-- Build cassandra-init image -->
+ <image>
+ <name>onap/sdc-cassandra-init</name>
+ <alias>sdc-cassandra-init</alias>
+ <build>
+ <cleanup>try</cleanup>
+ <dockerFileDir>${project.basedir}/sdc-cassandra-init</dockerFileDir>
+ <tags>
+ <tag>${docker.tag}</tag>
+ <tag>
+ ${parsedVersion.majorVersion}.${parsedVersion.minorVersion}-STAGING-latest
+ </tag>
+ </tags>
+ </build>
+ </image>
+ </images>
+ </configuration>
+ <executions>
+ <execution>
+ <id>clean-images</id>
+ <phase>pre-clean</phase>
+ <goals>
+ <goal>remove</goal>
+ </goals>
+ <configuration>
+ <removeAll>true</removeAll>
+ <image>onap/sdc-cassandra-init</image>
+ </configuration>
+ </execution>
+
+ <execution>
+ <id>generate-images</id>
+ <phase>install</phase>
+ <goals>
+ <goal>build</goal>
+ </goals>
+ </execution>
+
+ <execution>
+ <id>push-images</id>
+ <phase>deploy</phase>
+ <goals>
+ <goal>push</goal>
+ </goals>
+ <configuration>
+ <image>onap/sdc-cassandra-init</image>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
</project>
diff --git a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/attributes/version.rb b/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/attributes/version.rb
index 522d2270b4..919b626fa5 100644
--- a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/attributes/version.rb
+++ b/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/attributes/version.rb
@@ -1 +1,2 @@
-normal['version'] ="1.6.0"
+normal['version']
+ ="1.6.0"
diff --git a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/recipes/03-schemaCreation.rb b/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/recipes/03-schemaCreation.rb
index b046b0da51..72a144634c 100644
--- a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/recipes/03-schemaCreation.rb
+++ b/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/recipes/03-schemaCreation.rb
@@ -49,14 +49,6 @@ template "/tmp/sdctool/config/configuration.yaml" do
end
-template "/tmp/sdctool/config/elasticsearch.yml" do
- sensitive true
- source "elasticsearch.yml.erb"
- mode 0755
- variables({
- :elastic_ip => node['Nodes']['ES']
- })
-end
bash "executing-schema-creation" do
code <<-EOH
diff --git a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb b/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb
index d144b07b2b..badcd6f1ea 100644
--- a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb
+++ b/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/configuration.yaml.erb
@@ -26,7 +26,7 @@ beProtocol: http
beSslPort: <%= @ssl_port %>
version: 1.0
released: 2012-11-30
-toscaConformanceLevel: 9.0
+toscaConformanceLevel: 11.0
minToscaConformanceLevel: 3.0
janusGraphCfgFile: <%= @janusgraph_Path %>/janusgraph.properties
@@ -40,7 +40,7 @@ janusGraphHealthCheckReadTimeout: 1
# The interval to try and reconnect to Elasticsearch when it is down during ASDC startup:
-esReconnectIntervalInSeconds: 3
+
uebHealthCheckReconnectIntervalInSeconds: 15
uebHealthCheckReadTimeout: 4
@@ -90,28 +90,8 @@ cassandraConfig:
- { name: sdccomponent, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['<%= @DC_NAME %>','<%= @rep_factor %>']}
- { name: sdcrepository, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['<%= @DC_NAME %>','<%= @rep_factor %>']}
-#Application-specific settings of ES
-elasticSearch:
- # Mapping of index prefix to time-based frame. For example, if below is configured:
- #
- # - indexPrefix: auditingevents
- # creationPeriod: minute
- #
- # then ES object of type which is mapped to "auditingevents-*" template, and created on 2015-12-23 13:24:54, will enter "auditingevents-2015-12-23-13-24" index.
- # Another object created on 2015-12-23 13:25:54, will enter "auditingevents-2015-12-23-13-25" index.
- # If creationPeriod: month, both of the above will enter "auditingevents-2015-12" index.
- #
- # PLEASE NOTE: the timestamps are created in UTC/GMT timezone! This is needed so that timestamps will be correctly presented in Kibana.
- #
- # Legal values for creationPeriod - year, month, day, hour, minute, none (meaning no time-based behaviour).
- #
- # If no creationPeriod is configured for indexPrefix, default behavour is creationPeriod: month.
-
- indicesTimeFrequency:
- - indexPrefix: auditingevents
- creationPeriod: month
- - indexPrefix: monitoring_events
- creationPeriod: month
+
+
artifactTypes:
- CHEF
@@ -285,7 +265,10 @@ systemMonitoring:
isProxy: false
probeIntervalInSeconds: 15
-defaultHeatArtifactTimeoutMinutes: 60
+heatArtifactDeploymentTimeout:
+ defaultMinutes: 30
+ minMinutes: 1
+ maxMinutes: 120
serviceDeploymentArtifacts:
CONTROLLER_BLUEPRINT_ARCHIVE:
@@ -608,10 +591,6 @@ resourceInformationalArtifacts:
resourceInformationalDeployedArtifacts:
-requirementsToFulfillBeforeCert:
-
-capabilitiesToConsumeBeforeCert:
-
unLoggedUrls:
- /sdc2/rest/healthCheck
@@ -764,9 +743,7 @@ dmaapConsumerConfiguration:
username: user
password:
-dmeConfiguration:
- dme2Search: DME2SEARCH
- dme2Resolve: DME2RESOLVE
+
excludedPolicyTypesMapping:
# VF:
diff --git a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/elasticsearch.yml.erb b/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/elasticsearch.yml.erb
deleted file mode 100644
index 79d11f4610..0000000000
--- a/asdctool/sdc-cassandra-init/chef-repo/cookbooks/cassandra-actions/templates/default/elasticsearch.yml.erb
+++ /dev/null
@@ -1,11 +0,0 @@
-discovery.zen.ping.multicast.enabled: false
-discovery.zen.ping.unicast.enabled: true
-node.name: asdc-01
-cluster.name: elasticsearch
-node.master: false
-node.data: false
-http.cors.enabled: true
-path.home: "/var/lib/jetty/config"
-elasticSearch.transportclient: true
-transport.client.initial_nodes:
- - <%= @elastic_ip %>:9300
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMock.java b/asdctool/src/main/java/com/att/nsa/cambria/client/CambriaConsumer.java
index a22e862ae7..3f66031b17 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMock.java
+++ b/asdctool/src/main/java/com/att/nsa/cambria/client/CambriaConsumer.java
@@ -2,14 +2,14 @@
* ============LICENSE_START=======================================================
* SDC
* ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,14 +18,8 @@
* ============LICENSE_END=========================================================
*/
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
+package com.att.nsa.cambria.client;
-import org.openecomp.sdc.be.dao.api.IEsHealthCheckDao;
-import org.openecomp.sdc.common.api.HealthCheckInfo;
-
-public class EsHealthCheckDaoMock implements IEsHealthCheckDao {
- @Override
- public HealthCheckInfo.HealthCheckStatus getClusterHealthStatus() {
- return HealthCheckInfo.HealthCheckStatus.UP;
- }
+public class CambriaConsumer {
+ //mock for bean init
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
index f7aaa1e0d9..b433357db3 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
@@ -23,15 +23,12 @@ package org.openecomp.sdc.asdctool;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Hello world!
*
*/
public class App {
- private static Logger log = LoggerFactory.getLogger(App.class);
public static void main(String[] args) {
String asdcToolPort = "8087";
@@ -55,12 +52,12 @@ public class App {
try {
jettyServer.start();
- log.info("Server was started on port {}", asdcToolPort);
+ System.out.println("Server was started on port " + asdcToolPort);
jettyServer.join();
} catch (Exception e) {
- log.info("Server failed to start - {}", e);
+ e.printStackTrace();
System.exit(1);
} finally {
jettyServer.destroy();
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
index 44bfc536b6..378b81aaf2 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
@@ -20,12 +20,12 @@
package org.openecomp.sdc.asdctool;
-import org.janusgraph.core.JanusGraphFactory;
-import org.janusgraph.core.JanusGraph;
import org.apache.commons.configuration.Configuration;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.Property;
import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
import org.openecomp.sdc.common.log.wrappers.Logger;
import javax.ws.rs.core.Response;
@@ -38,7 +38,7 @@ public class Utils {
private static Logger log = Logger.getLogger(Utils.class.getName());
- public final static String NEW_LINE = System.getProperty("line.separator");
+ public static String NEW_LINE = System.getProperty("line.separator");
public static Response buildOkResponse(
/*
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
index 6cb6a5bb14..d4ebff6005 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
@@ -20,7 +20,12 @@
package org.openecomp.sdc.asdctool.cli;
-import org.apache.commons.cli.*;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
index abfd10547b..99234365da 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
@@ -30,12 +30,10 @@ import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade
import org.openecomp.sdc.be.tosca.CsarUtils;
import org.openecomp.sdc.be.tosca.ToscaExportHandler;
import org.openecomp.sdc.config.CatalogBESpringConfig;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
-import org.springframework.core.io.FileSystemResource;
@Configuration
@Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
@@ -51,13 +49,4 @@ public class ArtifactUUIDFixConfiguration {
return new ArtifactUuidFix(janusGraphDao, toscaOperationFacade, toscaExportHandler, artifactCassandraDao, csarUtils);
}
- @Bean(name = "elasticsearchConfig")
- public PropertiesFactoryBean mapper() {
- String configHome = System.getProperty("config.home");
- PropertiesFactoryBean bean = new PropertiesFactoryBean();
- bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
- return bean;
- }
-
-
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
index d115f9cbb0..975066f694 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
@@ -1,6 +1,9 @@
-/*
- * Copyright © 2016-2018 AT&T
- *
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2016-2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
@@ -12,6 +15,9 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ * ============LICENSE_END=========================================================
+ * Modifications copyright (c) 2019 Nokia
+ * ================================================================================
*/
package org.openecomp.sdc.asdctool.configuration;
@@ -21,6 +27,8 @@ import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import java.io.File;
+
public class ConfigurationUploader {
public static void uploadConfigurationFiles(String appConfigDir) {
@@ -28,5 +36,15 @@ public class ConfigurationUploader {
new ConfigurationManager(configurationSource);
ExternalConfiguration.setAppVersion(ConfigurationManager.getConfigurationManager().getConfiguration().getAppVersion());
System.setProperty("config.home", appConfigDir);
+ System.setProperty("artifactgenerator.config", buildArtifactGeneratorPath(appConfigDir));
+ }
+
+ private static String buildArtifactGeneratorPath(String appConfigDir) {
+ StringBuilder artifactGeneratorPath = new StringBuilder(appConfigDir);
+ if(!appConfigDir.endsWith(File.separator)){
+ artifactGeneratorPath.append(File.separator);
+ }
+ artifactGeneratorPath.append(ConfigurationManager.getConfigurationManager().getConfiguration().getArtifactGeneratorConfig());
+ return artifactGeneratorPath.toString();
}
}
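
For reference, a minimal usage sketch of the updated uploader (not part of the patch; the directory path and the printed values are illustrative assumptions):

    // Hypothetical bootstrap of an asdctool entry point; the directory value is illustrative only.
    import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;

    public class ConfigUploadExample {
        public static void main(String[] args) {
            String appConfigDir = "/opt/sdctool/config"; // assumption: tool configuration directory
            ConfigurationUploader.uploadConfigurationFiles(appConfigDir);
            // After the call, "config.home" points at appConfigDir and "artifactgenerator.config"
            // points at appConfigDir + File.separator + <artifactGeneratorConfig from configuration.yaml>.
            System.out.println(System.getProperty("config.home"));
            System.out.println(System.getProperty("artifactgenerator.config"));
        }
    }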
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java
index 32c37a36b3..ee4d2c1be2 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java
@@ -21,7 +21,6 @@
package org.openecomp.sdc.asdctool.configuration;
import org.openecomp.sdc.asdctool.impl.internal.tool.CsarGenerator;
-import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
import org.openecomp.sdc.be.config.CatalogModelSpringConfig;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
import org.openecomp.sdc.be.dao.config.DAOSpringConfig;
@@ -30,12 +29,10 @@ import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade
import org.openecomp.sdc.be.tosca.CsarUtils;
import org.openecomp.sdc.be.tosca.ToscaExportHandler;
import org.openecomp.sdc.config.CatalogBESpringConfig;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
-import org.springframework.core.io.FileSystemResource;
@Configuration
@Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
@@ -51,12 +48,5 @@ public class CsarGeneratorConfiguration {
artifactCassandraDao, toscaExportHandler);
}
- @Bean(name = "elasticsearchConfig")
- public PropertiesFactoryBean mapper() {
- String configHome = System.getProperty("config.home");
- PropertiesFactoryBean bean = new PropertiesFactoryBean();
- bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
- return bean;
- }
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java
index 51956732f5..1b09b2a57e 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java
@@ -26,22 +26,13 @@ import org.openecomp.sdc.be.dao.config.DAOSpringConfig;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.NodeTypeOperation;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.TopologyTemplateOperation;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
-import org.springframework.core.io.FileSystemResource;
@Configuration
@Import({DAOSpringConfig.class, CatalogModelSpringConfig.class})
public class InternalToolConfiguration {
- @Bean(name = "elasticsearchConfig")
- public PropertiesFactoryBean mapper() {
- String configHome = System.getProperty("config.home");
- PropertiesFactoryBean bean = new PropertiesFactoryBean();
- bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
- return bean;
- }
@Bean
public DeleteComponentHandler deleteComponentHandler(
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/SdcSchemaFileImportConfiguration.java
index 8c70ad3407..0cfd894b39 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/SdcSchemaFileImportConfiguration.java
@@ -2,7 +2,7 @@
* ============LICENSE_START=======================================================
* SDC
* ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,21 +20,22 @@
package org.openecomp.sdc.asdctool.configuration;
-import org.openecomp.sdc.be.dao.config.JanusGraphSpringConfig;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphGenericDao;
-import org.openecomp.sdc.be.model.operations.impl.ConsumerOperation;
+import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
+import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Import;
@Configuration
-@Import({JanusGraphSpringConfig.class})
-public class GetConsumersConfiguration {
+public class SdcSchemaFileImportConfiguration {
- @Bean("consumer-operation")
- public ConsumerOperation consumerOperation(JanusGraphGenericDao janusGraphGenericDao) {
- return new ConsumerOperation(janusGraphGenericDao);
- }
+ @Bean(name = "cassandra-client")
+ public CassandraClient cassandraClient() {
+ return new CassandraClient();
+ }
+ @Bean(name = "sdc-schema-files-cassandra-dao")
+ public SdcSchemaFilesCassandraDao sdcSchemaFilesCassandraDao() {
+ return new SdcSchemaFilesCassandraDao(cassandraClient());
+ }
}
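
A minimal sketch of how the renamed configuration could be consumed, assuming a Spring AnnotationConfigApplicationContext is used to wire the two beans (the bootstrap class itself is hypothetical, not part of the patch):

    import org.openecomp.sdc.asdctool.configuration.SdcSchemaFileImportConfiguration;
    import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
    import org.springframework.context.annotation.AnnotationConfigApplicationContext;

    public class SchemaImportBootstrapExample {
        public static void main(String[] args) {
            // Assumes the configuration files were already uploaded (see ConfigurationUploader above).
            try (AnnotationConfigApplicationContext ctx =
                     new AnnotationConfigApplicationContext(SdcSchemaFileImportConfiguration.class)) {
                SdcSchemaFilesCassandraDao dao =
                    ctx.getBean("sdc-schema-files-cassandra-dao", SdcSchemaFilesCassandraDao.class);
                // dao is backed by the "cassandra-client" bean defined in the same configuration class.
            }
        }
    }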
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java
deleted file mode 100644
index 75283f15ba..0000000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
-
-import fj.data.Either;
-import org.openecomp.sdc.be.dao.api.ICatalogDAO;
-import org.openecomp.sdc.be.dao.api.ResourceUploadStatus;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
-
-import java.util.List;
-
-public class ESCatalogDAOMock implements ICatalogDAO {
-
- @Override
- public void addToIndicesMap(String typeName, String indexName) {
-
- }
-
- @Override
- public void writeArtifact(ESArtifactData artifactData) {
-
- }
-
- @Override
- public Either<ESArtifactData, ResourceUploadStatus> getArtifact(String id) {
- return null;
- }
-
- @Override
- public Either<List<ESArtifactData>, ResourceUploadStatus> getArtifacts(String[] ids) {
- return null;
- }
-
- @Override
- public void deleteArtifact(String id) {
-
- }
-
- @Override
- public void deleteAllArtifacts() {
-
- }
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/DistributionStatusEnum.java
index fd68de24d2..92c4a7cf54 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/DistributionStatusEnum.java
@@ -2,7 +2,7 @@
* ============LICENSE_START=======================================================
* SDC
* ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -18,30 +18,29 @@
* ============LICENSE_END=========================================================
*/
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
+package org.openecomp.sdc.asdctool.enums;
-import org.openecomp.sdc.be.dao.api.ICatalogDAO;
-import org.openecomp.sdc.be.dao.api.IEsHealthCheckDao;
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
+public enum DistributionStatusEnum {
+ DISTRIBUTION_NOT_APPROVED("Distribution not approved"), DISTRIBUTION_APPROVED("Distribution approved"), DISTRIBUTED("Distributed"), DISTRIBUTION_REJECTED("Distribution rejected");
-@Configuration
-public class ElasticSearchMocksConfiguration {
+ private String value;
- @Bean("elasticsearch-client")
- public ElasticSearchClient elasticSearchClientMock() {
- return new ElasticSearchClientMock();
+ private DistributionStatusEnum(String value) {
+ this.value = value;
}
- @Bean("resource-dao")
- public ICatalogDAO esCatalogDAOMock() {
- return new ESCatalogDAOMock();
+ public String getValue() {
+ return value;
}
- @Bean("esHealthCheckDao")
- public IEsHealthCheckDao esHealthCheckDaoMock() {
- return new EsHealthCheckDaoMock();
+ public static DistributionStatusEnum findState(String state) {
+
+ for (DistributionStatusEnum distributionStatus : DistributionStatusEnum.values()) {
+ if (distributionStatus.name().equalsIgnoreCase(state) || distributionStatus.getValue().equalsIgnoreCase(state)) {
+ return distributionStatus;
+ }
+ }
+ return null;
}
-}
+}
\ No newline at end of file
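
A short usage sketch (illustrative, not part of the patch): findState accepts either the enum constant name or its display value, case-insensitively, and returns null when nothing matches:

    DistributionStatusEnum byName = DistributionStatusEnum.findState("DISTRIBUTED");
    DistributionStatusEnum byValue = DistributionStatusEnum.findState("Distribution approved");
    // byName == DistributionStatusEnum.DISTRIBUTED, byValue == DistributionStatusEnum.DISTRIBUTION_APPROVED
    DistributionStatusEnum unknown = DistributionStatusEnum.findState("archived"); // returns null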
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifeCycleTransitionEnum.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifeCycleTransitionEnum.java
new file mode 100644
index 0000000000..b29f5fba8d
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifeCycleTransitionEnum.java
@@ -0,0 +1,83 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.enums;
+
+public enum LifeCycleTransitionEnum {
+
+ CHECKOUT("checkout"),
+ CHECKIN("checkin"),
+ CERTIFICATION_REQUEST("certificationRequest"),
+ UNDO_CHECKOUT("undoCheckout"),
+ CANCEL_CERTIFICATION("cancelCertification"),
+ START_CERTIFICATION("startCertification"),
+ FAIL_CERTIFICATION("failCertification"),
+ CERTIFY("certify"),
+ DISTRIBUTE("distribute");
+
+ String displayName;
+
+ LifeCycleTransitionEnum(String displayName) {
+ this.displayName = displayName;
+ }
+
+ public String getDisplayName() {
+ return displayName;
+ }
+
+ public static LifeCycleTransitionEnum getFromDisplayName(String name) {
+ if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CHECKOUT.getDisplayName())) {
+ return LifeCycleTransitionEnum.CHECKOUT;
+ }
+ if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CHECKIN.getDisplayName())) {
+ return LifeCycleTransitionEnum.CHECKIN;
+ }
+ if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CERTIFICATION_REQUEST.getDisplayName())) {
+ return LifeCycleTransitionEnum.CERTIFICATION_REQUEST;
+ }
+ if (name.equalsIgnoreCase(LifeCycleTransitionEnum.UNDO_CHECKOUT.getDisplayName())) {
+ return LifeCycleTransitionEnum.UNDO_CHECKOUT;
+ }
+ if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CANCEL_CERTIFICATION.getDisplayName())) {
+ return LifeCycleTransitionEnum.CANCEL_CERTIFICATION;
+ }
+ if (name.equalsIgnoreCase(LifeCycleTransitionEnum.START_CERTIFICATION.getDisplayName())) {
+ return LifeCycleTransitionEnum.START_CERTIFICATION;
+ }
+ if (name.equalsIgnoreCase(LifeCycleTransitionEnum.FAIL_CERTIFICATION.getDisplayName())) {
+ return LifeCycleTransitionEnum.FAIL_CERTIFICATION;
+ }
+ if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CERTIFY.getDisplayName())) {
+ return LifeCycleTransitionEnum.CERTIFY;
+ }
+ if (name.equalsIgnoreCase(LifeCycleTransitionEnum.DISTRIBUTE.getDisplayName())) {
+ return LifeCycleTransitionEnum.DISTRIBUTE;
+ } else
+ throw new IllegalArgumentException(name + " value does not match any of LifeCycleTransitionEnum values");
+ }
+
+ public static String valuesAsString() {
+ StringBuilder sb = new StringBuilder();
+ for (LifeCycleTransitionEnum op : LifeCycleTransitionEnum.values()) {
+ sb.append(op.getDisplayName()).append(" ");
+ }
+ return sb.toString();
+ }
+}
\ No newline at end of file
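
For illustration (not part of the patch), resolving a user-supplied transition name with the new enum; getFromDisplayName throws for unknown input, so the error path is shown as well (the argument value is hypothetical):

    String arg = "checkin"; // hypothetical CLI/REST input
    try {
        LifeCycleTransitionEnum transition = LifeCycleTransitionEnum.getFromDisplayName(arg);
        System.out.println("resolved transition: " + transition);
    } catch (IllegalArgumentException e) {
        System.err.println("expected one of: " + LifeCycleTransitionEnum.valuesAsString());
    }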
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMockTest.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifecycleStateEnum.java
index bd874c5ded..98fb95faa7 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMockTest.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifecycleStateEnum.java
@@ -2,14 +2,14 @@
* ============LICENSE_START=======================================================
* SDC
* ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,24 +18,27 @@
* ============LICENSE_END=========================================================
*/
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
+package org.openecomp.sdc.asdctool.enums;
-import org.junit.Test;
-import org.openecomp.sdc.common.api.HealthCheckInfo.HealthCheckStatus;
+public enum LifecycleStateEnum {
-public class EsHealthCheckDaoMockTest {
+ READY_FOR_CERTIFICATION,
- private EsHealthCheckDaoMock createTestSubject() {
- return new EsHealthCheckDaoMock();
- }
+ CERTIFICATION_IN_PROGRESS,
- @Test
- public void testGetClusterHealthStatus() throws Exception {
- EsHealthCheckDaoMock testSubject;
- HealthCheckStatus result;
+ CERTIFIED,
- // default test
- testSubject = createTestSubject();
- result = testSubject.getClusterHealthStatus();
- }
-}
+ NOT_CERTIFIED_CHECKIN,
+
+ NOT_CERTIFIED_CHECKOUT;
+
+ public static LifecycleStateEnum findState(String state) {
+
+ for (LifecycleStateEnum lifecycleStateEnum : LifecycleStateEnum.values()) {
+ if (lifecycleStateEnum.name().equals(state)) {
+ return lifecycleStateEnum;
+ }
+ }
+ return null;
+ }
+}
\ No newline at end of file
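
By contrast, LifecycleStateEnum.findState returns null for an unrecognized state, so callers are expected to null-check; a minimal illustrative sketch (the fallback choice is an assumption, not taken from the patch):

    LifecycleStateEnum state = LifecycleStateEnum.findState("CERTIFIED");
    if (state == null) {
        state = LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT; // hypothetical fallback
    }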
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
index 471b54d4cb..1ab2b809c1 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
@@ -20,7 +20,9 @@
package org.openecomp.sdc.asdctool.enums;
-import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.*;
+import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.DATA_IMPORT_LIST;
+import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.EMPTY_IMPORT_LIST;
+import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.RELATIONSHIPS_TYPES_IMPORT_LIST;
public enum SchemaZipFileEnum {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
index 4dab15fa4b..4ad90b884c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
@@ -44,7 +44,16 @@ import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.datatypes.tosca.ToscaDataDefinition;
import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.*;
+import org.openecomp.sdc.be.model.ArtifactDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.ComponentParametersView;
+import org.openecomp.sdc.be.model.DistributionStatusEnum;
+import org.openecomp.sdc.be.model.GroupDefinition;
+import org.openecomp.sdc.be.model.GroupInstance;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.Service;
import org.openecomp.sdc.be.model.jsonjanusgraph.datamodel.TopologyTemplate;
import org.openecomp.sdc.be.model.jsonjanusgraph.datamodel.ToscaElement;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
@@ -52,7 +61,7 @@ import org.openecomp.sdc.be.model.jsonjanusgraph.utils.ModelConverter;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
+import org.openecomp.sdc.be.resources.data.DAOArtifactData;
import org.openecomp.sdc.be.tosca.CsarUtils;
import org.openecomp.sdc.be.tosca.ToscaError;
import org.openecomp.sdc.be.tosca.ToscaExportHandler;
@@ -72,23 +81,23 @@ import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
-import java.util.EnumMap;
import java.util.stream.Collectors;
@org.springframework.stereotype.Component("artifactUuidFix")
public class ArtifactUuidFix {
-
+
private static final String MIGRATION1707_ARTIFACT_UUID_FIX = "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ";
- private static final String FAILED_TO_FETCH_VF_RESOURCES = "Failed to fetch vf resources ";
+ private static final String FAILED_TO_FETCH_VF_RESOURCES = "Failed to fetch vf resources ";
- private static final String UTF8 = "utf-8";
+ private static final String UTF8 = "utf-8";
private JanusGraphDao janusGraphDao;
private ToscaOperationFacade toscaOperationFacade;
@@ -249,7 +258,7 @@ public class ArtifactUuidFix {
vfLst.add(resource);
writeModuleResultToFile(writer, resource, service);
writer.flush();
-
+
}
janusGraphDao.commit();
}
@@ -274,7 +283,7 @@ public class ArtifactUuidFix {
Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
if ("distributed_only".equals(fixServices)) {
- hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
+ hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
hasProps.put(GraphPropertyEnum.DISTRIBUTION_STATUS, DistributionStatusEnum.DISTRIBUTED.name());
}
@@ -302,17 +311,17 @@ public class ArtifactUuidFix {
continue;
}
Service service = toscaElement.left().value();
-
+
String serviceName = (String) gv.getMetadataProperty(GraphPropertyEnum.NAME);
-
+
boolean isProblematic = isProblematicService(service, serviceName);
if (isProblematic) {
serviceList.add(service);
writeModuleResultToFile(writer, service, null);
writer.flush();
-
+
}
-
+
janusGraphDao.commit();
}
log.info("output file with list of services : {}", fileName);
@@ -326,9 +335,9 @@ public class ArtifactUuidFix {
}
private boolean isProblematicService( Service service, String serviceName) {
-
+
List<ComponentInstance> componentInstances = service.getComponentInstances();
-
+
if (componentInstances == null) {
log.info("No instances for service {} ", service.getUniqueId());
return false;
@@ -350,21 +359,21 @@ public class ArtifactUuidFix {
if(isCheckVFModules){
Optional<ArtifactDefinition> optionalVfModuleArtifact = deploymentArtifacts.values().stream()
.filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.VF_MODULES_METADATA.name())).findAny();
-
- if(!optionalVfModuleArtifact.isPresent())
- return true;
-
- ArtifactDefinition vfModuleArtifact = optionalVfModuleArtifact.get();
- Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> vfModulesEither = parseVFModuleJson(vfModuleArtifact);
- if(vfModulesEither.isRight()){
- log.error("Failed to parse vfModule for service {} status is {}", service.getUniqueId(), vfModulesEither.right().value());
- return true;
- }
- vfModules = vfModulesEither.left().value();
- if(vfModules == null || vfModules.isEmpty() ){
- log.info("vfModules empty for service {}", service.getUniqueId());
- return true;
- }
+
+ if(!optionalVfModuleArtifact.isPresent())
+ return true;
+
+ ArtifactDefinition vfModuleArtifact = optionalVfModuleArtifact.get();
+ Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> vfModulesEither = parseVFModuleJson(vfModuleArtifact);
+ if(vfModulesEither.isRight()){
+ log.error("Failed to parse vfModule for service {} status is {}", service.getUniqueId(), vfModulesEither.right().value());
+ return true;
+ }
+ vfModules = vfModulesEither.left().value();
+ if(vfModules == null || vfModules.isEmpty() ){
+ log.info("vfModules empty for service {}", service.getUniqueId());
+ return true;
+ }
}
for (GroupInstance gi : groupInstances) {
@@ -373,7 +382,7 @@ public class ArtifactUuidFix {
if(isCheckVFModules && vfModules != null){
Optional<VfModuleArtifactPayloadEx> op = vfModules.stream().filter(vf -> vf.getVfModuleModelName().equals(gi.getGroupName())).findAny();
if(!op.isPresent()){
- log.error("Failed to find vfModule for group {}", gi.getGroupName());
+ log.error("Failed to find vfModule for group {}", gi.getGroupName());
return true;
}
vfModule = op.get();
@@ -383,13 +392,13 @@ public class ArtifactUuidFix {
}
}
}
-
+
}
return false;
}
private boolean isProblematicGroup(GroupDefinition gr, String resourceName,
- Map<String, ArtifactDefinition> deploymentArtifacts) {
+ Map<String, ArtifactDefinition> deploymentArtifacts) {
List<String> artifacts = gr.getArtifacts();
List<String> artifactsUuid = gr.getArtifactsUuid();
Set<String> artifactsSet = new HashSet<>();
@@ -454,14 +463,14 @@ public class ArtifactUuidFix {
}
private boolean isProblematicGroupInstance(GroupInstance gi, String instName, String servicename,
- Map<String, ArtifactDefinition> deploymentArtifacts, VfModuleArtifactPayloadEx vfModule) {
+ Map<String, ArtifactDefinition> deploymentArtifacts, VfModuleArtifactPayloadEx vfModule) {
List<String> artifacts = gi.getArtifacts();
List<String> artifactsUuid = gi.getArtifactsUuid();
List<String> instArtifactsUuid = gi.getGroupInstanceArtifactsUuid();
List<String> instArtifactsId = gi.getGroupInstanceArtifacts();
Set<String> instArtifatIdSet = new HashSet<>();
- Set<String> artifactsSet = new HashSet<>();
-
+ Set<String> artifactsSet = new HashSet<>();
+
log.info("check group {} for instance {} ", gi.getGroupName(), instName);
if ((artifactsUuid == null || artifactsUuid.isEmpty()) && (artifacts == null || artifacts.isEmpty())) {
log.info("No instance groups for instance {} in service {} ", instName, servicename);
@@ -548,12 +557,12 @@ public class ArtifactUuidFix {
if(vfModule != null && artifactsUuid != null){
return isProblematicVFModule(vfModule, artifactsUuid, instArtifactsUuid);
}
-
+
return false;
}
private boolean isProblematicVFModule(VfModuleArtifactPayloadEx vfModule, List<String> artifactsUuid,
- List<String> instArtifactsUuid) {
+ List<String> instArtifactsUuid) {
log.info(" isProblematicVFModule {} ", vfModule.getVfModuleModelName());
List<String> vfModuleArtifacts = vfModule.getArtifacts();
List<String> allArtifacts = new ArrayList<>();
@@ -580,10 +589,10 @@ public class ArtifactUuidFix {
return false;
}
-
+
private boolean fix(List<Resource> vfLst, List<Service> serviceList, Map<String, List<Component>> nodesToFixTosca,
- Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
+ Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
boolean res = true;
log.info(" Fix started ***** ");
if (vfLst != null && !vfLst.isEmpty()) {
@@ -600,6 +609,7 @@ public class ArtifactUuidFix {
long time = System.currentTimeMillis();
String fileName = "FailedGenerateTosca" + "_" + time + ".csv";
+
try(Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF8))) {
writer.write("componentType, name, version, UID, UUID, invariantUUID, state\n");
List<Component> failedList = new ArrayList<>();
@@ -614,7 +624,7 @@ public class ArtifactUuidFix {
generateAndSaveToscaArtifacts(vfToFixTosca, fixedIds, vfLst, failedList);
}
-
+
for (Component component : vfLst) {
res = generateToscaPerComponent(fixedIds, component);
if (res) {
@@ -642,7 +652,7 @@ public class ArtifactUuidFix {
}
-
+
for (Component component : serviceList) {
res = generateToscaPerComponent(fixedIds, component);
if (res) {
@@ -690,7 +700,7 @@ public class ArtifactUuidFix {
}
private boolean generateAndSaveToscaArtifacts(Map<String, List<Component>> nodesToFixTosca, Set<String> fixedIds,
- List<? extends Component> componentsWithFailedGroups, List<Component> failedList) {
+ List<? extends Component> componentsWithFailedGroups, List<Component> failedList) {
boolean res = true;
log.debug("Migration1707ArtifactUuidFix generateAndSaveToscaArtifacts started ");
for (Map.Entry<String, List<Component>> entry : nodesToFixTosca.entrySet()) {
@@ -751,9 +761,9 @@ public class ArtifactUuidFix {
log.error("Couldn't generate and save tosca template component unique id {}, name {} error: {}",
toscaElementFull.getUniqueId(), toscaElementFull.getName(), either.right().value());
res = false;
-
+
}
-
+
if (res) {
c.setToscaArtifacts(either.left().value().getToscaArtifacts());
fixedIds.add(toscaElementFull.getUniqueId());
@@ -770,7 +780,7 @@ public class ArtifactUuidFix {
}
private <T extends ToscaDataDefinition> boolean fixDataOnGraph(String componentId, VertexTypeEnum vertexTypeEnum,
- EdgeLabelEnum edgeLabelEnum, Map<String, T> groups) {
+ EdgeLabelEnum edgeLabelEnum, Map<String, T> groups) {
log.debug("amount groups to update: VertexTypeEnum {} EdgeLabelEnum {} data size {}", vertexTypeEnum.getName(),
edgeLabelEnum, groups.size());
boolean res = true;
@@ -860,7 +870,7 @@ public class ArtifactUuidFix {
}
private void fixGroupInstances(Service service, Map<String, ArtifactDefinition> artifactsMap,
- List<GroupInstance> groupsToDelete, GroupInstance group) {
+ List<GroupInstance> groupsToDelete, GroupInstance group) {
if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) {
log.debug("Migration1707ArtifactUuidFix fix group: resource id {}, group name {} ", service.getUniqueId(),
group.getName());
@@ -871,9 +881,9 @@ public class ArtifactUuidFix {
groupArtifactsSet.addAll(groupInsArtifacts);
}
List<String> groupArtifacts = new ArrayList<>(groupArtifactsSet);
-
+
clearGroupInstanceArtifacts(group);
-
+
for (String artifactId : groupArtifacts) {
fixArtifactUndergroupInstances(artifactsMap, group, groupArtifacts, artifactId);
}
@@ -902,14 +912,14 @@ public class ArtifactUuidFix {
else{
group.setGroupInstanceArtifacts(new ArrayList<>());
}
- if(group.getGroupInstanceArtifactsUuid() != null )
+ if(group.getGroupInstanceArtifactsUuid() != null )
group.getGroupInstanceArtifactsUuid().clear();
else
group.setGroupInstanceArtifactsUuid(new ArrayList<>());
}
private void fixArtifactUndergroupInstances(Map<String, ArtifactDefinition> artifactsMap, GroupInstance group,
- List<String> groupArtifacts, String artifactId) {
+ List<String> groupArtifacts, String artifactId) {
String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
log.debug("Migration1707ArtifactUuidFix fix group: group name {} artifactId for fix {} artifactlabel {} ",
group.getName(), artifactId, artifactlabel);
@@ -938,7 +948,7 @@ public class ArtifactUuidFix {
} else {
log.debug(
MIGRATION1707_ARTIFACT_UUID_FIX,
- group.getName(), correctArtifactId, correctArtifactUUID);
+ group.getName(), correctArtifactId, correctArtifactUUID);
Set<String> tmpSet = new HashSet<>(group.getGroupInstanceArtifacts());
tmpSet.add(correctArtifactId);
group.setGroupInstanceArtifacts(new ArrayList<>(tmpSet));
@@ -997,7 +1007,7 @@ public class ArtifactUuidFix {
}
private void fixArtifactUnderGroup(Map<String, ArtifactDefinition> artifactsMap, GroupDefinition group,
- List<String> groupArtifacts, String artifactId) {
+ List<String> groupArtifacts, String artifactId) {
String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
log.debug("Migration1707ArtifactUuidFix fix group: group name {} artifactId for fix {} artifactlabel {} ",
@@ -1037,7 +1047,7 @@ public class ArtifactUuidFix {
}
private void writeModuleResultToFile(Writer writer, org.openecomp.sdc.be.model.Component component,
- Service service) {
+ Service service) {
try {
// "service name, service id, state, version
StringBuilder sb = new StringBuilder(component.getName());
@@ -1071,7 +1081,7 @@ public class ArtifactUuidFix {
}
public boolean doFixTosca(Map<String, List<Component>> nodeToFix, Map<String, List<Component>> vfToFix,
- Map<String, List<Component>> serviceToFix) {
+ Map<String, List<Component>> serviceToFix) {
Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
@@ -1099,7 +1109,7 @@ public class ArtifactUuidFix {
}
public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type,
- Map<GraphPropertyEnum, Object> hasProps) {
+ Map<GraphPropertyEnum, Object> hasProps) {
Map<String, List<Component>> result = new HashMap<>();
try {
@@ -1146,7 +1156,7 @@ public class ArtifactUuidFix {
}
public boolean validateTosca(Map<String, List<Component>> vertices, Map<String, List<Component>> compToFix,
- String name) {
+ String name) {
boolean result = true;
long time = System.currentTimeMillis();
String fileName = name + "_" + time + ".csv";
@@ -1238,7 +1248,7 @@ public class ArtifactUuidFix {
toscaArtifact.setEsId(toscaArtifact.getUniqueId());
toscaArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
- ESArtifactData artifactData = new ESArtifactData(toscaArtifact.getEsId(), decodedPayload);
+ DAOArtifactData artifactData = new DAOArtifactData(toscaArtifact.getEsId(), decodedPayload);
artifactCassandraDao.saveArtifact(artifactData);
log.debug("Tosca yaml artifact esId {} ", toscaArtifact.getEsId());
@@ -1270,7 +1280,7 @@ public class ArtifactUuidFix {
csarArtifact.setEsId(csarArtifact.getUniqueId());
csarArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
- ESArtifactData artifactData = new ESArtifactData(csarArtifact.getEsId(), decodedPayload);
+ DAOArtifactData artifactData = new DAOArtifactData(csarArtifact.getEsId(), decodedPayload);
artifactCassandraDao.saveArtifact(artifactData);
log.debug("Tosca csar artifact esId {} ", csarArtifact.getEsId());
@@ -1285,29 +1295,29 @@ public class ArtifactUuidFix {
return Either.left(parent);
}
-
- private ArtifactDefinition createVfModuleArtifact(ComponentInstance currVF, Service service) {
- ArtifactDefinition vfModuleArtifactDefinition = new ArtifactDefinition();
+ private ArtifactDefinition createVfModuleArtifact(ComponentInstance currVF, Service service) {
- vfModuleArtifactDefinition.setDescription("Auto-generated VF Modules information artifact");
- vfModuleArtifactDefinition.setArtifactDisplayName("Vf Modules Metadata");
- vfModuleArtifactDefinition.setArtifactType(ArtifactTypeEnum.VF_MODULES_METADATA.getType());
- vfModuleArtifactDefinition.setArtifactGroupType(ArtifactGroupTypeEnum.DEPLOYMENT);
- vfModuleArtifactDefinition.setArtifactLabel("vfModulesMetadata");
- vfModuleArtifactDefinition.setTimeout(0);
- vfModuleArtifactDefinition.setArtifactName(currVF.getNormalizedName() + "_modules.json");
-
- return vfModuleArtifactDefinition;
- }
+ ArtifactDefinition vfModuleArtifactDefinition = new ArtifactDefinition();
+
+ vfModuleArtifactDefinition.setDescription("Auto-generated VF Modules information artifact");
+ vfModuleArtifactDefinition.setArtifactDisplayName("Vf Modules Metadata");
+ vfModuleArtifactDefinition.setArtifactType(ArtifactTypeEnum.VF_MODULES_METADATA.getType());
+ vfModuleArtifactDefinition.setArtifactGroupType(ArtifactGroupTypeEnum.DEPLOYMENT);
+ vfModuleArtifactDefinition.setArtifactLabel("vfModulesMetadata");
+ vfModuleArtifactDefinition.setTimeout(0);
+ vfModuleArtifactDefinition.setArtifactName(currVF.getNormalizedName() + "_modules.json");
+
+ return vfModuleArtifactDefinition;
+ }
private void fillVfModuleInstHeatEnvPayload(Component parent, ComponentInstance instance, List<GroupInstance> groupsForCurrVF,
- ArtifactDefinition vfModuleArtifact) {
+ ArtifactDefinition vfModuleArtifact) {
log.debug("generate new vf module for component. name {}, id {}, Version {}", instance.getName(), instance.getUniqueId());
-
+
String uniqueId = UniqueIdBuilder.buildInstanceArtifactUniqueId(parent.getUniqueId(), instance.getUniqueId(), vfModuleArtifact.getArtifactLabel());
-
+
vfModuleArtifact.setUniqueId(uniqueId);
vfModuleArtifact.setEsId(vfModuleArtifact.getUniqueId());
@@ -1329,7 +1339,7 @@ public class ArtifactUuidFix {
.calculateMD5Base64EncodedByByteArray(vfModulePayloadString.getBytes());
vfModuleArtifact.setArtifactChecksum(newCheckSum);
- ESArtifactData artifactData = new ESArtifactData(vfModuleArtifact.getEsId(),
+ DAOArtifactData artifactData = new DAOArtifactData(vfModuleArtifact.getEsId(),
vfModulePayloadString.getBytes());
artifactCassandraDao.saveArtifact(artifactData);
@@ -1338,21 +1348,21 @@ public class ArtifactUuidFix {
}
}
-
+
private Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> parseVFModuleJson(ArtifactDefinition vfModuleArtifact) {
log.info("Try to get vfModule json from cassandra {}", vfModuleArtifact.getEsId());
- Either<ESArtifactData, CassandraOperationStatus> vfModuleData = artifactCassandraDao.getArtifact(vfModuleArtifact.getEsId());
-
+ Either<DAOArtifactData, CassandraOperationStatus> vfModuleData = artifactCassandraDao.getArtifact(vfModuleArtifact.getEsId());
+
if (vfModuleData.isRight()) {
CassandraOperationStatus resourceUploadStatus = vfModuleData.right().value();
StorageOperationStatus storageResponse = DaoStatusConverter.convertCassandraStatusToStorageStatus(resourceUploadStatus);
log.error("failed to fetch vfModule json {} from cassandra. Status is {}", vfModuleArtifact.getEsId(), storageResponse);
return Either.right(storageResponse);
-
+
}
- ESArtifactData esArtifactData = vfModuleData.left().value();
- String gsonData = new String( esArtifactData.getDataAsArray());
+ DAOArtifactData DAOArtifactData = vfModuleData.left().value();
+ String gsonData = new String( DAOArtifactData.getDataAsArray());
final Gson gson = new GsonBuilder().setPrettyPrinting().create();
JsonArray jsonElement = new JsonArray();
jsonElement = gson.fromJson(gsonData, jsonElement.getClass());
@@ -1361,10 +1371,10 @@ public class ArtifactUuidFix {
VfModuleArtifactPayloadEx vfModule = ComponentsUtils.parseJsonToObject(je.toString(), VfModuleArtifactPayloadEx.class);
vfModules.add(vfModule);
});
-
+
log.debug ("parse vf module finish {}", gsonData);
return Either.left(vfModules);
-
+
}
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
deleted file mode 100644
index 2e14b906ff..0000000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
+++ /dev/null
@@ -1,813 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.impl;
-
-
-import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import fj.data.Either;
-import org.apache.commons.lang.SystemUtils;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.search.SearchHit;
-import org.openecomp.sdc.be.auditing.api.AuditEventFactory;
-import org.openecomp.sdc.be.auditing.impl.AuditAuthRequestEventFactory;
-import org.openecomp.sdc.be.auditing.impl.AuditConsumerEventFactory;
-import org.openecomp.sdc.be.auditing.impl.AuditGetUebClusterEventFactory;
-import org.openecomp.sdc.be.auditing.impl.category.AuditCategoryEventFactory;
-import org.openecomp.sdc.be.auditing.impl.category.AuditGetCategoryHierarchyEventFactory;
-import org.openecomp.sdc.be.auditing.impl.distribution.*;
-import org.openecomp.sdc.be.auditing.impl.resourceadmin.AuditResourceAdminEventMigrationFactory;
-import org.openecomp.sdc.be.auditing.impl.usersadmin.AuditGetUsersListEventFactory;
-import org.openecomp.sdc.be.auditing.impl.usersadmin.AuditUserAccessEventFactory;
-import org.openecomp.sdc.be.auditing.impl.usersadmin.AuditUserAdminEventFactory;
-import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
-import org.openecomp.sdc.be.dao.cassandra.schema.Table;
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingActionEnum;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingGenericEvent;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingTypesConstants;
-import org.openecomp.sdc.be.resources.data.auditing.model.*;
-import org.openecomp.sdc.common.datastructure.AuditingFieldsKey;
-import org.openecomp.sdc.common.log.wrappers.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import java.io.*;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.EnumMap;
-import java.util.Map;
-
-/**
- * Created by mlando on 5/16/2016.
- */
-public class DataMigration {
-
- private ObjectMapper jsonMapper = new ObjectMapper();
-
- private static Logger log = Logger.getLogger(DataMigration.class.getName());
-
- private ElasticSearchClient elasticSearchClient;
- private AuditCassandraDao auditCassandraDao;
- private ArtifactCassandraDao artifactCassandraDao;
-
- @Autowired
- public DataMigration(AuditCassandraDao auditCassandraDao,
- ArtifactCassandraDao artifactCassandraDao) {
- this.auditCassandraDao = auditCassandraDao;
- this.artifactCassandraDao = artifactCassandraDao;
- }
-
- /**
- * the method exports and imports the records from ES to cassandra the flow
- * will check to see if the files are not empty if the files are not empty
- * the export will be skiped and the flow will use the existing files. the
- * flow will check if the tables in cassandra are empty, if the tables are
- * not empty the proces will stop and exit. if the tables are empty the
- * method will import the records from the files. in case of a fail the flow
- * will exit and clear all the Cassandra tables.
- *
- * @param appConfigDir
- * the location of the dir in wich the output files will be
- * stored
- * @param exportFromEs
- * should the es be exported again and overwrite the old export
- * @param importToCassandra
- * should we import the data into cassandra
- * @return true in case the operation was successful.
- */
- public boolean migrateDataESToCassndra(String appConfigDir, boolean exportFromEs, boolean importToCassandra) {
- if (!initEsClient()) {
- return false;
- }
- Map<Table, File> files = createOutPutFiles(appConfigDir, exportFromEs);
- if (files == null) {
- return false;
- }
- if (exportFromEs && filesEmpty(files)) {
- Map<Table, PrintWriter> printerWritersMap = createWriters(files);
- if (printerWritersMap == null) {
- return false;
- }
- try {
- ImmutableOpenMap<String, IndexMetaData> indexData = getIndexData();
- for (ObjectCursor<String> key : indexData.keys()) {
- if (("resources".equalsIgnoreCase(key.value) || key.value.startsWith("auditingevents"))
- && !exportArtifacts(key.value, printerWritersMap)) {
- return false;
- }
- }
- } finally {
- if (elasticSearchClient != null) {
- elasticSearchClient.close();
- }
- for (PrintWriter writer : printerWritersMap.values()) {
- writer.close();
- }
- }
- }
-
- return !importToCassandra || importToCassndra(files);
- }
-
- private boolean initEsClient() {
- String configHome = System.getProperty("config.home");
- URL url = null;
- Settings settings = null;
- try {
- if (SystemUtils.IS_OS_WINDOWS) {
- url = new URL("file:///" + configHome + "/elasticsearch.yml");
- } else {
- url = new URL("file:" + configHome + "/elasticsearch.yml");
- }
- log.debug("URL {}", url);
- settings = Settings.settingsBuilder().loadFromPath(Paths.get(url.toURI())).build();
- } catch (MalformedURLException | URISyntaxException e1) {
- log.error("Failed to create URL in order to load elasticsearch yml", e1);
- return true;
- }
-
- this.elasticSearchClient = new ElasticSearchClient();
- this.elasticSearchClient.setClusterName(settings.get("cluster.name"));
- this.elasticSearchClient.setLocal(settings.get("elasticSearch.local"));
- this.elasticSearchClient.setTransportClient(settings.get("elasticSearch.transportclient"));
- try {
- elasticSearchClient.initialize();
- } catch (URISyntaxException e) {
- log.error(e.getMessage());
- return false;
- }
- return true;
- }
-
- /**
- * the method clears all the cassandra tables
- */
- private void truncateCassandraTable() {
- log.info("import failed. truncating Cassandra tables.");
- artifactCassandraDao.deleteAllArtifacts();
- auditCassandraDao.deleteAllAudit();
- }
-
- /**
- * the method imports the records from the files into cassandra
- *
- * @param files
- * a map of files holding
- * @return true if the operation was successful
- */
- private boolean importToCassndra(Map<Table, File> files) {
- log.info("starting to import date into Cassandra.");
- if (!validtaTablsNotEmpty(files))
- return true;
- for (Table table : files.keySet()) {
- log.info("importing recordes into {}", table.getTableDescription().getTableName());
- if (!handleImport(files, table)) {
- truncateCassandraTable();
- return false;
- }
- }
- log.info("finished to import date into Cassandra.");
- return true;
- }
-
- private boolean validtaTablsNotEmpty(Map<Table, File> files) {
- for (Table table : files.keySet()) {
- Either<Boolean, CassandraOperationStatus> isTableEmptyRes = checkIfTableIsEmpty(table);
- if (isTableEmptyRes.isRight() || !isTableEmptyRes.left().value()) {
- log.error("Cassandra table {} is not empty, operation aborted.",
- table.getTableDescription().getTableName());
- return false;
- }
- }
- return true;
- }
-
- /**
- * the method retrieves the fields from the given map and generates the
- * corresponding audit event according to the table name
- *
- * @param map
- * the map from which the audit field enum values are retrieved
- * @param table
- * the table in which the record will be stored.
- * @return an AuditingGenericEvent representing the audit record that is going to be
- * created.
- */
- AuditingGenericEvent createAuditEvent(Map<AuditingFieldsKey, String> map, Table table) {
- AuditEventFactory factory = null;
- switch (table) {
- case USER_ADMIN_EVENT:
- factory = getAuditUserAdminEventFactory(map);
- break;
- case USER_ACCESS_EVENT:
- factory = getAuditUserAccessEventFactory(map);
- break;
- case RESOURCE_ADMIN_EVENT:
- factory = getAuditResourceAdminEventMigrationFactory(map);
- break;
- case DISTRIBUTION_DOWNLOAD_EVENT:
- factory = getAuditDistributionDownloadEventFactory(map);
- break;
- case DISTRIBUTION_ENGINE_EVENT:
- factory = getAuditDistributionEngineEventMigrationFactory(map);
- break;
- case DISTRIBUTION_NOTIFICATION_EVENT:
- factory = getAuditDistributionNotificationEventFactory(map);
- break;
- case DISTRIBUTION_STATUS_EVENT:
- factory = getAuditDistributionStatusEventFactory(map);
- break;
- case DISTRIBUTION_DEPLOY_EVENT:
- factory = getAuditDistributionDeployEventFactory(map);
- break;
- case DISTRIBUTION_GET_UEB_CLUSTER_EVENT:
- factory = getAuditGetUebClusterEventFactory(map);
- break;
- case AUTH_EVENT:
- factory = getAuditAuthRequestEventFactory(map);
- break;
- case CONSUMER_EVENT:
- factory = getAuditConsumerEventFactory(map);
- break;
- case CATEGORY_EVENT:
- factory = getAuditCategoryEventFactory(map);
- break;
- case GET_USERS_LIST_EVENT:
- factory = getAuditGetUsersListEventFactory(map);
- break;
- case GET_CATEGORY_HIERARCHY_EVENT:
- factory = getAuditGetCategoryHierarchyEventFactory(map);
- break;
- default:
- break;
- }
- return factory != null ? factory.getDbEvent() : null;
- }
-
- private AuditEventFactory getAuditGetCategoryHierarchyEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditGetCategoryHierarchyEventFactory(
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
- map.get(AuditingFieldsKey.AUDIT_DETAILS),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditGetUsersListEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditGetUsersListEventFactory(
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
- map.get(AuditingFieldsKey.AUDIT_USER_DETAILS),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditCategoryEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditCategoryEventFactory(
- AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
- map.get(AuditingFieldsKey.AUDIT_CATEGORY_NAME),
- map.get(AuditingFieldsKey.AUDIT_SUB_CATEGORY_NAME),
- map.get(AuditingFieldsKey.AUDIT_GROUPING_NAME),
- map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditUserAccessEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditUserAccessEventFactory(
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_USER_UID),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditUserAdminEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditUserAdminEventFactory(
- AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
- map.get(AuditingFieldsKey.AUDIT_USER_BEFORE),
- map.get(AuditingFieldsKey.AUDIT_USER_AFTER),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditConsumerEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditConsumerEventFactory(
- AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
- map.get(AuditingFieldsKey.AUDIT_ECOMP_USER),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditAuthRequestEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditAuthRequestEventFactory(
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_USER_UID),
- map.get(AuditingFieldsKey.AUDIT_AUTH_URL),
- map.get(AuditingFieldsKey.AUDIT_AUTH_REALM),
- map.get(AuditingFieldsKey.AUDIT_AUTH_STATUS),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditGetUebClusterEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditGetUebClusterEventFactory(
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditResourceAdminEventMigrationFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditResourceAdminEventMigrationFactory(
- AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- new ResourceCommonInfo(map.get(AuditingFieldsKey.AUDIT_RESOURCE_NAME),
- map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE)),
- ResourceVersionInfo.newBuilder()
- .artifactUuid(map.get(AuditingFieldsKey.AUDIT_PREV_ARTIFACT_UUID))
- .state(map.get(AuditingFieldsKey.AUDIT_RESOURCE_PREV_STATE))
- .version(map.get(AuditingFieldsKey.AUDIT_RESOURCE_PREV_VERSION))
- .distributionStatus(map.get(AuditingFieldsKey.AUDIT_RESOURCE_DPREV_STATUS))
- .build(),
- ResourceVersionInfo.newBuilder()
- .artifactUuid(map.get(AuditingFieldsKey.AUDIT_CURR_ARTIFACT_UUID))
- .state(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_STATE))
- .version(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION))
- .distributionStatus(map.get(AuditingFieldsKey.AUDIT_RESOURCE_DCURR_STATUS))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_INVARIANT_UUID),
- map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
- map.get(AuditingFieldsKey.AUDIT_ARTIFACT_DATA),
- map.get(AuditingFieldsKey.AUDIT_RESOURCE_COMMENT),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
- map.get(AuditingFieldsKey.AUDIT_RESOURCE_TOSCA_NODE_TYPE),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditDistributionDownloadEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditDistributionDownloadEventFactory(
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- new DistributionData(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
- map.get(AuditingFieldsKey.AUDIT_RESOURCE_URL)),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditDistributionEngineEventMigrationFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditDistributionEngineEventMigrationFactory(
- AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- DistributionTopicData.newBuilder()
- .notificationTopic(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_NOTIFICATION_TOPIC_NAME))
- .statusTopic(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_STATUS_TOPIC_NAME))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_API_KEY),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ENVRIONMENT_NAME),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ROLE),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditDistributionDeployEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditDistributionDeployEventFactory(
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- new ResourceCommonInfo(map.get(AuditingFieldsKey.AUDIT_RESOURCE_NAME),
- map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE)),
- map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
- map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditDistributionStatusEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditDistributionStatusEventFactory(
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- new DistributionData(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
- map.get(AuditingFieldsKey.AUDIT_RESOURCE_URL)),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_TOPIC_NAME),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_STATUS_TIME),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
- private AuditEventFactory getAuditDistributionNotificationEventFactory(Map<AuditingFieldsKey, String> map) {
- return new AuditDistributionNotificationEventFactory(
- CommonAuditData.newBuilder()
- .description(map.get(AuditingFieldsKey.AUDIT_DESC))
- .status(map.get(AuditingFieldsKey.AUDIT_STATUS))
- .requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
- .serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
- .build(),
- new ResourceCommonInfo(map.get(AuditingFieldsKey.AUDIT_RESOURCE_NAME),
- map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE)),
- ResourceVersionInfo.newBuilder()
- .state(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_STATE))
- .version(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION))
- .build(),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
- map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_TOPIC_NAME),
- new OperationalEnvAuditData(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ENVIRONMENT_ID),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_VNF_WORKLOAD_CONTEXT),
- map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_TENANT)),
- map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
- }
-
-
-
- /**
- * the method reads the content of the file intended for a given table and
- * stores the records in Cassandra
- *
- * @param files
- * a map of the files from which the records will be retrieved.
- * @param table
- * the table whose records will be read from its file and stored
- * in Cassandra
- * @return true if the operation was successful
- */
- private boolean handleImport(Map<Table, File> files, Table table) {
- BufferedReader br = null;
- try {
- br = new BufferedReader(new FileReader(files.get(table)));
- String line = null;
- while ((line = br.readLine()) != null) {
- CassandraOperationStatus res = CassandraOperationStatus.GENERAL_ERROR;
- if (Table.ARTIFACT.equals(table)) {
- res = artifactCassandraDao.saveArtifact(jsonMapper.readValue(line, ESArtifactData.class));
- }
- else {
- AuditingGenericEvent recordForCassandra = createAuditRecordForCassandra(line, table);
- if (recordForCassandra != null) {
- res = auditCassandraDao.saveRecord(recordForCassandra);
- }
- }
- if (!res.equals(CassandraOperationStatus.OK)) {
- log.error("save record to Cassandra {} failed with status {}, aborting.",
- table.getTableDescription().getTableName(), res);
- return false;
- }
- }
- return true;
- } catch (IOException e) {
- log.error("failed to read file", e);
- return false;
- } finally {
- if (br != null) {
- try {
- br.close();
- } catch (IOException e) {
- log.error("failed to close file reader", e);
- }
- }
- }
- }
-
- AuditingGenericEvent createAuditRecordForCassandra(String json, Table table) throws IOException{
- return createAuditEvent(parseToMap(json), table);
- }
-
- private Map<AuditingFieldsKey, String> parseToMap(String json) throws IOException {
- return jsonMapper.readValue(json, new TypeReference<Map<AuditingFieldsKey, String>>(){});
- }
-
- /**
- * the method checks if the given table is empty
- *
- * @param table
- * the name of the table we want to check
- * @return true if the table is empty
- */
- private Either<Boolean, CassandraOperationStatus> checkIfTableIsEmpty(Table table) {
- if (Table.ARTIFACT.equals(table)) {
- return artifactCassandraDao.isTableEmpty(table.getTableDescription().getTableName());
- } else {
- return auditCassandraDao.isTableEmpty(table.getTableDescription().getTableName());
- }
- }
-
- private boolean filesEmpty(Map<Table, File> files) {
- for (Table table : files.keySet()) {
- File file = files.get(table);
- if (file.length() != 0) {
- log.info("file {} is not empty, skipping export", table.getTableDescription().getTableName());
- return false;
- }
- }
- return true;
- }
-
- /**
- * the method reads the records from the ES audit index and writes them to a
- * file as JSON.
- *
- * @param value
- * the name of the index to export
- * @param printerWritersMap
- * a map of the writers used to write to the files.
- * @return true in case the export was successful.
- */
- private boolean exportAudit(String value, Map<Table, PrintWriter> printerWritersMap) {
- log.info("starting to export audit data from ES index {} to file.", value);
- QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
- SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(value).setScroll(new TimeValue(60000))
- .setQuery(queryBuilder).setSize(100).execute().actionGet();
- while (true) {
- for (SearchHit hit : scrollResp.getHits().getHits()) {
- PrintWriter out = printerWritersMap.get(TypeToTableMapping.getTableByType(hit.getType()));
- out.println(hit.getSourceAsString());
- }
- scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
- .setScroll(new TimeValue(60000)).execute().actionGet();
- if (scrollResp.getHits().getHits().length == 0) {
- break;
-
- }
- }
-
- log.info("export of audit data from ES to file finished successfully");
- return true;
- }
-
- /**
- * the method reads the records from the ES resources index and writes them to
- * a file as JSON.
- *
- * @param index
- * the name of the index to read
- * @param printerWritersMap
- * a map of the writers used to write to the files.
- * @return true in case the export was successful.
- */
- private boolean exportArtifacts(String index, Map<Table, PrintWriter> printerWritersMap) {
- log.info("starting to export artifact data from ES to file.");
- PrintWriter out = printerWritersMap.get(Table.ARTIFACT);
- QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
- SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(index).setScroll(new TimeValue(60000))
- .setQuery(queryBuilder).setSize(100).execute().actionGet();
- while (true) {
- for (SearchHit hit : scrollResp.getHits().getHits()) {
- out.println(hit.getSourceAsString());
- }
- scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
- .setScroll(new TimeValue(60000)).execute().actionGet();
- if (scrollResp.getHits().getHits().length == 0) {
- break;
-
- }
- }
-
- log.info("export of artifact data from ES to file finished successfully");
- return true;
- }
-
- /**
- * the method retrieves all the indexes from Elasticsearch
- *
- * @return a map of indexes and their metadata
- */
- private ImmutableOpenMap<String, IndexMetaData> getIndexData() {
- return elasticSearchClient.getClient().admin().cluster().prepareState().get().getState().getMetaData()
- .getIndices();
- }
-
- /**
- * the method creates all the files and the dir which holds them. In case the
- * files already exist they will not be created again.
- *
- * @param appConfigDir
- * the base path under which the output dir and the export result
- * files will be created. The created files are named according
- * to the name of the table into which they will be imported.
- * @param exportToEs
- * if true all the export files will be recreated
- * @return a map of tables and the files representing them
- */
- private Map<Table, File> createOutPutFiles(String appConfigDir, boolean exportToEs) {
- Map<Table, File> result = new EnumMap<Table, File>(Table.class);
- File outputDir = new File(appConfigDir + "/output/");
- if (!createOutPutFolder(outputDir)) {
- return null;
- }
- for (Table table : Table.values()) {
- File file = new File(outputDir + "/" + table.getTableDescription().getTableName());
- if (exportToEs) {
- try {
- if (file.exists()) {
- Files.delete(file.toPath());
- }
- } catch (IOException e) {
- log.error("failed to delete output file {}", file.getAbsolutePath(), e);
- return null;
- }
- file = new File(outputDir + "/" + table.getTableDescription().getTableName());
- }
- if (!file.exists()) {
- try {
- file.createNewFile();
- } catch (IOException e) {
- log.error("failed to create output file {}", file.getAbsolutePath(), e);
- return null;
- }
- }
- result.put(table, file);
-
- }
- return result;
- }
-
- /**
- * the method creates the writers for each file
- *
- * @param files
- * a map of the files, keyed by table
- * @return a map of writers, keyed by table.
- */
- private Map<Table, PrintWriter> createWriters(Map<Table, File> files) {
- Map<Table, PrintWriter> printerWritersMap = new EnumMap<>(Table.class);
-
- for (Table table : files.keySet()) {
- log.info("creating writer for {}", table);
- File file = files.get(table);
- try(PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file)) )){
- printerWritersMap.put(table, out);
- log.info("creating writer for {} was successful", table);
- } catch (IOException e) {
- log.error("creating writer for file failed", e);
- return null;
- }
- }
- return printerWritersMap;
- }
-
- /**
- * the method creates the output dir in case it does not exist
- *
- * @param outputDir
- * the path under which the directory will be created.
- * @return true in case the creation was successful or the dir already exists
- */
- private boolean createOutPutFolder(File outputDir) {
- if (!outputDir.exists()) {
- log.info("creating output dir {}", outputDir.getAbsolutePath());
- try {
- Files.createDirectories(outputDir.toPath());
- } catch (IOException e) {
- log.error("failed to create output dir {}", outputDir.getAbsolutePath(), e);
- return false;
- }
- }
- return true;
- }
-
- public enum TypeToTableMapping {
- USER_ADMIN_EVENT_TYPE(AuditingTypesConstants.USER_ADMIN_EVENT_TYPE,
- Table.USER_ADMIN_EVENT), USER_ACCESS_EVENT_TYPE(AuditingTypesConstants.USER_ACCESS_EVENT_TYPE,
- Table.USER_ACCESS_EVENT), RESOURCE_ADMIN_EVENT_TYPE(
- AuditingTypesConstants.RESOURCE_ADMIN_EVENT_TYPE,
- Table.RESOURCE_ADMIN_EVENT), DISTRIBUTION_DOWNLOAD_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_DOWNLOAD_EVENT_TYPE,
- Table.DISTRIBUTION_DOWNLOAD_EVENT), DISTRIBUTION_ENGINE_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_ENGINE_EVENT_TYPE,
- Table.DISTRIBUTION_ENGINE_EVENT), DISTRIBUTION_NOTIFICATION_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_NOTIFICATION_EVENT_TYPE,
- Table.DISTRIBUTION_NOTIFICATION_EVENT), DISTRIBUTION_STATUS_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_STATUS_EVENT_TYPE,
- Table.DISTRIBUTION_STATUS_EVENT), DISTRIBUTION_DEPLOY_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_DEPLOY_EVENT_TYPE,
- Table.DISTRIBUTION_DEPLOY_EVENT), DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE(
- AuditingTypesConstants.DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE,
- Table.DISTRIBUTION_GET_UEB_CLUSTER_EVENT), AUTH_EVENT_TYPE(
- AuditingTypesConstants.AUTH_EVENT_TYPE,
- Table.AUTH_EVENT), CONSUMER_EVENT_TYPE(
- AuditingTypesConstants.CONSUMER_EVENT_TYPE,
- Table.CONSUMER_EVENT), CATEGORY_EVENT_TYPE(
- AuditingTypesConstants.CATEGORY_EVENT_TYPE,
- Table.CATEGORY_EVENT), GET_USERS_LIST_EVENT_TYPE(
- AuditingTypesConstants.GET_USERS_LIST_EVENT_TYPE,
- Table.GET_USERS_LIST_EVENT), GET_CATEGORY_HIERARCHY_EVENT_TYPE(
- AuditingTypesConstants.GET_CATEGORY_HIERARCHY_EVENT_TYPE,
- Table.GET_CATEGORY_HIERARCHY_EVENT);
-
- String typeName;
- Table table;
-
- TypeToTableMapping(String typeName, Table table) {
- this.typeName = typeName;
- this.table = table;
- }
-
- public String getTypeName() {
- return typeName;
- }
-
- public Table getTable() {
- return table;
- }
-
- public static Table getTableByType(String type) {
- for (TypeToTableMapping mapping : TypeToTableMapping.values()) {
- if (mapping.getTypeName().equalsIgnoreCase(type)) {
- return mapping.getTable();
- }
- }
- return null;
- }
- }
-
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java
deleted file mode 100644
index 6b15cb0075..0000000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.impl;
-
-import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
-import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-@Configuration
-public class EsToCassandraDataMigrationConfig {
- @Bean(name = "DataMigrationBean")
- public DataMigration dataMigration(AuditCassandraDao auditCassandraDao, ArtifactCassandraDao artifactCassandraDao) {
- return new DataMigration(auditCassandraDao, artifactCassandraDao);
- }
-
- @Bean(name = "artifact-cassandra-dao")
- public ArtifactCassandraDao artifactCassandraDao(CassandraClient cassandraClient) {
- return new ArtifactCassandraDao(cassandraClient);
- }
-
- @Bean(name = "audit-cassandra-dao")
- public AuditCassandraDao auditCassandraDao(CassandraClient cassandraClient) {
- return new AuditCassandraDao(cassandraClient);
- }
-
- @Bean(name = "cassandra-client")
- public CassandraClient cassandraClient() {
- return new CassandraClient();
- }
-
- @Bean(name = "sdc-schema-files-cassandra-dao")
- public SdcSchemaFilesCassandraDao sdcSchemaFilesCassandraDao(CassandraClient cassandraClient) {
- return new SdcSchemaFilesCassandraDao(cassandraClient);
- }
-}
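Note: the two removals above (DataMigration and its Spring wiring) together formed the whole ES-to-Cassandra migration path. The sketch below shows how the removed pieces fit together; the standalone main class and its argument handling are illustrative assumptions and not code from this repository, while the bean name, constructor, and method signatures do appear in the removed files. As described in the removed javadoc, the run aborts if the target Cassandra tables are not empty.

import org.openecomp.sdc.asdctool.impl.DataMigration;
import org.openecomp.sdc.asdctool.impl.EsToCassandraDataMigrationConfig;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;

public class EsToCassandraMigrationSketch {
    public static void main(String[] args) {
        // Assumption: the config dir is passed as the first argument and is also
        // exposed as "config.home", which the removed initEsClient() read to
        // locate elasticsearch.yml.
        String appConfigDir = args[0];
        System.setProperty("config.home", appConfigDir);
        try (AnnotationConfigApplicationContext context =
                     new AnnotationConfigApplicationContext(EsToCassandraDataMigrationConfig.class)) {
            DataMigration migration = (DataMigration) context.getBean("DataMigrationBean");
            // Export from ES into per-table files under <appConfigDir>/output/,
            // then import those files into the (empty) Cassandra tables.
            boolean ok = migration.migrateDataESToCassndra(appConfigDir, true, true);
            System.exit(ok ? 0 : 2);
        }
    }
}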
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
index 57a7c251aa..2b96ba30b8 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
@@ -22,7 +22,6 @@ package org.openecomp.sdc.asdctool.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
-import java.util.stream.Stream;
import org.openecomp.sdc.common.log.wrappers.Logger;
import java.io.IOException;
@@ -31,6 +30,7 @@ import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Stream;
/**
* simple util class to verify that the janusgraph export json graph is not corrupted
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
index deb766fff9..ae4a55903a 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
@@ -21,20 +21,6 @@
package org.openecomp.sdc.asdctool.impl;
import com.google.gson.Gson;
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.tinkerpop.gremlin.structure.Element;
@@ -54,6 +40,21 @@ import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
import org.openecomp.sdc.common.log.wrappers.Logger;
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
public class GraphMLConverter {
private static final String STORAGE_BACKEND = "storage.backend";
@@ -95,6 +96,7 @@ public class GraphMLConverter {
return importJsonGraph(graph, inputFile, propertiesCriteriaToDelete);
} catch (Exception e) {
+ e.printStackTrace();
log.info("import graph failed ", e);
return false;
} finally {
@@ -121,6 +123,7 @@ public class GraphMLConverter {
log.info(LOG_FORMATTER, EXPORTED_FILE, result);
} catch (Exception e) {
+ e.printStackTrace();
log.info("export graph failed ", e);
return false;
} finally {
@@ -145,6 +148,7 @@ public class GraphMLConverter {
log.info(LOG_FORMATTER, EXPORTED_FILE, result);
} catch (Exception e) {
+ e.printStackTrace();
log.info("export exportGraphMl failed ", e);
return null;
} finally {
@@ -172,6 +176,7 @@ public class GraphMLConverter {
log.info(LOG_FORMATTER, EXPORTED_FILE, result);
} catch (Exception e) {
+ e.printStackTrace();
log.info("find Error In Json Graph failed ", e);
return false;
} finally {
@@ -208,8 +213,10 @@ public class GraphMLConverter {
result = outputFile;
} catch (Exception e) {
+ e.printStackTrace();
log.info("export Json Graph failed ", e);
graph.tx().rollback();
+ e.printStackTrace();
}
return result;
@@ -227,6 +234,7 @@ public class GraphMLConverter {
graph.tx().commit();
} catch (Exception e) {
graph.tx().rollback();
+ e.printStackTrace();
log.info("export Graph Ml failed ", e);
}
return result;
@@ -278,7 +286,9 @@ public class GraphMLConverter {
} catch (Exception e) {
log.info("Failed to import graph ", e);
+ e.printStackTrace();
graph.tx().rollback();
+ e.printStackTrace();
}
return result;
@@ -306,8 +316,10 @@ public class GraphMLConverter {
graph.tx().rollback();
} catch (Exception e) {
+ e.printStackTrace();
log.info("find Error In Json Graph failed ", e);
graph.tx().rollback();
+ e.printStackTrace();
}
return result;
@@ -355,8 +367,10 @@ public class GraphMLConverter {
result = outputFile;
} catch (Exception e) {
+ e.printStackTrace();
log.info("export Users failed ", e);
graph.tx().rollback();
+ e.printStackTrace();
}
return result;
@@ -397,6 +411,7 @@ public class GraphMLConverter {
log.info(EXPORTED_FILE, result);
} catch (Exception e) {
+ e.printStackTrace();
log.info("export Users failed ", e);
return false;
} finally {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
index 853709225a..21e22be10c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
@@ -20,12 +20,6 @@
package org.openecomp.sdc.asdctool.impl;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
@@ -38,6 +32,13 @@ import org.jdom2.util.IteratorIterable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
public class GraphMLDataAnalyzer {
private static Logger log = LoggerFactory.getLogger(GraphMLDataAnalyzer.class);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java
index fbebe2cd27..33fbf29f17 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java
@@ -20,12 +20,16 @@
package org.openecomp.sdc.asdctool.impl;
-import org.janusgraph.core.*;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphException;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphQuery;
+import org.janusgraph.core.PropertyKey;
import org.janusgraph.core.schema.ConsistencyModifier;
import org.janusgraph.core.schema.JanusGraphIndex;
import org.janusgraph.core.schema.JanusGraphManagement;
-import org.apache.tinkerpop.gremlin.structure.Edge;
-import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.openecomp.sdc.be.dao.graph.datatype.ActionEnum;
import org.openecomp.sdc.be.dao.graph.datatype.GraphElementTypeEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
index 883f5e50f4..e60640fa26 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
@@ -20,10 +20,10 @@
package org.openecomp.sdc.asdctool.impl;
-import org.janusgraph.core.JanusGraphFactory;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
import org.janusgraph.core.JanusGraphVertex;
-import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
import org.openecomp.sdc.common.log.wrappers.Logger;
@@ -75,7 +75,8 @@ public class ProductLogic {
graph.tx().commit();
return productsToDelete;
} catch (Exception e) {
- log.info("get All Products failed - {}" , e);
+ e.printStackTrace();
+ log.info("get All Products failed - {}" , e);
if(graph != null) {
graph.tx().rollback();
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
index 9911fb73f3..7f9064fe71 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
@@ -20,10 +20,10 @@
package org.openecomp.sdc.asdctool.impl;
-import org.janusgraph.core.JanusGraphFactory;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
import org.janusgraph.core.JanusGraphQuery;
-import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.openecomp.sdc.asdctool.Utils;
import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
@@ -99,7 +99,7 @@ public class UpdatePropertyOnVertex {
return numberOfUpdatedVertexes;
} catch (Exception e) {
- log.info("update Property On Service At Least Certified failed -{}" , e);
+ e.printStackTrace();
graph.tx().rollback();
return null;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java
index cee0ded669..3633be7c98 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java
@@ -42,7 +42,11 @@ import org.openecomp.sdc.be.model.operations.StorageException;
import org.openecomp.sdc.common.log.wrappers.Logger;
import java.io.IOException;
-import java.util.*;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java
index 1808175d99..0adaf51ed5 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java
@@ -19,13 +19,13 @@
*/
package org.openecomp.sdc.asdctool.impl.internal.tool;
-import java.io.IOException;
-import java.util.Map;
-
import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
import org.openecomp.sdc.asdctool.utils.ReportWriter;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import java.io.IOException;
+import java.util.Map;
+
public abstract class CommonInternalTool {
protected ReportWriter reportWriter;
private String reportType;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java
index 78f0ecbc88..8d74ea5abb 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java
@@ -19,24 +19,14 @@
*/
package org.openecomp.sdc.asdctool.impl.internal.tool;
-import java.io.IOException;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Scanner;
-import java.util.function.Supplier;
-import java.util.stream.Collectors;
-
import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
@@ -45,7 +35,7 @@ import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
+import org.openecomp.sdc.be.resources.data.DAOArtifactData;
import org.openecomp.sdc.be.tosca.CsarUtils;
import org.openecomp.sdc.be.tosca.ToscaExportHandler;
import org.openecomp.sdc.common.api.ArtifactTypeEnum;
@@ -53,6 +43,16 @@ import org.openecomp.sdc.common.log.wrappers.Logger;
import org.openecomp.sdc.common.util.GeneralUtility;
import org.springframework.beans.factory.annotation.Autowired;
+import java.io.IOException;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Scanner;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
@org.springframework.stereotype.Component("csarGenerator")
public class CsarGenerator extends CommonInternalTool {
@@ -174,7 +174,7 @@ public class CsarGenerator extends CommonInternalTool {
csarArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
- ESArtifactData artifactData = new ESArtifactData(csarArtifact.getEsId(), decodedPayload);
+ DAOArtifactData artifactData = new DAOArtifactData(csarArtifact.getEsId(), decodedPayload);
artifactCassandraDao.saveArtifact(artifactData);
ConsoleWriter.dataLine("Artifact generated and saved into Cassandra ", csarArtifact.getArtifactLabel());
report(component, csarArtifact);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java
index 3bd13d0eff..62dd489810 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java
@@ -19,11 +19,11 @@
*/
package org.openecomp.sdc.asdctool.impl.internal.tool;
-import org.janusgraph.core.JanusGraphVertex;
import fj.data.Either;
import org.apache.tinkerpop.gremlin.structure.Direction;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraphVertex;
import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java
index cc75dfc968..cdb3b2c5a5 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java
@@ -24,14 +24,14 @@
package org.openecomp.sdc.asdctool.impl.validator.config;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
/**
* Created by chaya on 7/4/2017.
*/
@@ -76,6 +76,7 @@ public class ValidationConfigManager {
input = new FileInputStream(path);
prop.load(input);
} catch (IOException ex) {
+ ex.printStackTrace();
log.info("FileInputStream failed - {}", ex);
}
return prop;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
index d59a9aae6a..b83417c6c1 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
@@ -20,11 +20,16 @@
package org.openecomp.sdc.asdctool.impl.validator.config;
-import java.util.List;
import org.openecomp.sdc.asdctool.impl.VrfObjectFixHandler;
import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL;
import org.openecomp.sdc.asdctool.impl.validator.ValidationToolBL;
-import org.openecomp.sdc.asdctool.impl.validator.executers.*;
+import org.openecomp.sdc.asdctool.impl.validator.executers.IArtifactValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.NodeToscaArtifactsValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceToscaArtifactsValidatorExecutor;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.VFToscaArtifactValidatorExecutor;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.VfValidatorExecuter;
import org.openecomp.sdc.asdctool.impl.validator.tasks.VfValidationTask;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ArtifactValidationUtils;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ServiceArtifactValidationTask;
@@ -46,7 +51,13 @@ import org.openecomp.sdc.be.dao.janusgraph.JanusGraphGenericDao;
import org.openecomp.sdc.be.dao.jsongraph.HealingJanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.DerivedNodeTypeResolver;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.*;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ArchiveOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ByToscaNameDerivedNodeTypeResolver;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.GroupsOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.NodeTemplateOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.NodeTypeOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.TopologyTemplateOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.operations.api.IGraphLockOperation;
import org.openecomp.sdc.be.model.operations.impl.GraphLockOperation;
import org.openecomp.sdc.config.CatalogBESpringConfig;
@@ -58,6 +69,8 @@ import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.FileSystemResource;
+import java.util.List;
+
/**
* Created by chaya on 7/3/2017.
*/
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
index 33cf9d46da..a5e7d7873b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
@@ -34,8 +34,19 @@ import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.common.log.wrappers.Logger;
-import java.io.*;
-import java.util.*;
+import java.io.BufferedWriter;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
import java.util.stream.Collectors;
public class ArtifactValidatorExecuter{
@@ -134,7 +145,7 @@ public class ArtifactValidatorExecuter{
try {
// "service name, service id, state, version
for(Component component: components ){
- StringBuilder sb = new StringBuilder(component.getName());
+ StringBuffer sb = new StringBuffer(component.getName());
sb.append(",").append(component.getUniqueId()).append(",").append(component.getInvariantUUID()).append(",").append(component.getLifecycleState()).append(",").append(component.getVersion());
sb.append("\n");
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
index 99e6637a08..9fa92200ac 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
@@ -26,11 +26,11 @@ import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
public class NodeToscaArtifactsValidatorExecuter extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{
protected String name;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
index ed4b6eabf8..aad803e672 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
@@ -26,12 +26,12 @@ import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+import org.springframework.beans.factory.annotation.Autowired;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
-import org.springframework.beans.factory.annotation.Autowired;
@org.springframework.stereotype.Component
public class ServiceToscaArtifactsValidatorExecutor extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java
index 8f36dd3d85..a52fb370d5 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java
@@ -25,10 +25,10 @@ import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
-import org.springframework.stereotype.Component;
/**
* Created by chaya on 7/4/2017.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java
index 5287ea1b7a..ca027cb3ed 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java
@@ -24,17 +24,22 @@ import fj.data.Either;
import org.openecomp.sdc.asdctool.impl.validator.tasks.TopologyTemplateValidationTask;
import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.beans.factory.annotation.Autowired;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
/**
* Created by chaya on 7/3/2017.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
index 29d49ae592..f1c9af681c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
@@ -27,11 +27,11 @@ import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
public class VFToscaArtifactValidatorExecutor extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java
index eefd19594a..181495ad44 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java
@@ -20,13 +20,14 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import java.util.List;
import org.openecomp.sdc.asdctool.impl.validator.tasks.VfValidationTask;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.springframework.beans.factory.annotation.Autowired;
+import java.util.List;
+
/**
* Created by chaya on 7/3/2017.
*/
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
index 2e804cc8a2..11c80eae70 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
@@ -21,12 +21,7 @@
package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-
+import fj.data.Either;
import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
@@ -41,7 +36,11 @@ import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.beans.factory.annotation.Autowired;
-import fj.data.Either;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
/**
* Created by chaya on 7/6/2017.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java
index d6fafcbb5e..d45c896799 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java
@@ -37,7 +37,12 @@ import org.openecomp.sdc.be.model.jsonjanusgraph.operations.TopologyTemplateOper
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.springframework.beans.factory.annotation.Autowired;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
import java.util.stream.Collectors;
/**
@@ -108,8 +113,7 @@ public class ModuleJsonTask extends ServiceValidationTask {
}
private boolean isAfterSubmitForTesting(GraphVertex vertex){
- List allowedStates = new ArrayList<>(Arrays.asList(LifecycleStateEnum.READY_FOR_CERTIFICATION.name(),
- LifecycleStateEnum.CERTIFICATION_IN_PROGRESS.name(), LifecycleStateEnum.CERTIFIED.name()));
+ List allowedStates = new ArrayList<>(Arrays.asList(LifecycleStateEnum.CERTIFIED.name()));
return allowedStates.contains(vertex.getMetadataProperty(GraphPropertyEnum.STATE));
}
}
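Note: the hunk above narrows isAfterSubmitForTesting from three lifecycle states (READY_FOR_CERTIFICATION, CERTIFICATION_IN_PROGRESS, CERTIFIED) down to CERTIFIED only. A minimal standalone sketch of the narrowed check, using a plain String for the vertex state instead of the GraphVertex/GraphPropertyEnum lookup; the nested enum is a simplified stand-in for the SDC LifecycleStateEnum.

    import java.util.Collections;
    import java.util.List;

    public class LifecycleStateCheckSketch {

        // Simplified stand-in for the SDC lifecycle-state enum.
        enum LifecycleStateEnum { READY_FOR_CERTIFICATION, CERTIFICATION_IN_PROGRESS, CERTIFIED }

        // After this change only CERTIFIED components pass the check.
        private static final List<String> ALLOWED_STATES =
                Collections.singletonList(LifecycleStateEnum.CERTIFIED.name());

        static boolean isAfterSubmitForTesting(String vertexState) {
            return ALLOWED_STATES.contains(vertexState);
        }

        public static void main(String[] args) {
            System.out.println(isAfterSubmitForTesting("CERTIFIED"));                 // true
            System.out.println(isAfterSubmitForTesting("CERTIFICATION_IN_PROGRESS")); // false after this change
        }
    }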
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
index 2be8f921fa..e575ffce24 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
@@ -21,19 +21,22 @@
package org.openecomp.sdc.asdctool.impl.validator.utils;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.nio.file.StandardOpenOption;
-import java.util.*;
-
import org.apache.commons.lang.text.StrBuilder;
import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
/**
* Created by chaya on 7/5/2017.
*/
@@ -50,6 +53,7 @@ public class ReportManager {
initCsvFile();
initReportFile();
} catch (IOException e) {
+ e.printStackTrace();
log.info("Init file failed - {}", e.getClass().getSimpleName(), e);
}
}
@@ -98,6 +102,7 @@ public class ReportManager {
Files.write(Paths.get(reportOutputFilePath), new StrBuilder().appendNewLine().toString().getBytes(), StandardOpenOption.APPEND);
Files.write(Paths.get(reportOutputFilePath), message.getBytes(), StandardOpenOption.APPEND);
} catch (IOException e) {
+ e.printStackTrace();
log.info("write to file failed - {}", e.getClass().getSimpleName(), e);
}
}
@@ -145,6 +150,7 @@ public class ReportManager {
new StrBuilder().appendNewLine().toString().getBytes(),
StandardOpenOption.APPEND);
} catch (IOException e) {
+ e.printStackTrace();
log.info("write to file failed - {}", e.getClass().getSimpleName(), e);
}
}));
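Note: the three hunks above share one pattern: append a newline and then the message to the report file with Files.write and StandardOpenOption.APPEND, logging any IOException. A minimal standalone sketch of that append step, assuming the file may not exist yet (so CREATE is added alongside APPEND; the real class initializes the file separately) and with plain stderr in place of the SDC log wrappers.

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.StandardOpenOption;

    public class ReportAppendSketch {

        // Appends a line separator followed by the message, creating the file if missing.
        static void appendToReport(Path reportFile, String message) {
            try {
                Files.write(reportFile, System.lineSeparator().getBytes(StandardCharsets.UTF_8),
                        StandardOpenOption.CREATE, StandardOpenOption.APPEND);
                Files.write(reportFile, message.getBytes(StandardCharsets.UTF_8),
                        StandardOpenOption.CREATE, StandardOpenOption.APPEND);
            } catch (IOException e) {
                System.err.println("write to file failed - " + e.getClass().getSimpleName());
            }
        }

        public static void main(String[] args) {
            appendToReport(Paths.get("report.txt"), "vertex scanned");
        }
    }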
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java
index 511e9baffc..882a4e12aa 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java
@@ -20,14 +20,14 @@
package org.openecomp.sdc.asdctool.main;
-import java.util.Scanner;
-
import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
import org.openecomp.sdc.asdctool.configuration.CsarGeneratorConfiguration;
import org.openecomp.sdc.asdctool.impl.internal.tool.CsarGenerator;
import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+import java.util.Scanner;
+
public class CsarGeneratorTool extends SdcInternalTool {
public static void main(String[] args) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
index d2d5e77f2b..4b3496faa6 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
@@ -35,67 +35,70 @@ public class DataSchemaMenu {
private static Logger log = Logger.getLogger(DataSchemaMenu.class.getName());
- public static void main(String[] args) {
+ public static void main(String[] args) {
String operation = args[0];
- String appConfigDir = args[1];
+ String appConfigDir = args[1];
- if (args == null || args.length < 2) {
- usageAndExit();
- }
+ if (args == null || args.length < 2) {
+ usageAndExit();
+ }
- ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
- ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
+ ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
+ ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
+
+ try {
SdcSchemaBuilder sdcSchemaBuilder = new SdcSchemaBuilder(new SdcSchemaUtils(),
ConfigurationManager.getConfigurationManager().getConfiguration()::getCassandraConfig);
- switch (operation.toLowerCase()) {
- case "create-cassandra-structures":
- log.debug("Start create cassandra keyspace, tables and indexes");
+ switch (operation.toLowerCase()) {
+ case "create-cassandra-structures":
+ log.debug("Start create cassandra keyspace, tables and indexes");
if (sdcSchemaBuilder.createSchema()) {
- log.debug("create cassandra keyspace, tables and indexes successfull");
- System.exit(0);
- } else {
- log.debug("create cassandra keyspace, tables and indexes failed");
- System.exit(2);
- }
- break;
+ log.debug("create cassandra keyspace, tables and indexes successfull");
+ System.exit(0);
+ } else {
+ log.debug("create cassandra keyspace, tables and indexes failed");
+ System.exit(2);
+ }
case "create-janusgraph-structures":
log.debug("Start create janusgraph keyspace");
String janusGraphCfg = 2 == args.length ? configurationManager.getConfiguration().getJanusGraphCfgFile() : args[2];
if (JanusGraphInitializer.createGraph(janusGraphCfg)) {
log.debug("create janusgraph keyspace successfull");
- System.exit(0);
- } else {
+ System.exit(0);
+ } else {
log.debug("create janusgraph keyspace failed");
- System.exit(2);
- }
- break;
- case "clean-cassndra":
- log.debug("Start clean keyspace, tables");
+ System.exit(2);
+ }
+ case "clean-cassndra":
+ log.debug("Start clean keyspace, tables");
if (sdcSchemaBuilder.deleteSchema()) {
- log.debug(" successfull");
- System.exit(0);
- } else {
- log.debug(" failed");
- System.exit(2);
- }
- break;
- default:
- usageAndExit();
- break;
- }
- }
+ log.debug(" successfull");
+ System.exit(0);
+ } else {
+ log.debug(" failed");
+ System.exit(2);
+ }
+ default:
+ usageAndExit();
+ }
+ } catch (Throwable t) {
+ t.printStackTrace();
+ log.debug("create cassandra keyspace, tables and indexes failed");
+ System.exit(3);
+ }
+ }
- private static void usageAndExit() {
- DataSchemeUsage();
- System.exit(1);
- }
+ private static void usageAndExit() {
+ DataSchemeUsage();
+ System.exit(1);
+ }
- private static void DataSchemeUsage() {
- System.out.println("Usage: create-cassandra-structures <configuration dir> ");
+ private static void DataSchemeUsage() {
+ System.out.println("Usage: create-cassandra-structures <configuration dir> ");
System.out.println("Usage: create-janusgraph-structures <configuration dir> ");
- }
+ }
}
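Note: in the reindented main above, every switch branch ends in System.exit, so control never actually falls through, but the explicit break statements from the old version are gone. A minimal standalone sketch of the same dispatch shape with the breaks kept, using stub methods in place of SdcSchemaBuilder and JanusGraphInitializer; the stub names and the default config file name are illustrative only.

    public class DataSchemaDispatchSketch {

        // Stub stand-ins for SdcSchemaBuilder.createSchema()/deleteSchema() and JanusGraphInitializer.createGraph().
        static boolean createSchema()          { return true; }
        static boolean createGraph(String cfg) { return true; }
        static boolean deleteSchema()          { return true; }

        public static void main(String[] args) {
            if (args.length < 2) {
                usageAndExit();
            }
            String operation = args[0];
            switch (operation.toLowerCase()) {
                case "create-cassandra-structures":
                    System.exit(createSchema() ? 0 : 2);
                    break;
                case "create-janusgraph-structures":
                    System.exit(createGraph(args.length > 2 ? args[2] : "janusgraph.properties") ? 0 : 2);
                    break;
                case "clean-cassndra": // operation name kept as spelled in the tool
                    System.exit(deleteSchema() ? 0 : 2);
                    break;
                default:
                    usageAndExit();
            }
        }

        private static void usageAndExit() {
            System.out.println("Usage: create-cassandra-structures <configuration dir> ");
            System.out.println("Usage: create-janusgraph-structures <configuration dir> ");
            System.exit(1);
        }
    }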
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java
index d30249eb6b..309d23ca14 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java
@@ -20,14 +20,14 @@
package org.openecomp.sdc.asdctool.main;
-import java.util.Scanner;
-
import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
import org.openecomp.sdc.asdctool.configuration.InternalToolConfiguration;
import org.openecomp.sdc.asdctool.impl.internal.tool.DeleteComponentHandler;
import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+import java.util.Scanner;
+
public class DeleteComponentTool extends SdcInternalTool{
private static final String PSW = "ItIsTimeToDelete";
@@ -39,11 +39,6 @@ public class DeleteComponentTool extends SdcInternalTool{
String appConfigDir = args[0];
String password = args[1];
- if ( !PSW.equals(password) ){
- ConsoleWriter.dataLine("Wrong password");
- System.exit(1);
- }
-
disableConsole();
ConsoleWriter.dataLine("STARTED... ");
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
deleted file mode 100644
index c119d7e1ff..0000000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.main;
-
-import org.openecomp.sdc.asdctool.impl.DataMigration;
-import org.openecomp.sdc.asdctool.impl.EsToCassandraDataMigrationConfig;
-import org.openecomp.sdc.be.config.ConfigurationManager;
-import org.openecomp.sdc.common.api.ConfigurationSource;
-import org.openecomp.sdc.common.impl.ExternalConfiguration;
-import org.openecomp.sdc.common.impl.FSConfigurationSource;
-import org.openecomp.sdc.common.log.wrappers.Logger;
-import org.springframework.context.annotation.AnnotationConfigApplicationContext;
-
-public class EsToCassandraDataMigrationMenu {
-
- private static Logger log = Logger.getLogger(EsToCassandraDataMigrationMenu.class.getName());
-
- public static void main(String[] args) {
-
- if (args == null || args.length < 2) {
- usageAndExit();
- }
- String operation = args[0];
-
- String appConfigDir = args[1];
- System.setProperty("config.home", appConfigDir);
- ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(),
- appConfigDir);
- ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
-
- AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
- EsToCassandraDataMigrationConfig.class);
- DataMigration dataMigration = null;
- try {
- switch (operation.toLowerCase()) {
- case "es-to-cassndra-migration":
- dataMigration = (DataMigration) context.getBean("DataMigrationBean");
- log.debug("Start migration from ES to C* ");
- if (dataMigration.migrateDataESToCassndra(appConfigDir, true, true)) {
- log.debug("migration from ES to C* was finished successfull");
- System.exit(0);
- } else {
- log.debug("migration from ES to C* failed");
- System.exit(2);
- }
- break;
- case "es-to-cassndra-migration-export-only":
- dataMigration = (DataMigration) context.getBean("DataMigrationBean");
- log.debug("Start migration export only from ES to C* ");
- if (dataMigration.migrateDataESToCassndra(appConfigDir, true, false)) {
- log.debug("migration export only from ES to C* was finished successfull");
- System.exit(0);
- } else {
- log.debug("migration export only from ES to C* failed");
- System.exit(2);
- }
- break;
- case "es-to-cassndra-migration-import-only":
- dataMigration = (DataMigration) context.getBean("DataMigrationBean");
- log.debug("Start migration import only from ES to C* ");
- if (dataMigration.migrateDataESToCassndra(appConfigDir, false, true)) {
- log.debug("migration import only from ES to C* was finished successfull");
- System.exit(0);
- } else {
- log.debug("migration import only from ES to C* failed");
- System.exit(2);
- }
- break;
- default:
- usageAndExit();
- }
- } catch (Throwable t) {
- log.info("data migration failed - {}", t);
- System.exit(3);
- }
- }
-
- private static void usageAndExit() {
- MigrationUsage();
- System.exit(1);
- }
-
- private static void MigrationUsage() {
- System.out.println(
- "Usage: es-to-cassndra-migration/es-to-cassndra-migration-import-only/es-to-cassndra-migration-export-only <configuration dir>");
- }
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
index 9c8ca992aa..98aea2648c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
@@ -22,13 +22,14 @@
package org.openecomp.sdc.asdctool.main;
+import org.openecomp.sdc.asdctool.impl.GraphJsonValidator;
+import org.openecomp.sdc.asdctool.impl.GraphMLConverter;
+import org.openecomp.sdc.asdctool.impl.GraphMLDataAnalyzer;
+
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
-import org.openecomp.sdc.asdctool.impl.GraphJsonValidator;
-import org.openecomp.sdc.asdctool.impl.GraphMLConverter;
-import org.openecomp.sdc.asdctool.impl.GraphMLDataAnalyzer;
public class ExportImportMenu {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java
deleted file mode 100644
index 3c4f7458d9..0000000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.main;
-
-import fj.data.Either;
-import org.openecomp.sdc.asdctool.cli.CLIToolData;
-import org.openecomp.sdc.asdctool.cli.SpringCLITool;
-import org.openecomp.sdc.asdctool.configuration.GetConsumersConfiguration;
-import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
-import org.openecomp.sdc.be.model.operations.impl.ConsumerOperation;
-import org.openecomp.sdc.be.resources.data.ConsumerData;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-
-public class GetConsumersMenu extends SpringCLITool {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(GetConsumersMenu.class);
-
- public static void main(String[] args) {
- GetConsumersMenu getConsumersMenu = new GetConsumersMenu();
- CLIToolData cliToolData = getConsumersMenu.init(args);
- ConsumerOperation consumersService = cliToolData.getSpringApplicationContext().getBean(ConsumerOperation.class);
- printConsumers(getConsumersMenu, consumersService);
- }
-
- private static void printConsumers(GetConsumersMenu getConsumersMenu, ConsumerOperation consumersService) {
- Either<List<ConsumerData>, StorageOperationStatus> allConsumers = consumersService.getAll();
- allConsumers.left().foreachDoEffect(getConsumersMenu::printConsumers);
- allConsumers.right().foreachDoEffect(getConsumersMenu::printErr);
- }
-
- private void printConsumers(List<ConsumerData> consumers) {
- System.out.println("SDC consumers: ");
- consumers.forEach(consumer -> {
- System.out.println("#########################");
- System.out.println(consumer);
- });
- System.exit(0);
- }
-
- private void printErr(StorageOperationStatus err) {
- String errMsg = String.format("failed to fetch consumers. reason: %s", err);
- LOGGER.error(errMsg);
- System.err.print(errMsg);
- System.exit(1);
- }
-
- @Override
- protected String commandName() {
- return "get-consumers";
- }
-
- @Override
- protected Class<?> getSpringConfigurationClass() {
- return GetConsumersConfiguration.class;
- }
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java
index aeb7abe63a..2fb99e29f3 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java
@@ -20,11 +20,10 @@
package org.openecomp.sdc.asdctool.main;
+import ch.qos.logback.core.Appender;
import org.openecomp.sdc.common.log.wrappers.Logger;
import org.slf4j.LoggerFactory;
-import ch.qos.logback.core.Appender;
-
public abstract class SdcInternalTool {
protected static void disableConsole() {
org.slf4j.Logger rootLogger = LoggerFactory.getILoggerFactory().getLogger(Logger.ROOT_LOGGER_NAME);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
index eb1d487cdd..47a08ea70e 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
@@ -20,6 +20,21 @@
package org.openecomp.sdc.asdctool.main;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.lang3.ArrayUtils;
+import org.openecomp.sdc.asdctool.configuration.SdcSchemaFileImportConfiguration;
+import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
+import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
+import org.openecomp.sdc.be.resources.data.SdcSchemaFilesData;
+import org.openecomp.sdc.common.api.ConfigurationSource;
+import org.openecomp.sdc.common.impl.ExternalConfiguration;
+import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+import org.yaml.snakeyaml.DumperOptions;
+import org.yaml.snakeyaml.Yaml;
+
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
@@ -36,20 +51,6 @@ import java.util.Map;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.lang3.ArrayUtils;
-import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;
-import org.openecomp.sdc.asdctool.impl.EsToCassandraDataMigrationConfig;
-import org.openecomp.sdc.be.config.ConfigurationManager;
-import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
-import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
-import org.openecomp.sdc.be.resources.data.SdcSchemaFilesData;
-import org.openecomp.sdc.common.api.ConfigurationSource;
-import org.openecomp.sdc.common.impl.ExternalConfiguration;
-import org.openecomp.sdc.common.impl.FSConfigurationSource;
-import org.springframework.context.annotation.AnnotationConfigApplicationContext;
-import org.yaml.snakeyaml.DumperOptions;
-import org.yaml.snakeyaml.Yaml;
public class SdcSchemaFileImport {
@@ -106,10 +107,11 @@ public class SdcSchemaFileImport {
//Loop over schema file list and create each yaml file from /import/tosca folder
SchemaZipFileEnum[] schemaFileList = SchemaZipFileEnum.values();
for (SchemaZipFileEnum schemaZipFileEnum : schemaFileList) {
- String pathname = importToscaPath + SEPARATOR + schemaZipFileEnum.getSourceFolderName() + SEPARATOR + schemaZipFileEnum.getSourceFileName() + YAML_EXTENSION;
- try(InputStream input = new FileInputStream(new File(pathname));) {
+ try {
//get the source yaml file
+ String pathname = importToscaPath + SEPARATOR + schemaZipFileEnum.getSourceFolderName() + SEPARATOR + schemaZipFileEnum.getSourceFileName() + YAML_EXTENSION;
System.out.println("Processing file "+pathname+"....");
+ InputStream input = new FileInputStream(new File(pathname));
//Convert the content of file to yaml
Yaml yamlFileSource = new Yaml();
Object content = yamlFileSource.load(input);
@@ -275,6 +277,6 @@ public class SdcSchemaFileImport {
private static AnnotationConfigApplicationContext initContext(String appConfigDir) {
ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
new ConfigurationManager(configurationSource);
- return new AnnotationConfigApplicationContext(EsToCassandraDataMigrationConfig.class);
+ return new AnnotationConfigApplicationContext(SdcSchemaFileImportConfiguration.class);
}
}
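Note: the rewritten loop above opens the per-schema FileInputStream outside a try-with-resources block, so the stream is no longer closed explicitly. A minimal standalone sketch of the same load step with the stream managed by try-with-resources, using SnakeYAML as in the original; the path handling is simplified to a single file name passed by the caller.

    import org.yaml.snakeyaml.Yaml;

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class YamlLoadSketch {

        // Loads one schema yaml file and returns the parsed content, closing the stream in all cases.
        static Object loadYaml(String pathname) throws IOException {
            try (InputStream input = new FileInputStream(pathname)) {
                Yaml yaml = new Yaml();
                return yaml.load(input);
            }
        }

        public static void main(String[] args) throws IOException {
            Object content = loadYaml(args.length > 0 ? args[0] : "data.yml");
            System.out.println(content);
        }
    }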
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
index e5e0740794..65f8c7be72 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
@@ -27,6 +27,8 @@ import org.openecomp.sdc.asdctool.migration.dao.MigrationTasksDao;
import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver;
import org.openecomp.sdc.asdctool.migration.resolver.SpringBeansMigrationResolver;
import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
+import org.openecomp.sdc.be.components.distribution.engine.DmaapClientFactory;
+import org.openecomp.sdc.be.components.health.HealthCheckBusinessLogic;
import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
import org.openecomp.sdc.be.components.scheduledtasks.ComponentsCleanBusinessLogic;
@@ -42,12 +44,10 @@ import org.openecomp.sdc.be.model.operations.api.IGroupTypeOperation;
import org.openecomp.sdc.be.model.operations.impl.InterfaceLifecycleOperation;
import org.openecomp.sdc.config.CatalogBESpringConfig;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
-import org.springframework.core.io.FileSystemResource;
import java.util.ArrayList;
import java.util.List;
@@ -55,8 +55,8 @@ import java.util.List;
@Configuration
@Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
@ComponentScan({"org.openecomp.sdc.asdctool.migration.tasks",//migration tasks
- "org.openecomp.sdc.asdctool.migration.config.mocks"
- })
+ "org.openecomp.sdc.asdctool.migration.config.mocks",
+ "org.openecomp.sdc.be.filters" })
public class MigrationSpringConfig {
@Autowired(required=false)
@@ -85,14 +85,6 @@ public class MigrationSpringConfig {
return new MigrationTasksDao(cassandraClient);
}
- @Bean(name = "elasticsearchConfig")
- public PropertiesFactoryBean mapper() {
- String configHome = System.getProperty("config.home");
- PropertiesFactoryBean bean = new PropertiesFactoryBean();
- bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
- return bean;
- }
-
@Bean(name = "componentsCleanBusinessLogic")
public ComponentsCleanBusinessLogic componentsCleanBusinessLogic(
IElementOperation elementDao,
@@ -108,5 +100,12 @@ public class MigrationSpringConfig {
groupInstanceOperation, groupTypeOperation, interfaceOperation, interfaceLifecycleTypeOperation, resourceBusinessLogic,
serviceBusinessLogic, artifactToscaOperation);
}
+
+ @Bean(name = "dmaapClientFactory")
+ public DmaapClientFactory getDmaapClientFactory() {return new DmaapClientFactory();}
+ @Bean(name = "healthCheckBusinessLogic")
+ public HealthCheckBusinessLogic getHealthCheckBusinessLogic() {
+ return new HealthCheckBusinessLogic();
+ }
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/CambriaHandlerMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/CambriaHandlerMock.java
new file mode 100644
index 0000000000..e8c6a955f0
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/CambriaHandlerMock.java
@@ -0,0 +1,87 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.config.mocks;
+
+import com.att.nsa.apiClient.credentials.ApiCredential;
+import fj.data.Either;
+import org.openecomp.sdc.be.components.distribution.engine.CambriaErrorResponse;
+import org.openecomp.sdc.be.components.distribution.engine.ICambriaHandler;
+import org.openecomp.sdc.be.components.distribution.engine.INotificationData;
+import org.openecomp.sdc.be.components.distribution.engine.SubscriberTypeEnum;
+import org.springframework.stereotype.Component;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+
+@Component("cambriaHandler")
+public class CambriaHandlerMock implements ICambriaHandler {
+
+ @Override
+ public Either<Set<String>, CambriaErrorResponse> getTopics(List<String> hostSet) {
+ return null;
+ }
+
+ @Override
+ public CambriaErrorResponse createTopic(Collection<String> hostSet, String apiKey, String secretKey, String topicName, int partitionCount, int replicationCount) {
+ return null;
+ }
+
+ @Override
+ public CambriaErrorResponse unRegisterFromTopic(Collection<String> hostSet, String managerApiKey, String managerSecretKey, String subscriberApiKey, SubscriberTypeEnum subscriberTypeEnum, String topicName) {
+ return null;
+ }
+
+ @Override
+ public CambriaErrorResponse registerToTopic(Collection<String> hostSet, String managerApiKey, String managerSecretKey, String subscriberApiKey, SubscriberTypeEnum subscriberTypeEnum, String topicName) {
+ return null;
+ }
+
+ @Override
+ public com.att.nsa.cambria.client.CambriaConsumer createConsumer(Collection<String> hostSet, String topicName, String apiKey, String secretKey, String consumerId, String consumerGroup, int timeoutMS) throws Exception {
+ return null;
+ }
+
+ @Override
+ public CambriaErrorResponse sendNotification(String topicName, String uebPublicKey, String uebSecretKey, List<String> uebServers, INotificationData data) {
+ return null;
+ }
+
+ @Override
+ public CambriaErrorResponse sendNotificationAndClose(String topicName, String uebPublicKey, String uebSecretKey, List<String> uebServers, INotificationData data, long waitBeforeCloseTimeout) {
+ return null;
+ }
+
+ @Override
+ public CambriaErrorResponse getApiKey(String server, String apiKey) {
+ return null;
+ }
+
+ @Override
+ public Either<ApiCredential, CambriaErrorResponse> createUebKeys(List<String> hostSet) {
+ return null;
+ }
+
+ @Override
+ public Either<Iterable<String>, CambriaErrorResponse> fetchFromTopic(com.att.nsa.cambria.client.CambriaConsumer topicConsumer) {
+ return null;
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
index c4150e41a1..dde7f6a2ac 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
@@ -29,6 +29,8 @@ import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.resources.data.OperationalEnvironmentEntry;
import org.springframework.stereotype.Component;
+import java.util.List;
+
@Component("distributionEngine")
public class DistributionEngineMock implements IDistributionEngine {
@Override
@@ -76,4 +78,9 @@ public class DistributionEngineMock implements IDistributionEngine {
return null;
}
+ @Override
+ public OperationalEnvironmentEntry getEnvironmentByDmaapUebAddress(List<String> dmaapUebAddress) {
+ return null;
+ }
+
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/PortalHealthCheckBuilderMock.java
index 04b398b7ce..46470cc04e 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/PortalHealthCheckBuilderMock.java
@@ -2,14 +2,14 @@
* ============LICENSE_START=======================================================
* SDC
* ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,27 +18,27 @@
* ============LICENSE_END=========================================================
*/
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
+package org.openecomp.sdc.asdctool.migration.config.mocks;
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
+import org.openecomp.sdc.be.components.health.PortalHealthCheckBuilder;
+import org.springframework.stereotype.Component;
-public class ElasticSearchClientMock extends ElasticSearchClient {
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
- @Override
- public void initialize() {
+@Component("portalHealthCheckBusinessLogic")
+public class PortalHealthCheckBuilderMock extends PortalHealthCheckBuilder {
- }
@Override
- public void setClusterName(final String clusterName) {
-
+ @PostConstruct
+ public PortalHealthCheckBuilder init() {
+ return null;
}
@Override
- public void setLocal(final String strIsLocal) {
- }
+ @PreDestroy
+ protected void destroy() {
- @Override
- public void setTransportClient(final String strIsTransportclient) {
}
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
index a713f9243a..17c3aea42c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
@@ -33,7 +33,7 @@ public class DBVersion implements Comparable<DBVersion>{
/**
* The current db version. should be tested against real db to verify it is compatible to the db version
*/
- public static final DBVersion CURRENT_VERSION = new DBVersion(1710, 0);
+ public static final DBVersion DEFAULT_VERSION = new DBVersion(1710, 0);
private DBVersion(BigInteger major, BigInteger minor) {
this.major = major;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java
index f341ab2792..5ed2e56408 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java
@@ -27,7 +27,7 @@ public interface PostMigration extends IMigrationStage {
@Override
default
public DBVersion getVersion() {
- return DBVersion.CURRENT_VERSION;
+ return DBVersion.DEFAULT_VERSION;
}
@Override
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java
index 2dd51fcefd..74c74052e7 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java
@@ -31,6 +31,9 @@ public interface MigrationTasksAccessor {
@Query("SELECT minor_version FROM sdcrepository.migrationTasks WHERE major_version = :majorVersion order by minor_version desc limit 1")
ResultSet getLatestMinorVersion(@Param("majorVersion") Long majorVersion);
+ @Query("SELECT major_version FROM sdcrepository.migrationTasks")
+ ResultSet getLatestMajorVersion();
+
@Query("DELETE FROM sdcrepository.migrationTasks WHERE major_version = :majorVersion")
void deleteTasksForMajorVersion(@Param("majorVersion") Long majorVersion);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
index 1d7e66250e..aabd4d8bb0 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
@@ -27,6 +27,7 @@ import com.datastax.driver.mapping.Mapper;
import com.datastax.driver.mapping.MappingManager;
import fj.data.Either;
import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
import org.openecomp.sdc.be.dao.cassandra.CassandraDao;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
@@ -38,6 +39,9 @@ import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.math.BigInteger;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
@Service
public class MigrationTasksDao extends CassandraDao {
@@ -77,13 +81,29 @@ public class MigrationTasksDao extends CassandraDao {
try {
ResultSet latestMinorVersion = migrationTasksAccessor.getLatestMinorVersion(majorVersion.longValue());
Row minorVersionRow = latestMinorVersion.one();
- return minorVersionRow == null ? BigInteger.valueOf(Long.MIN_VALUE) : BigInteger.valueOf(minorVersionRow.getLong(0));
+ return minorVersionRow == null ? DBVersion.DEFAULT_VERSION.getMinor() : BigInteger.valueOf(minorVersionRow.getLong(0));
} catch (RuntimeException e) {
logger.error("failed to get latest minor version for major version {}", majorVersion, e);
throw e;
}
}
+ public BigInteger getLatestMajorVersion() {
+ try {
+ ResultSet latestMajorVersion = migrationTasksAccessor.getLatestMajorVersion();
+ List<Row> all = latestMajorVersion.all();
+ Long majorVersionRow = null;
+            if (!all.isEmpty()) {
+ List<Long> majorVersions = all.stream().map(p -> p.getLong(0)).collect(Collectors.toList());
+ majorVersionRow = Collections.max(majorVersions);
+ }
+ return majorVersionRow == null ? DBVersion.DEFAULT_VERSION.getMajor() : BigInteger.valueOf(majorVersionRow);
+ } catch (RuntimeException e) {
+ logger.error("failed to get latest major version ", e);
+ throw e;
+ }
+ }
+
public void deleteAllTasksForVersion(BigInteger majorVersion) {
try {
migrationTasksAccessor.deleteTasksForMajorVersion(majorVersion.longValue());
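Note: getLatestMajorVersion above selects every major_version row and reduces to the maximum on the client, falling back to the default version when the table is empty. A minimal standalone sketch of that reduction over a plain list of longs, with a hypothetical DEFAULT_MAJOR constant standing in for DBVersion.DEFAULT_VERSION.getMajor().

    import java.math.BigInteger;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class LatestMajorVersionSketch {

        // Hypothetical stand-in for DBVersion.DEFAULT_VERSION.getMajor().
        private static final BigInteger DEFAULT_MAJOR = BigInteger.valueOf(1710);

        // Mirrors the DAO logic: take the max of the returned rows, or the default when none exist.
        static BigInteger latestMajorVersion(List<Long> majorVersionRows) {
            if (majorVersionRows.isEmpty()) {
                return DEFAULT_MAJOR;
            }
            return BigInteger.valueOf(Collections.max(majorVersionRows));
        }

        public static void main(String[] args) {
            System.out.println(latestMajorVersion(Arrays.asList(1710L, 2002L, 1902L))); // 2002
            System.out.println(latestMajorVersion(Collections.emptyList()));            // 1710
        }
    }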
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
index 9e62530d17..9141295dd4 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
@@ -35,13 +35,13 @@ public class SdcRepoService {
}
public DBVersion getLatestDBVersion() {
- BigInteger currentMajorVersion = DBVersion.CURRENT_VERSION.getMajor();
+ BigInteger currentMajorVersion = migrationTasksDao.getLatestMajorVersion();
BigInteger latestMinorVersion = migrationTasksDao.getLatestMinorVersion(currentMajorVersion);
- return latestMinorVersion == null ? DBVersion.from(currentMajorVersion, BigInteger.valueOf(Integer.MIN_VALUE)) : DBVersion.from(currentMajorVersion, latestMinorVersion);
+ return DBVersion.from(currentMajorVersion, latestMinorVersion);
}
public void clearTasksForCurrentMajor() {
- BigInteger currentMajorVersion = DBVersion.CURRENT_VERSION.getMajor();
+ BigInteger currentMajorVersion = DBVersion.DEFAULT_VERSION.getMajor();
migrationTasksDao.deleteAllTasksForVersion(currentMajorVersion);
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/InstanceMigrationBase.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/InstanceMigrationBase.java
new file mode 100644
index 0000000000..b0a1d502aa
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/InstanceMigrationBase.java
@@ -0,0 +1,178 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks;
+
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.janusgraph.core.JanusGraphVertex;
+import org.openecomp.sdc.asdctool.migration.tasks.mig2002.SdcCollapsingRolesRFCstateMigration;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgePropertyEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+public abstract class InstanceMigrationBase {
+
+ private static final Logger log = LoggerFactory.getLogger(InstanceMigrationBase.class);
+ protected JanusGraphDao janusGraphDao;
+
+ public InstanceMigrationBase(JanusGraphDao janusGraphDao) {
+ this.janusGraphDao = janusGraphDao;
+ }
+
+ protected StorageOperationStatus upgradeTopologyTemplates() {
+ Map<GraphPropertyEnum, Object> hasNotProps = new EnumMap<>(GraphPropertyEnum.class);
+ hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
+ hasNotProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.CVFC);
+
+ return janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, null, hasNotProps, JsonParseFlagEnum.ParseAll)
+ .either(this::proceed, this::handleError);
+ }
+
+ protected abstract StorageOperationStatus handleOneContainer(GraphVertex containerV);
+
+ protected StorageOperationStatus proceed(List<GraphVertex> containersV) {
+ int failureCounter = 0;
+ log.info("found {} vertices to migrate ", containersV.size());
+ for (GraphVertex container : containersV) {
+ StorageOperationStatus storageOperationStatus = handleOneContainer(container);
+ if (storageOperationStatus != StorageOperationStatus.OK) {
+ failureCounter++;
+ }
+ }
+
+ if (failureCounter > 0) {
+ log.info("Failed to update {} vertices", failureCounter);
+ } else {
+ log.info("All vertices were successfully updated");
+ }
+
+ return StorageOperationStatus.OK;
+ }
+
+ protected GraphVertex getVertexById(String vertexId) {
+ Either<GraphVertex, JanusGraphOperationStatus> vertexById = janusGraphDao.getVertexById(vertexId);
+ if (vertexById.isRight()) {
+            log.info("Exception occurred while querying vertexId: {} exception: {}", vertexId, vertexById.right().value());
+ return null;
+ }
+ else return vertexById.left().value();
+ }
+
+ protected StorageOperationStatus updateVertexAndCommit(GraphVertex graphVertex) {
+ StorageOperationStatus status;
+ if ((status = janusGraphDao.updateVertex(graphVertex)
+ .either(v -> StorageOperationStatus.OK, this::handleError)) != StorageOperationStatus.OK) {
+ return status;
+ }
+ return DaoStatusConverter.convertJanusGraphStatusToStorageStatus(janusGraphDao.commit());
+ }
+
+ protected StorageOperationStatus handleError(JanusGraphOperationStatus err) {
+ return DaoStatusConverter.convertJanusGraphStatusToStorageStatus(JanusGraphOperationStatus.NOT_FOUND == err ? JanusGraphOperationStatus.OK : err);
+ }
+
+ protected void removeEdges(Iterator<Edge> edges) {
+
+ while (edges.hasNext()) {
+ Edge edge = edges.next();
+ edge.remove();
+ }
+ }
+
+ protected void removeEdgesInState(Iterator<Edge> edges, String state) {
+
+ while (edges.hasNext()) {
+ Edge edge = edges.next();
+ String edgeState = (String) janusGraphDao.getProperty(edge, EdgePropertyEnum.STATE);
+ if (edgeState.equals(state)) {
+ edge.remove();
+ }
+ }
+ }
+
+
+ protected void updateEdgeProperty(EdgePropertyEnum property, String value, Iterator<Edge> edges) throws IOException {
+ while (edges.hasNext()) {
+ Edge edge = edges.next();
+ Map<EdgePropertyEnum, Object> prop = new HashMap<>();
+ prop.put(property, value);
+ janusGraphDao.setEdgeProperties(edge, prop);
+ }
+
+ }
+
+
+ // check if user has both edges state and last_state
+ protected boolean sameUser(List<JanusGraphVertex> stateList, List<JanusGraphVertex> lastStateList) {
+
+ for (JanusGraphVertex lsVertex : lastStateList) {
+ String idLs = (String) janusGraphDao.getProperty(lsVertex, GraphPropertyEnum.USERID.getProperty());
+ String idSt = (String) janusGraphDao.getProperty(stateList.get(0), GraphPropertyEnum.USERID.getProperty());
+ if (idLs.equals(idSt)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ protected List<JanusGraphVertex> getVertexByEdgeSide(Iterator<Edge> edges, SdcCollapsingRolesRFCstateMigration.EdgeSide side) {
+        List<JanusGraphVertex> vertexList = new ArrayList<>();
+ while (edges.hasNext()) {
+ Edge edge = edges.next();
+
+ if (side == SdcCollapsingRolesRFCstateMigration.EdgeSide.OUT) {
+ vertexList.add((JanusGraphVertex) edge.outVertex());
+ } else {
+ vertexList.add((JanusGraphVertex) edge.inVertex());
+ }
+ }
+
+ return vertexList;
+ }
+
+ protected Iterator<Edge> getVertexEdge(GraphVertex containerV, Direction direction, EdgeLabelEnum edgeLabel) {
+ return containerV.getVertex().edges(direction, edgeLabel.name());
+ }
+
+ public enum EdgeSide {
+ IN, OUT;
+ }
+}
+
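Note: InstanceMigrationBase above drives a migration by querying TOPOLOGY_TEMPLATE vertices and handing each one to handleOneContainer, counting failures. A minimal standalone sketch of the same template-method shape, with a plain String standing in for GraphVertex and a two-value enum standing in for StorageOperationStatus; the concrete subclass and its id convention are hypothetical.

    import java.util.Arrays;
    import java.util.List;

    public class InstanceMigrationSketch {

        enum Status { OK, GENERAL_ERROR } // simplified stand-in for StorageOperationStatus

        // Template-method shape of InstanceMigrationBase.proceed(): apply the hook per container, count failures.
        abstract static class MigrationBase {
            protected abstract Status handleOneContainer(String containerId);

            Status proceed(List<String> containerIds) {
                int failureCounter = 0;
                for (String id : containerIds) {
                    if (handleOneContainer(id) != Status.OK) {
                        failureCounter++;
                    }
                }
                System.out.println(failureCounter > 0
                        ? "Failed to update " + failureCounter + " vertices"
                        : "All vertices were successfully updated");
                return Status.OK;
            }
        }

        // Hypothetical concrete migration: accepts ids with a known prefix, fails the rest.
        static class ExampleMigration extends MigrationBase {
            @Override
            protected Status handleOneContainer(String containerId) {
                return containerId.startsWith("vf-") ? Status.OK : Status.GENERAL_ERROR;
            }
        }

        public static void main(String[] args) {
            new ExampleMigration().proceed(Arrays.asList("vf-1", "vf-2", "service-3"));
        }
    }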
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
index d51271bd69..758589cb53 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
@@ -82,7 +82,7 @@ public class XlsOutputHandler implements OutputHandler {
file.close();
return true;
} catch (Exception e) {
- log.debug("#writeOutputAndCloseFile - Failed to write an output file. ", e);
+ log.debug("#writeOutputAndCloseFile - Failed to write an output file. The {} exception occurred. ", e.getMessage());
return false;
}
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
index 49cd1fea88..8eda8640c6 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
@@ -26,12 +26,14 @@ import fj.data.Either;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
import org.openecomp.sdc.asdctool.migration.tasks.handlers.XlsOutputHandler;
import org.openecomp.sdc.be.components.impl.ComponentInstanceBusinessLogic;
import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.exceptions.ByActionStatusComponentException;
import org.openecomp.sdc.be.components.impl.exceptions.ByResponseFormatComponentException;
import org.openecomp.sdc.be.components.impl.exceptions.ComponentException;
import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
@@ -47,22 +49,37 @@ import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
-import org.openecomp.sdc.be.datatypes.enums.*;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.*;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.ComponentInstanceProperty;
+import org.openecomp.sdc.be.model.ComponentParametersView;
+import org.openecomp.sdc.be.model.LifeCycleTransitionEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.User;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.jsonjanusgraph.utils.ModelConverter;
-import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.model.operations.impl.CsarOperation;
import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
import org.openecomp.sdc.common.log.wrappers.Logger;
import org.openecomp.sdc.exception.ResponseFormat;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@@ -103,7 +120,7 @@ public class UpgradeMigration1710 implements PostMigration {
private LifecycleBusinessLogic lifecycleBusinessLogic;
@Autowired
- private IUserAdminOperation userAdminOperation;
+ private UserAdminOperation userAdminOperation;
@Autowired
private ResourceBusinessLogic resourceBusinessLogic;
@@ -481,20 +498,25 @@ public class UpgradeMigration1710 implements PostMigration {
private Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateComposition(org.openecomp.sdc.be.model.Component component) {
if (component != null && component.getComponentInstances() != null) {
- Either<ComponentInstance, ResponseFormat> upgradeInstanceRes;
for (ComponentInstance instance : component.getComponentInstances()) {
- upgradeInstanceRes = upgradeInstance(component, instance);
- if (upgradeInstanceRes.isRight()) {
- log.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "upgradeInstance", upgradeInstanceRes.right().value().getFormattedMessage());
- outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), upgradeInstanceRes.right().value().getFormattedMessage());
- return Either.right(upgradeInstanceRes.right().value());
+ try {
+ upgradeInstance(component, instance);
+ }catch (ComponentException e){
+ ResponseFormat responseFormat = e.getResponseFormat();
+ log.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(),
+ component.getInvariantUUID(), component.getVersion(), "upgradeInstance",
+ responseFormat.getFormattedMessage());
+ outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getUUID(),
+ component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(),
+ responseFormat.getFormattedMessage());
+ return Either.right(responseFormat);
}
}
}
return Either.left(component);
}
- private Either<ComponentInstance, ResponseFormat> upgradeInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) {
+ private ComponentInstance upgradeInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) {
log.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
ComponentInstance newComponentInstance = new ComponentInstance(instance);
if (instance.getOriginType() == OriginTypeEnum.ServiceProxy) {
@@ -503,35 +525,33 @@ public class UpgradeMigration1710 implements PostMigration {
return upgradeResourceInstance(component, instance, newComponentInstance);
}
- private Either<ComponentInstance, ResponseFormat> upgradeResourceInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
+ private ComponentInstance upgradeResourceInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
log.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
- Either<ComponentInstance, ResponseFormat> upgradeInstanceRes = null;
+ ComponentInstance upgradeInstanceRes = null;
VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name());
Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata);
if(getOriginRes.isRight()){
log.info("Upgrade of {} instance {} upon upgrade migration 1710 process failed due to a reason {}. ",
component.getComponentType().getValue(), instance.getName(), getOriginRes.right().value());
- upgradeInstanceRes = Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType())));
+ throw new ByActionStatusComponentException(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType()));
}
- if(upgradeInstanceRes == null) {
- copyComponentNameAndVersionToNewInstance(newComponentInstance, getOriginRes.left().value());
+ copyComponentNameAndVersionToNewInstance(newComponentInstance, getOriginRes.left().value());
- if(isGreater(getOriginRes.left().value().getVersion(), instance.getComponentVersion())){
- upgradeInstanceRes = changeAssetVersion(component, instance, newComponentInstance);
- }
- if((upgradeInstanceRes == null || upgradeInstanceRes.isLeft()) && isAllottedResource(instance.getComponentUid()) && MapUtils.isNotEmpty(component.getComponentInstancesProperties())){
- ComponentInstance instanceToUpdate = upgradeInstanceRes == null ? instance : upgradeInstanceRes.left().value();
- upgradeInstanceRes = Either.left(updateServiceUuidProperty(component, instanceToUpdate, component.getComponentInstancesProperties().get(instance.getUniqueId())));
- }
+ if(isGreater(getOriginRes.left().value().getVersion(), instance.getComponentVersion())){
+ upgradeInstanceRes = changeAssetVersion(component, instance, newComponentInstance);
+ }
+ if(isAllottedResource(instance.getComponentUid()) && MapUtils.isNotEmpty(component.getComponentInstancesProperties())){
+ ComponentInstance instanceToUpdate = upgradeInstanceRes == null ? instance : upgradeInstanceRes;
+ upgradeInstanceRes = updateServiceUuidProperty(component, instanceToUpdate, component.getComponentInstancesProperties().get(instance.getUniqueId()));
}
//upgrade nodes contained by CVFC
if(upgradeInstanceRes == null && isVfcUpgradeRequired && newComponentInstance.getOriginType() == OriginTypeEnum.CVFC &&
!upgradeVf(getOriginRes.left().value().getUniqueId(), false, true)) {
- upgradeInstanceRes = Either.right(componentsUtils.getResponseFormat(ActionStatus.GENERAL_ERROR));
+ throw new ByActionStatusComponentException(ActionStatus.GENERAL_ERROR);
}
if(upgradeInstanceRes == null){
- upgradeInstanceRes = Either.left(instance);
+ upgradeInstanceRes = instance;
}
log.info("Upgrade of {} instance {} upon upgrade migration 1710 process finished successfully. ",
component.getComponentType().getValue(), instance.getName());
@@ -584,17 +604,17 @@ public class UpgradeMigration1710 implements PostMigration {
return isAllottedResource(component.getUniqueId());
}
- private Either<ComponentInstance, ResponseFormat> upgradeServiceProxyInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
+ private ComponentInstance upgradeServiceProxyInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
Either<List<GraphVertex>, JanusGraphOperationStatus> getLatestOriginServiceRes = getLatestCertifiedService(instance.getSourceModelInvariant());
if (getLatestOriginServiceRes.isRight()) {
- return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(DaoStatusConverter.convertJanusGraphStatusToStorageStatus(getLatestOriginServiceRes.right().value()), instance.getOriginType().getComponentType())));
+ throw new ByActionStatusComponentException(componentsUtils.convertFromStorageResponse(DaoStatusConverter.convertJanusGraphStatusToStorageStatus(getLatestOriginServiceRes.right().value()), instance.getOriginType().getComponentType()));
}
ModelConverter.getVertexType(instance.getOriginType().name());
Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestByName(instance.getComponentName());
if(getOriginRes.isRight()){
log.info("Upgrade of {} instance {} upon upgrade migration 1710 process failed due to a reason {}. ",
component.getComponentType().getValue(), instance.getName(), getOriginRes.right().value());
- return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType())));
+ throw new ByActionStatusComponentException(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType()));
}
newComponentInstance.setComponentUid((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UNIQUE_ID));
return changeAssetVersion(component, instance, newComponentInstance);
@@ -613,7 +633,7 @@ public class UpgradeMigration1710 implements PostMigration {
.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseMetadata);
}
- private Either<ComponentInstance, ResponseFormat> changeAssetVersion(org.openecomp.sdc.be.model.Component containerComponent, ComponentInstance instance, ComponentInstance newComponentInstance) {
+ private ComponentInstance changeAssetVersion(org.openecomp.sdc.be.model.Component containerComponent, ComponentInstance instance, ComponentInstance newComponentInstance) {
return componentInstanceBusinessLogic.changeComponentInstanceVersion(ComponentTypeEnum.SERVICE_PARAM_NAME, containerComponent.getUniqueId(), instance.getUniqueId(), user.getUserId(), newComponentInstance);
}
@@ -857,7 +877,7 @@ public class UpgradeMigration1710 implements PostMigration {
log.info("Starting upgrade node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
log.info("Starting to find derived to for node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
Either<List<GraphVertex>, JanusGraphOperationStatus> parentResourceRes = janusGraphDao
- .getParentVertecies(nodeTypeV, EdgeLabelEnum.DERIVED_FROM, JsonParseFlagEnum.ParseMetadata);
+ .getParentVertices(nodeTypeV, EdgeLabelEnum.DERIVED_FROM, JsonParseFlagEnum.ParseMetadata);
if (parentResourceRes.isRight() && parentResourceRes.right().value() != JanusGraphOperationStatus.NOT_FOUND) {
return DaoStatusConverter.convertJanusGraphStatusToStorageStatus(parentResourceRes.right().value());
@@ -916,18 +936,9 @@ public class UpgradeMigration1710 implements PostMigration {
private Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> performFullCertification(org.openecomp.sdc.be.model.Component component) {
log.info("Starting to perform full certification of {} with name {}, invariantUUID {}, version {}. ",
component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion());
-
- Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CERTIFICATION_REQUEST, changeInfo, true, false);
- if (changeStateEither.isRight()) {
- log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFICATION_REQUEST);
- return changeStateEither;
- }
- changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.START_CERTIFICATION, changeInfo, true, false);
- if (changeStateEither.isRight()) {
- log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.START_CERTIFICATION);
- return changeStateEither;
- }
- changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.CERTIFY, changeInfo, true, false);
+ org.openecomp.sdc.be.model.Component updatedComponent = component;
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> changeStateEither;
+ changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), updatedComponent.getUniqueId(), user, LifeCycleTransitionEnum.CERTIFY, changeInfo, true, false);
if (changeStateEither.isRight()) {
log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFY);
} else {
@@ -997,6 +1008,7 @@ public class UpgradeMigration1710 implements PostMigration {
propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, componentType.name());
propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
+
Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
if (vertexType == VertexTypeEnum.TOPOLOGY_TEMPLATE && componentType == ComponentTypeEnum.RESOURCE) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java
index 812c07e6b0..f0f59c4450 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java
@@ -17,33 +17,23 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
import com.google.common.collect.ImmutableSet;
-import org.janusgraph.core.JanusGraphVertex;
import fj.data.Either;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
import org.apache.tinkerpop.gremlin.structure.Direction;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraphVertex;
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.asdctool.migration.core.task.Migration;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.dao.jsongraph.utils.IdBuilderUtils;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.datatypes.elements.ForwardingPathDataDefinition;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
@@ -56,7 +46,18 @@ import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
-@org.springframework.stereotype.Component
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+//@org.springframework.stereotype.Component
public class ForwardPathMigration implements Migration {
private JanusGraphDao janusGraphDao;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java
index ea1e4a5529..8f7fc3eeb5 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java
@@ -21,6 +21,7 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
import fj.data.Either;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.asdctool.migration.core.task.Migration;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
@@ -36,7 +37,6 @@ import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.LifeCycleTransitionEnum;
-import org.openecomp.sdc.be.model.LifecycleStateEnum;
import org.openecomp.sdc.be.model.User;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java
index bcb236338f..a28c27fe6c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java
@@ -24,34 +24,35 @@ import fj.data.Either;
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.asdctool.migration.core.task.Migration;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
-import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.CompositionDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.MapPropertiesDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.PropertyDataDefinition;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
-import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.model.jsonjanusgraph.enums.JsonConstantKeysEnum;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.NodeTemplateOperation;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
-import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.stereotype.Component;
import java.math.BigInteger;
-import java.util.*;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import java.util.Map.Entry;
+import java.util.Optional;
@Component
-public class SDCInstancesMigration implements Migration {
+public class SDCInstancesMigration extends InstanceMigrationBase implements Migration {
- private JanusGraphDao janusGraphDao;
private NodeTemplateOperation nodeTemplateOperation;
private static final Logger log = Logger.getLogger(SDCInstancesMigration.class);
@@ -62,7 +63,7 @@ public class SDCInstancesMigration implements Migration {
public SDCInstancesMigration(JanusGraphDao janusGraphDao, NodeTemplateOperation nodeTemplateOperation) {
- this.janusGraphDao = janusGraphDao;
+ super(janusGraphDao);
this.nodeTemplateOperation = nodeTemplateOperation;
}
@@ -78,41 +79,14 @@ public class SDCInstancesMigration implements Migration {
@Override
public MigrationResult migrate() {
- StorageOperationStatus status = connectAllContainers();
-
+ StorageOperationStatus status = upgradeTopologyTemplates();
return status == StorageOperationStatus.OK ? MigrationResult.success() : MigrationResult.error("failed to create connection between instances and origins. Error : " + status);
}
- private StorageOperationStatus connectAllContainers() {
- StorageOperationStatus status;
- Map<GraphPropertyEnum, Object> hasNotProps = new EnumMap<>(GraphPropertyEnum.class);
- hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
- hasNotProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.CVFC);
-
- status = janusGraphDao
- .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, null, hasNotProps, JsonParseFlagEnum.ParseAll)
- .either(this::connectAll, this::handleError);
- return status;
- }
-
- private StorageOperationStatus handleError(JanusGraphOperationStatus err) {
- return DaoStatusConverter.convertJanusGraphStatusToStorageStatus(
- JanusGraphOperationStatus.NOT_FOUND == err ? JanusGraphOperationStatus.OK : err);
- }
-
- private StorageOperationStatus connectAll(List<GraphVertex> containersV) {
+ protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
StorageOperationStatus status = StorageOperationStatus.OK;
- for (GraphVertex container : containersV) {
- status = handleOneContainer(container);
- if (status != StorageOperationStatus.OK) {
- break;
- }
- }
- return status;
- }
- private StorageOperationStatus handleOneContainer(GraphVertex containerV) {
- StorageOperationStatus status = StorageOperationStatus.OK;
+ GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
boolean needConnectAllotted = false;
ComponentTypeEnum componentType = containerV.getType();
@@ -154,6 +128,7 @@ public class SDCInstancesMigration implements Migration {
return status;
}
+
private Either<Map<String, MapPropertiesDataDefinition>, StorageOperationStatus> getInstProperties(GraphVertex containerV) {
Map<String, MapPropertiesDataDefinition> instanceProperties;
Either<GraphVertex, JanusGraphOperationStatus> instProps = janusGraphDao
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/InterfaceOperationMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/InterfaceOperationMigration.java
index 35e795095e..fd71336fd5 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/InterfaceOperationMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/InterfaceOperationMigration.java
@@ -17,10 +17,6 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
import fj.data.Either;
-import java.math.BigInteger;
-import java.util.EnumMap;
-import java.util.List;
-import java.util.Map;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
@@ -45,6 +41,11 @@ import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
import org.openecomp.sdc.common.log.wrappers.Logger;
import org.springframework.beans.factory.annotation.Autowired;
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+
@org.springframework.stereotype.Component
public class InterfaceOperationMigration implements Migration {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcConsumerMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcConsumerMigration.java
new file mode 100644
index 0000000000..16f0c485dc
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcConsumerMigration.java
@@ -0,0 +1,108 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphGenericDao;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.resources.data.ConsumerData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.List;
+
+@Component
+public class SdcConsumerMigration implements Migration {
+
+ private static final Logger logger = LoggerFactory.getLogger(SdcConsumerMigration.class);
+
+ private JanusGraphGenericDao janusGraphGenericDao;
+
+ public SdcConsumerMigration(JanusGraphGenericDao janusGraphGenericDao) {
+ this.janusGraphGenericDao = janusGraphGenericDao;
+ }
+
+ @Override
+ public String description() {
+ return "remove all consumer nodes";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(1902), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ JanusGraphOperationStatus status = null;
+ try {
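+ // remove every consumer credentials node; the finally block commits on success and rolls back otherwise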
+ status = handleConsumerNodes();
+ if (status == JanusGraphOperationStatus.OK){
+ logger.info("removed all consumer nodes.");
+ return MigrationResult.success();
+ } else {
+ return MigrationResult.error("failed to remove consumer nodes. error: " + status);
+ }
+ } finally {
+ commitOrRollBack(status);
+ }
+ }
+
+ private void commitOrRollBack(JanusGraphOperationStatus status) {
+ if (status == JanusGraphOperationStatus.OK) {
+ janusGraphGenericDao.commit();
+ } else {
+ janusGraphGenericDao.rollback();
+ }
+ }
+
+ private JanusGraphOperationStatus handleConsumerNodes() {
+ logger.info("getting all consumer nodes.");
+ return janusGraphGenericDao.getAll(NodeTypeEnum.ConsumerCredentials, ConsumerData.class)
+ .either(this::removeConsumerNodes, this::handleError);
+ }
+
+ private JanusGraphOperationStatus removeConsumerNodes(List<ConsumerData> consumerNodes){
+ logger.info("found {} consumer nodes.", consumerNodes.size());
+ return consumerNodes.stream()
+ .map(consumerNode -> janusGraphGenericDao.deleteNode(consumerNode, ConsumerData.class))
+ .filter(Either::isRight)
+ .map(either -> either.right().value())
+ .findAny()
+ .orElse(JanusGraphOperationStatus.OK);
+ }
+
+ private JanusGraphOperationStatus handleError(JanusGraphOperationStatus status){
+ if (status == JanusGraphOperationStatus.NOT_FOUND) {
+ logger.info("found 0 consumer nodes.");
+ return JanusGraphOperationStatus.OK;
+ } else{
+ return status;
+ }
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigration.java
new file mode 100644
index 0000000000..1045634ef9
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigration.java
@@ -0,0 +1,186 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
+
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.datatypes.elements.GroupDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.PropertyDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.GroupTypeDefinition;
+import org.openecomp.sdc.be.model.PropertyDefinition;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcGroupsMigration extends InstanceMigrationBase implements Migration {
+
+ private static final Logger log = LoggerFactory.getLogger(SdcGroupsMigration.class);
+
+ private final GroupTypeOperation groupTypeOperation;
+
+ private Map<String, GroupTypeDefinition> latestGroupTypeMap = new HashMap<>();
+
+ public enum GroupsForUpgrade {
+ NW_COLLECTION_GROUP_NAME("org.openecomp.groups.NetworkCollection"),
+ VFC_INSTANCE_GROUP_NAME("org.openecomp.groups.VfcInstanceGroup");
+
+ private String toscaType;
+
+ GroupsForUpgrade(String toscaType) {
+ this.toscaType = toscaType;
+ }
+
+ public static boolean containsToscaType(String type) {
+ try {
+ return Arrays.stream(values()).anyMatch(g->g.getToscaType().equals(type));
+ }
+ catch (IllegalArgumentException ex) {
+ return false;
+ }
+ }
+
+ public String getToscaType() {
+ return toscaType;
+ }
+
+ }
+ public SdcGroupsMigration(JanusGraphDao janusGraphDao, GroupTypeOperation groupTypeOperation) {
+ super(janusGraphDao);
+ this.groupTypeOperation = groupTypeOperation;
+ }
+
+ @Override
+ public String description() {
+ return "update derived from field value for NetworkCollection and VfcInstanceGroup group instances ";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(1902), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ loadLatestGroupTypeDefinitions();
+ StorageOperationStatus status = upgradeTopologyTemplates();
+ return status == StorageOperationStatus.OK ?
+ MigrationResult.success() : MigrationResult.error("failed to update derived from value for NetworkCollection and VfcInstanceGroup group instances. Error : " + status);
+ }
+
+ void loadLatestGroupTypeDefinitions() {
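+ // cache the latest group type definition for every group type that requires upgrade (NetworkCollection, VfcInstanceGroup)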
+ Arrays.stream(GroupsForUpgrade.values()).forEach(this::getLatestGroupTypeDefinition);
+ }
+
+ @Override
+ protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+ StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+ GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+
+ try {
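+ // read the GROUPS child vertex and add any missing group type properties; NOT_FOUND is converted to OK in the finally block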
+ status = janusGraphDao.getChildVertex(containerV, EdgeLabelEnum.GROUPS, JsonParseFlagEnum.ParseAll)
+ .either(this::updateGroupPropertiesIfRequired, this::handleError);
+ }
+ catch (Exception e) {
+ log.error("Exception occurred:", e);
+ status = StorageOperationStatus.GENERAL_ERROR;
+ }
+ finally {
+ if (status != StorageOperationStatus.OK) {
+ janusGraphDao.rollback();
+ if (status == StorageOperationStatus.NOT_FOUND) {
+ //it is happy flow as well
+ status = StorageOperationStatus.OK;
+ }
+ }
+ if (log.isInfoEnabled()) {
+ log.info("Upgrade status is <{}> for topology template <{}> uniqueId <{}>",
+ status.name(), containerV.getMetadataProperties().get(GraphPropertyEnum.NAME),
+ containerV.getMetadataProperties().get(GraphPropertyEnum.UNIQUE_ID));
+ }
+ }
+ return status;
+ }
+
+ private StorageOperationStatus updateGroupPropertiesIfRequired(GraphVertex vertex) {
+ StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+ boolean isUpdated = false;
+ Map<String, GroupDataDefinition> groupDefinitionMap = (Map<String, GroupDataDefinition>) vertex.getJson();
+ for (GroupDataDefinition groupDef : groupDefinitionMap.values()) {
+ if (GroupsForUpgrade.containsToscaType(groupDef.getType())) {
+ if (log.isDebugEnabled()) {
+ log.debug("Group instance named <{}> of type <{}> is supposed to be updated on vertex <{}>",
+ groupDef.getName(), groupDef.getType(), vertex.getUniqueId());
+ }
+ isUpdated = isGroupPropertiesUpdateDone(groupDef.getProperties(), latestGroupTypeMap.get(groupDef.getType()).getProperties());
+ if (log.isDebugEnabled()) {
+ String result = isUpdated ? "has been updated" : "is up to date ";
+ log.debug("Group instance named <{}> of type <{}> uniqueID <{}> {} on vertex <{}>",
+ groupDef.getName(), groupDef.getType(), groupDef.getUniqueId(), result, vertex.getUniqueId());
+ }
+ }
+ }
+ if (isUpdated) {
+ vertex.setJson(groupDefinitionMap);
+ status = updateVertexAndCommit(vertex);
+ if (status == StorageOperationStatus.OK && log.isDebugEnabled()) {
+ log.debug("Group properties change is committed on vertex <{}>", vertex.getUniqueId());
+ }
+ }
+ return status;
+ }
+
+ private boolean isGroupPropertiesUpdateDone(List<PropertyDataDefinition> curPropDefList, List<PropertyDefinition> latestGroupDefList) {
+ boolean isUpdated = false;
+ for (PropertyDefinition prop: latestGroupDefList) {
+ if (curPropDefList.stream().noneMatch(l->l.getName().equals(prop.getName()))) {
+ curPropDefList.add(prop);
+ isUpdated = true;
+ }
+ }
+ return isUpdated;
+ }
+
+ StorageOperationStatus getLatestGroupTypeDefinition(GroupsForUpgrade groupsForUpgrade) {
+ return groupTypeOperation.getLatestGroupTypeByType(groupsForUpgrade.getToscaType(), false)
+ .either(g-> {
+ latestGroupTypeMap.put(groupsForUpgrade.getToscaType(), g);
+ return StorageOperationStatus.OK;
+ }, err->err);
+ }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigration.java
new file mode 100644
index 0000000000..837abf1c08
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigration.java
@@ -0,0 +1,187 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.CompositionDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.enums.JsonConstantKeysEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcResourceIconMigration extends InstanceMigrationBase implements Migration {
+
+ private static final Logger log = Logger.getLogger(SdcResourceIconMigration.class);
+
+ private Map <String, String> resourceTypeToIconMap = new HashMap<>();
+
+ @VisibleForTesting
+ SdcResourceIconMigration(JanusGraphDao janusGraphDao) {
+ super(janusGraphDao);
+ }
+
+
+ @Override
+ public String description() {
+ return "update iconPath for VL and CP nodes";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(1902), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ StorageOperationStatus status;
+ try {
+ updateNodeTypeIconAndStoreInMap(ResourceTypeEnum.VL);
+ updateNodeTypeIconAndStoreInMap(ResourceTypeEnum.CP);
+
+ if (!resourceTypeToIconMap.isEmpty()) {
+ status = upgradeTopologyTemplates();
+ } else {
+ log.error("No VL and CP node definitions found");
+ status = StorageOperationStatus.NOT_FOUND;
+ }
+ }
+ catch(Exception e) {
+ log.error("Exception thrown: {}", e);
+ status = StorageOperationStatus.GENERAL_ERROR;
+ }
+ return status == StorageOperationStatus.OK ?
+ MigrationResult.success() : MigrationResult.error("failed to update iconPath for VL and CP nodes. Error : " + status);
+ }
+
+ @Override
+ protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+ StorageOperationStatus status = StorageOperationStatus.OK;
+ GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+
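+ // rewrite the icon of every mapped component instance in the composition and persist the vertex only when something actually changed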
+ Map<String, CompositionDataDefinition> jsonComposition = (Map<String, CompositionDataDefinition>)containerV.getJson();
+ if (jsonComposition != null && !jsonComposition.isEmpty()) {
+ CompositionDataDefinition compositionDataDefinition = jsonComposition.get(JsonConstantKeysEnum.COMPOSITION.getValue());
+ Map<String, ComponentInstanceDataDefinition> componentInstances = compositionDataDefinition.getComponentInstances();
+
+ long updateCount = componentInstances.values()
+ .stream()
+ .filter(this::updateIconInsideInstance).count();
+ if (updateCount > 0) {
+ status = updateVertexAndCommit(containerV);
+ }
+ }
+ else {
+ log.warn("No json found for template <{}> uniqueId <{}>",
+ containerV.getMetadataProperties().get(GraphPropertyEnum.NAME),
+ containerV.getMetadataProperties().get(GraphPropertyEnum.UNIQUE_ID));
+ }
+ if (log.isInfoEnabled()) {
+ log.info("Upgrade status is <{}> for topology template <{}> uniqueId <{}>",
+ status.name(), containerV.getMetadataProperties().get(GraphPropertyEnum.NAME),
+ containerV.getMetadataProperties().get(GraphPropertyEnum.UNIQUE_ID));
+ }
+ return status;
+ }
+
+
+ @VisibleForTesting
+ boolean updateIconInsideInstance(ComponentInstanceDataDefinition componentInstanceDataDefinition) {
+ String iconPath = resourceTypeToIconMap.get(componentInstanceDataDefinition.getComponentName());
+ if (iconPath != null) {
+ componentInstanceDataDefinition.setIcon(iconPath);
+ if (log.isDebugEnabled()) {
+ log.debug("Icon of component {} is set to {}", componentInstanceDataDefinition.getComponentName(), iconPath);
+ }
+ return true;
+ }
+ return false;
+ }
+
+ @VisibleForTesting
+ void updateNodeTypeIconAndStoreInMap(ResourceTypeEnum resourceType) {
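+ // set the icon of all highest-version, non-deleted node types of this resource type to the lowercase type name and remember the name-to-icon mapping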
+ Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+ Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+
+ propertiesToMatch.put(GraphPropertyEnum.RESOURCE_TYPE, resourceType.name());
+ propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
+
+ propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+
+ String iconPath = String.valueOf(resourceType.getValue()).toLowerCase();
+
+ Map<String, String> resourceNameToIconMap = janusGraphDao.getByCriteria(VertexTypeEnum.NODE_TYPE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll)
+ .either(vl-> updateIconResource(vl, iconPath), status->null);
+
+ if (resourceNameToIconMap != null) {
+ resourceTypeToIconMap.putAll(resourceNameToIconMap);
+ }
+ else {
+ log.warn("Failed to get resources of type <{}>", resourceType.name());
+ }
+ }
+
+ private Map <String, String> updateIconResource(List<GraphVertex> vertexList, String iconPath) {
+ if (vertexList.isEmpty()) {
+ return null;
+ }
+ Map <String, String> nameToIconMap = new HashMap<>();
+ vertexList.forEach(v->{
+ StorageOperationStatus status = updateIconOnVertex(v, iconPath);
+ if (status == StorageOperationStatus.OK) {
+ if (log.isDebugEnabled()) {
+ log.debug("Node type's {} icon is updated to {}", v.getMetadataProperty(GraphPropertyEnum.NAME), iconPath);
+ }
+ nameToIconMap.put(String.valueOf(v.getMetadataProperty(GraphPropertyEnum.NAME)), iconPath);
+ }
+ else {
+ log.error("Failed to update node type {} icon due to a reason: {}",
+ v.getMetadataProperty(GraphPropertyEnum.NAME), status);
+ throw new RuntimeException("Node update failure");
+ }
+ });
+ return nameToIconMap;
+ }
+
+ private StorageOperationStatus updateIconOnVertex(GraphVertex vertex, String iconPath) {
+ vertex.setJsonMetadataField(JsonPresentationFields.ICON, iconPath);
+ return updateVertexAndCommit(vertex);
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1911/SdcDeploymentArtTimeOutMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1911/SdcDeploymentArtTimeOutMigration.java
new file mode 100644
index 0000000000..cba562750a
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1911/SdcDeploymentArtTimeOutMigration.java
@@ -0,0 +1,137 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1911;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.MapArtifactDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.common.api.ArtifactTypeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.Collection;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
+@Component
+public class SdcDeploymentArtTimeOutMigration extends InstanceMigrationBase implements Migration {
+
+ private static final Logger log = LoggerFactory.getLogger(SdcDeploymentArtTimeOutMigration.class);
+ private static Integer defaultTimeOut = 120;
+
+ public SdcDeploymentArtTimeOutMigration(JanusGraphDao janusGraphDao) {
+ super(janusGraphDao);
+ }
+
+ @Override
+ public String description() {
+ return "update instance deployment artifact timeOut to default value 120 minutes";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(1911), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ StorageOperationStatus status = updateDeploymentArtifactTimeOut();
+ return status == StorageOperationStatus.OK ?
+ MigrationResult.success() : MigrationResult.error("failed to update instance deployment artifact timeOut. Error : " + status);
+ }
+
+ protected StorageOperationStatus updateDeploymentArtifactTimeOut() {
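+ // query all non-deleted SERVICE topology templates whose deployment artifact timeouts should be reset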
+ Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+ propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+ Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+ propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+ Either<List<GraphVertex>, JanusGraphOperationStatus> byCriteria = janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll);
+ return byCriteria.either(this::proceed, this::handleError);
+ }
+
+ @Override
+ protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+ StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+ GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+ try {
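+ // load the INST_DEPLOYMENT_ARTIFACTS child vertex and reset the timeout of every HEAT, HEAT_VOL and HEAT_NET artifact to the 120-minute default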
+ Either<GraphVertex, JanusGraphOperationStatus> childVertex = janusGraphDao.getChildVertex(containerV, EdgeLabelEnum.INST_DEPLOYMENT_ARTIFACTS, JsonParseFlagEnum.ParseAll);
+ GraphVertex instDeployArt = childVertex.left().value();
+ Collection<MapArtifactDataDefinition> values = (Collection<MapArtifactDataDefinition>) instDeployArt.getJson().values();
+ List<ArtifactDataDefinition> artifactDataDefinitionsList = values.stream().map(f -> f.getMapToscaDataDefinition().values()).flatMap(f -> f.stream().filter(isRelevantArtifact())).collect(Collectors.toList());
+ artifactDataDefinitionsList.forEach(t -> t.setTimeout(defaultTimeOut));
+ status = updateVertexAndCommit(instDeployArt);
+
+ } catch (NullPointerException e) {
+ log.error("Null Pointer Exception occurred - this mean we have zombie vertex, migration task will continue anyway", e);
+ status = StorageOperationStatus.OK;
+ }
+ catch (Exception e) {
+ //it is happy flow as well
+ log.error("Exception occurred:", e);
+ log.error("Migration task will continue anyway, please find below vertex details related to this exception", e);
+ if (containerV != null){
+ log.error("containerV.getUniqueId() {} ---> ", containerV.getUniqueId());
+ }
+
+ status = StorageOperationStatus.OK;
+ } finally {
+ if (status != StorageOperationStatus.OK) {
+ janusGraphDao.rollback();
+ log.info("failed to update vertex ID {} ", containerV.getUniqueId());
+ if (status == StorageOperationStatus.NOT_FOUND) {
+ //it is happy flow as well
+ status = StorageOperationStatus.OK;
+ }
+ }
+ else{
+ log.info("vertex ID {} successfully updated", containerV.getUniqueId());
+ }
+
+ }
+ return status;
+ }
+
+ private static Predicate<ArtifactDataDefinition> isRelevantArtifact() {
+
+ return p -> ((p.getArtifactType().equals(ArtifactTypeEnum.HEAT.getType()) || p.getArtifactType().equals(ArtifactTypeEnum.HEAT_VOL.getType()) || p.getArtifactType().equals(ArtifactTypeEnum.HEAT_NET.getType()))
+ && p.getTimeout() != defaultTimeOut);
+
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCERTIFIEDstateMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCERTIFIEDstateMigration.java
new file mode 100644
index 0000000000..1d7d3d1298
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCERTIFIEDstateMigration.java
@@ -0,0 +1,139 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig2002;
+
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.openecomp.sdc.asdctool.enums.DistributionStatusEnum;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgePropertyEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcCollapsingRolesCERTIFIEDstateMigration extends InstanceMigrationBase implements Migration {
+
+ private static final Logger log = LoggerFactory.getLogger(SdcCollapsingRolesCERTIFIEDstateMigration.class);
+
+ public SdcCollapsingRolesCERTIFIEDstateMigration(JanusGraphDao janusGraphDao) {
+ super(janusGraphDao);
+ }
+
+ @Override
+ public String description() {
+ return "remove LS=READY_FOR_CERTIFICATION edge from service node + migrate DISTRIBUTION approved/rejected states to <waiting for distribution> state";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(2002), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ StorageOperationStatus status = updateServiceLifeCycleState();
+ return status == StorageOperationStatus.OK ?
+ MigrationResult.success() : MigrationResult.error("failed to service state. Error : " + status);
+ }
+
+ protected StorageOperationStatus updateServiceLifeCycleState() {
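+ // query all non-deleted CERTIFIED services whose lifecycle edges and distribution status need alignment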
+ Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+ propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+ propertiesToMatch.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
+ Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+ propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+ Either<List<GraphVertex>, JanusGraphOperationStatus> byCriteria = janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll);
+ return byCriteria.either(this::proceed, this::handleError);
+ }
+
+ @Override
+ protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+ StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+ GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+ try {
+
+ //update edges to meet above change
+ // update LS edges from RFC to NOT_CERTIFIED_CHECKIN
+
+ updateEdgeProperty(EdgePropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name(), getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+
+ if (containerV.getMetadataProperty(GraphPropertyEnum.DISTRIBUTION_STATUS).equals(DistributionStatusEnum.DISTRIBUTION_APPROVED.name()) || containerV.getMetadataProperty(GraphPropertyEnum.DISTRIBUTION_STATUS).equals(DistributionStatusEnum.DISTRIBUTION_REJECTED.name())) {
+
+ // update vertex state property from DISTRIBUTION_APPROVED/REJECTED to DISTRIBUTION_NOT_APPROVED state
+
+ Map<GraphPropertyEnum, Object> metadataProperties = containerV.getMetadataProperties();
+ metadataProperties.put(GraphPropertyEnum.DISTRIBUTION_STATUS, DistributionStatusEnum.DISTRIBUTION_NOT_APPROVED.name());
+ containerV.setMetadataProperties(metadataProperties);
+
+ //update edges to meet above change
+ //delete LAST_DISTRIBUTION_STATE_MODIFIER edge
+
+ removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_DISTRIBUTION_STATE_MODIFIER));
+
+ }
+
+ status = updateVertexAndCommit(containerV);
+
+ } catch (NullPointerException e) {
+ log.error("Null Pointer Exception occurred - this mean we have zombie vertex, migration task will continue anyway", e);
+ status = StorageOperationStatus.EXEUCTION_FAILED;
+ } catch (Exception e) {
+ // log the error and let the migration task continue
+ log.error("Exception occurred:", e);
+ log.error("Migration task will continue anyway, please find below vertex details related to this exception", e);
+ if (containerV != null) {
+ log.error("containerV.getUniqueId() ---> {} ", containerV.getUniqueId());
+ }
+
+ } finally {
+ if (status != StorageOperationStatus.OK) {
+ janusGraphDao.rollback();
+ log.info("failed to update vertex ID {} ", containerV.getUniqueId());
+ log.info("Storage Operation Status {}", status.toString());
+ } else {
+ log.info("vertex ID {} successfully updated", containerV.getUniqueId());
+ }
+
+ }
+ return status;
+ }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCIPstateMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCIPstateMigration.java
new file mode 100644
index 0000000000..463ccd8695
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCIPstateMigration.java
@@ -0,0 +1,153 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig2002;
+
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgePropertyEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcCollapsingRolesCIPstateMigration extends InstanceMigrationBase implements Migration {
+
+ private static final Logger log = LoggerFactory.getLogger(SdcCollapsingRolesCIPstateMigration.class);
+
+ public SdcCollapsingRolesCIPstateMigration(JanusGraphDao janusGraphDao) {
+ super(janusGraphDao);
+ }
+
+ @Override
+ public String description() {
+ return "update Service state from CERTIFICATION_IN_PROGRES to NOT_CERTIFIED_CHECKOUT state ";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(2002), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ StorageOperationStatus status = updateServiceLifeCycleState();
+ return status == StorageOperationStatus.OK ?
+ MigrationResult.success() : MigrationResult.error("failed to service state. Error : " + status);
+ }
+
+ protected StorageOperationStatus updateServiceLifeCycleState() {
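+ // query all highest-version, non-deleted services currently in CERTIFICATION_IN_PROGRESS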
+ Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+ propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+ propertiesToMatch.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFICATION_IN_PROGRESS.name());
+ propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
+ Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+ propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+ Either<List<GraphVertex>, JanusGraphOperationStatus> byCriteria = janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll);
+ return byCriteria.either(this::proceed, this::handleError);
+ }
+
+ @Override
+ protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+ StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+ GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+ try {
+
+ // update vertex state property from CERTIFICATION_IN_PROGRESS to NOT_CERTIFIED_CHECKIN state
+
+ Map<GraphPropertyEnum, Object> metadataProperties = containerV.getMetadataProperties();
+ metadataProperties.put(GraphPropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name());
+ containerV.setMetadataProperties(metadataProperties);
+
+ //update edges to meet above change
+ // remove STATE and LAST_MODIFIER edges
+ removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.STATE));
+ removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_MODIFIER));
+
+ //find designer with LS = NOT_CERTIFIED_CHECKIN
+ Vertex relevantDesigner = findRelevantDesigner(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+ removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+ Map<EdgePropertyEnum, Object> edgeProperties = new HashMap<>();
+ edgeProperties.put(EdgePropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name());
+ JanusGraphOperationStatus createSTedgeStatus = janusGraphDao.createEdge(relevantDesigner, containerV.getVertex(), EdgeLabelEnum.STATE, edgeProperties);
+ JanusGraphOperationStatus createLMedgeStatus = janusGraphDao.createEdge(relevantDesigner, containerV.getVertex(), EdgeLabelEnum.LAST_MODIFIER, new HashMap<>());
+
+ status = updateVertexAndCommit(containerV);
+
+ } catch (NullPointerException e) {
+ log.error("Null Pointer Exception occurred - this mean we have zombie vertex, migration task will continue anyway", e);
+ status = StorageOperationStatus.EXEUCTION_FAILED;
+ } catch (Exception e) {
+ // log the error and let the migration task continue
+ log.error("Exception occurred:", e);
+ log.error("Migration task will continue anyway, please find below vertex details related to this exception", e);
+ if (containerV != null) {
+ log.error("containerV.getUniqueId() ---> {} ", containerV.getUniqueId());
+ }
+
+ } finally {
+ if (status != StorageOperationStatus.OK) {
+ janusGraphDao.rollback();
+ log.info("failed to update vertex ID {} ", containerV.getUniqueId());
+ log.info("Storage Operation Status {}", status.toString());
+ } else {
+ log.info("vertex ID {} successfully updated", containerV.getUniqueId());
+ }
+
+ }
+ return status;
+ }
+
+ private Vertex findRelevantDesigner(Iterator<Edge> edges) {
+ Vertex vertex = null;
+ while (edges.hasNext()) {
+ Edge edge = edges.next();
+ String state = (String) janusGraphDao.getProperty(edge, EdgePropertyEnum.STATE);
+ if (state.equals(LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name())) {
+ vertex = edge.outVertex();
+ }
+ }
+ return vertex;
+ }
+
+}
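Editor's note: the task above leans on helpers inherited from InstanceMigrationBase (getByCriteria unpacking, proceed, handleOneContainer, updateVertexAndCommit) that are not shown in this patch. The sketch below is illustrative only, showing one plausible way the Either returned by getByCriteria feeds a per-vertex loop; the real base class may map errors and aggregate statuses differently.

    // Illustrative sketch only -- not part of the patch. Assumes the fj.data.Either result of
    // janusGraphDao.getByCriteria(...) and the StorageOperationStatus enum used above;
    // handleOneContainer(...) is the per-vertex hook each migration task overrides.
    protected StorageOperationStatus proceedSketch(List<GraphVertex> matchedVertices) {
        StorageOperationStatus worst = StorageOperationStatus.OK;
        for (GraphVertex vertex : matchedVertices) {
            StorageOperationStatus status = handleOneContainer(vertex);
            if (status != StorageOperationStatus.OK) {
                worst = status; // remember the failure but keep migrating the remaining vertices
            }
        }
        return worst;
    }

    protected StorageOperationStatus handleErrorSketch(JanusGraphOperationStatus status) {
        // NOT_FOUND simply means there was nothing matching the criteria, which is not a failure
        return status == JanusGraphOperationStatus.NOT_FOUND
                ? StorageOperationStatus.OK
                : StorageOperationStatus.GENERAL_ERROR;
    }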
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesRFCstateMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesRFCstateMigration.java
new file mode 100644
index 0000000000..559715ef5d
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesRFCstateMigration.java
@@ -0,0 +1,147 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig2002;
+
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.janusgraph.core.JanusGraphVertex;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgePropertyEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcCollapsingRolesRFCstateMigration extends InstanceMigrationBase implements Migration {
+
+ private static final Logger log = LoggerFactory.getLogger(SdcCollapsingRolesRFCstateMigration.class);
+
+ public SdcCollapsingRolesRFCstateMigration(JanusGraphDao janusGraphDao) {
+ super(janusGraphDao);
+ }
+
+ @Override
+ public String description() {
+ return "update Service state from READY_FOR_CERTIFICATION to NOT_CERTIFIED_CHECKOUT state ";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(2002), BigInteger.valueOf(0));
+ }
+
+ @Override
+ public MigrationResult migrate() {
+ StorageOperationStatus status = updateServiceLifeCycleState();
+ return status == StorageOperationStatus.OK ?
+ MigrationResult.success() : MigrationResult.error("failed to update service state. Error: " + status);
+ }
+
+ protected StorageOperationStatus updateServiceLifeCycleState() {
+ Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+ propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+ propertiesToMatch.put(GraphPropertyEnum.STATE, LifecycleStateEnum.READY_FOR_CERTIFICATION.name());
+ propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
+ Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+ propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+ Either<List<GraphVertex>, JanusGraphOperationStatus> byCriteria = janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll);
+ return byCriteria.either(this::proceed, this::handleError);
+ }
+
+ @Override
+ protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+ StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+ GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+ try {
+
+ // update vertex state property from READY_FOR_CERTIFICATION to NOT_CERTIFIED_CHECKIN state
+
+ Map<GraphPropertyEnum, Object> metadataProperties = containerV.getMetadataProperties();
+ metadataProperties.put(GraphPropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name());
+ containerV.setMetadataProperties(metadataProperties);
+
+ // update edges to reflect the state change above
+
+ List<JanusGraphVertex> stateEdgesOutVertexList = getVertexByEdgeSide(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.STATE), EdgeSide.OUT);
+ List<JanusGraphVertex> lastStateEdgesOutVertexList = getVertexByEdgeSide(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE), EdgeSide.OUT);
+
+ if (sameUser(stateEdgesOutVertexList, lastStateEdgesOutVertexList)) {
+ updateEdgeProperty(EdgePropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name(), getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.STATE));
+ removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+ } else {
+ removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.STATE));
+ removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+ Map<EdgePropertyEnum, Object> edgeProperties = new HashMap<>();
+ edgeProperties.put(EdgePropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name());
+ janusGraphDao.createEdge(lastStateEdgesOutVertexList.get(0), containerV.getVertex(), EdgeLabelEnum.STATE, edgeProperties);
+
+ }
+
+ status = updateVertexAndCommit(containerV);
+
+ } catch (NullPointerException e) {
+ log.error("Null Pointer Exception occurred - this mean we have zombie vertex, migration task will continue anyway", e);
+ status = StorageOperationStatus.EXEUCTION_FAILED;
+ } catch (Exception e) {
+ // unexpected failures are tolerated: log the details and continue with the next vertex
+ log.error("Exception occurred:", e);
+ log.error("Migration task will continue anyway, please find below vertex details related to this exception", e);
+ if (containerV != null) {
+ log.error("containerV.getUniqueId() ---> {} ", containerV.getUniqueId());
+ }
+
+ } finally {
+ if (status != StorageOperationStatus.OK) {
+ janusGraphDao.rollback();
+ log.info("failed to update vertex ID {} ", containerV.getUniqueId());
+ log.info("Storage Operation Status {}", status.toString());
+ } else {
+ log.info("vertex ID {} successfully updated", containerV.getUniqueId());
+ }
+
+ }
+ return status;
+ }
+
+
+}
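Editor's note: the branch above turns on whether the STATE and LAST_STATE edges originate from the same user vertex. Both sameUser(...) and getVertexByEdgeSide(...) come from InstanceMigrationBase and are not shown in this patch, so the following is only an assumed reading of that check and may not match the actual helper.

    // Illustrative sketch only -- assumed semantics of the sameUser(...) helper used above.
    // Compares the graph ids of the first out-vertex on each edge list; empty lists count as "different users".
    private boolean sameUserSketch(List<JanusGraphVertex> stateOutVertices,
                                   List<JanusGraphVertex> lastStateOutVertices) {
        if (stateOutVertices.isEmpty() || lastStateOutVertices.isEmpty()) {
            return false;
        }
        return stateOutVertices.get(0).id().equals(lastStateOutVertices.get(0).id());
    }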
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java
index e7d39ffb3e..412926fa6c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java
@@ -20,11 +20,11 @@
package org.openecomp.sdc.asdctool.servlets;
-import org.janusgraph.core.JanusGraph;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.tinkerpop.gremlin.structure.io.graphml.GraphMLWriter;
import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.janusgraph.core.JanusGraph;
import org.openecomp.sdc.asdctool.Utils;
import org.openecomp.sdc.common.log.wrappers.Logger;
@@ -34,7 +34,13 @@ import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
-import java.io.*;
+import java.io.BufferedOutputStream;
+import java.io.BufferedReader;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
import java.util.Map.Entry;
import java.util.Properties;
//import com.tinkerpop.blueprints.util.io.graphml.GraphMLWriter;
@@ -146,7 +152,7 @@ public class ExportImportJanusGraphServlet {
result = outputFile;
} catch (Exception e) {
- log.info("export Graph failed - {}" , e);
+ e.printStackTrace();
// graph.rollback();
graph.tx().rollback();
} finally {
@@ -155,7 +161,7 @@ public class ExportImportJanusGraphServlet {
out.close();
}
} catch (IOException e) {
- log.info("close FileOutputStream failed - {}" , e);
+ e.printStackTrace();
}
}
return result;
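Editor's note: this hunk only reorders imports and changes the exception handling, but the surrounding servlet exports the JanusGraph content to a GraphML file and rolls back the transaction on failure, as the catch block above suggests. A rough, self-contained sketch of that flow using the standard TinkerPop GraphMLWriter (the output path and method name are illustrative, not taken from the servlet):

    // Illustrative sketch only -- standard TinkerPop GraphML export with the same
    // rollback-on-failure step the servlet performs. Assumes java.io and GraphMLWriter imports.
    static File exportGraphMlSketch(JanusGraph graph) throws IOException {
        File outputFile = new File("/tmp/graph-export.graphml"); // illustrative path
        try (OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile))) {
            GraphMLWriter.build().create().writeGraph(out, graph);
            graph.tx().commit();
            return outputFile;
        } catch (IOException e) {
            graph.tx().rollback(); // same recovery step as in the servlet's catch block
            throw e;
        }
    }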
diff --git a/asdctool/src/main/resources/application-context.xml b/asdctool/src/main/resources/application-context.xml
index c9a13df44a..252b951895 100644
--- a/asdctool/src/main/resources/application-context.xml
+++ b/asdctool/src/main/resources/application-context.xml
@@ -1,11 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xmlns:util="http://www.springframework.org/schema/util"
xsi:schemaLocation="
- http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
- http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-3.0.xsd">
-
-
- <util:properties id="elasticsearchConfig" location="file:${config.home}/elasticsearch.yml" />
-
+ http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
</beans>
diff --git a/asdctool/src/main/resources/config/configuration.yaml b/asdctool/src/main/resources/config/configuration.yaml
index cc7a3cf295..93bb2de2be 100644
--- a/asdctool/src/main/resources/config/configuration.yaml
+++ b/asdctool/src/main/resources/config/configuration.yaml
@@ -23,12 +23,13 @@ beSslPort: 8443
version: 1.0
released: 2012-11-30
-toscaConformanceLevel: 8.0
+toscaConformanceLevel: 12.0
minToscaConformanceLevel: 3.0
# These values are necessary for running upgrade migration 1710.0 process
enableAutoHealing: false
appVersion: 1.1.0
+artifactGeneratorConfig: Artifact-Generator.properties
resourcesForUpgrade:
8.0:
- org.openecomp.resource.cp.extCP
@@ -53,8 +54,6 @@ janusGraphReconnectIntervalInSeconds: 3
# The read timeout towards JanusGraph DB when health check is invoked:
janusGraphHealthCheckReadTimeout: 1
-# The interval to try and reconnect to Elasticsearch when it is down during ASDC startup:
-esReconnectIntervalInSeconds: 3
uebHealthCheckReconnectIntervalInSeconds: 15
uebHealthCheckReadTimeout: 4
@@ -107,30 +106,6 @@ cassandraConfig:
- { name: sdccomponent, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['DC-AIO-Ubuntu1', '1']}
- { name: sdcrepository, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['DC-AIO-Ubuntu1', '1']}
-
-#Application-specific settings of ES
-elasticSearch:
- # Mapping of index prefix to time-based frame. For example, if below is configured:
- #
- # - indexPrefix: auditingevents
- # creationPeriod: minute
- #
- # then ES object of type which is mapped to "auditingevents-*" template, and created on 2015-12-23 13:24:54, will enter "auditingevents-2015-12-23-13-24" index.
- # Another object created on 2015-12-23 13:25:54, will enter "auditingevents-2015-12-23-13-25" index.
- # If creationPeriod: month, both of the above will enter "auditingevents-2015-12" index.
- #
- # PLEASE NOTE: the timestamps are created in UTC/GMT timezone! This is needed so that timestamps will be correctly presented in Kibana.
- #
- # Legal values for creationPeriod - year, month, day, hour, minute, none (meaning no time-based behaviour).
- #
- # If no creationPeriod is configured for indexPrefix, default behavour is creationPeriod: month.
-
- indicesTimeFrequency:
- - indexPrefix: auditingevents
- creationPeriod: month
- - indexPrefix: monitoring_events
- creationPeriod: month
-
artifactTypes:
- CHEF
- PUPPET
@@ -176,11 +151,6 @@ resourceTypes: &allResourceTypes
# - VF
# - VL
deploymentResourceArtifacts:
- cdsBlueprint:
- displayName: "CDS Blueprint"
- type: CONTROLLER_BLUEPRINT_ARCHIVE
- description: "CDS deployment artifact"
- fileExtension: "zip"
# heat:
# displayName: "Base HEAT Template"
# type: HEAT
@@ -318,7 +288,10 @@ systemMonitoring:
isProxy: false
probeIntervalInSeconds: 15
-defaultHeatArtifactTimeoutMinutes: 60
+heatArtifactDeploymentTimeout:
+ defaultMinutes: 30
+ minMinutes: 1
+ maxMinutes: 120
serviceDeploymentArtifacts:
CONTROLLER_BLUEPRINT_ARCHIVE:
@@ -506,7 +479,6 @@ resourceDeploymentArtifacts:
ONBOARDED_PACKAGE:
acceptedTypes:
- csar
- - zip
validForResourceTypes:
- VF
- PNF
@@ -614,10 +586,6 @@ resourceInformationalArtifacts:
resourceInformationalDeployedArtifacts:
-requirementsToFulfillBeforeCert:
-
-capabilitiesToConsumeBeforeCert:
-
unLoggedUrls:
- /sdc2/rest/healthCheck
@@ -701,3 +669,59 @@ genericAssetNodeTypes:
VF : org.openecomp.resource.abstract.nodes.VF
PNF: org.openecomp.resource.abstract.nodes.PNF
Service: org.openecomp.resource.abstract.nodes.service
+
+dmaapConsumerConfiguration:
+ hosts: olsd004.wnsnet.attws.com:3905
+ consumerGroup: asdc
+ consumerId: mama #mama - in Order To Consume Remove This String And Replace It With -> mama
+ timeoutMs: 15000
+ limit: 1
+ pollingInterval: 2
+ topic: com.att.sdc.23911-SDCforTestDev-v001
+ latitude: 32.109333
+ longitude: 34.855499
+ version: 1.0
+ serviceName: dmaap-v1.dev.dmaap.dt.saat.acsi.att.com/events
+ environment: TEST
+ partner: BOT_R
+ routeOffer: MR1
+ protocol: https
+ contenttype: application/json
+ dme2TraceOn: true
+ aftEnvironment: AFTUAT
+ aftDme2ConnectionTimeoutMs: 15000
+ aftDme2RoundtripTimeoutMs: 240000
+ aftDme2ReadTimeoutMs: 50000
+ dme2preferredRouterFilePath: DME2preferredRouter.txt
+ timeLimitForNotificationHandleMs: 120000
+ credential:
+ username: m09875@sdc.att.com
+ password: hmXYcznAljMSisdy8zgcag==
+
+dmaapProducerConfiguration:
+ hosts: olsd004.wnsnet.attws.com:3905
+ consumerGroup: asdc
+ consumerId: mama #mama - in Order To Consume Remove This String And Replace It With -> mama
+ timeoutMs: 15000
+ limit: 1
+ pollingInterval: 2
+ topic: com.att.sdc.23911-SDCforTestDev-v001
+ latitude: 32.109333
+ longitude: 34.855499
+ version: 1.0
+ serviceName: dmaap-v1.dev.dmaap.dt.saat.acsi.att.com/events
+ environment: TEST
+ partner: BOT_R
+ routeOffer: MR1
+ protocol: https
+ contenttype: application/json
+ dme2TraceOn: true
+ aftEnvironment: AFTUAT
+ aftDme2ConnectionTimeoutMs: 15000
+ aftDme2RoundtripTimeoutMs: 240000
+ aftDme2ReadTimeoutMs: 50000
+ dme2preferredRouterFilePath: DME2preferredRouter.txt
+ timeLimitForNotificationHandleMs: 120000
+ credential:
+ username: m09875@sdc.att.com
+ password: hmXYcznAljMSisdy8zgcag== \ No newline at end of file
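Editor's note: the single defaultHeatArtifactTimeoutMinutes value is replaced above by a heatArtifactDeploymentTimeout block with default/min/max values. A consumer of this configuration would presumably clamp a requested timeout into that range, falling back to the default when nothing is requested. The sketch below is illustrative only; the class and method names (HeatTimeoutConfig, resolveTimeout) are invented for the example and do not appear in the SDC code base.

    // Illustrative sketch only -- hypothetical holder for the heatArtifactDeploymentTimeout block.
    final class HeatTimeoutConfig {
        int defaultMinutes = 30;
        int minMinutes = 1;
        int maxMinutes = 120;

        // Returns the requested timeout clamped into [minMinutes, maxMinutes],
        // or defaultMinutes when no value was requested.
        int resolveTimeout(Integer requestedMinutes) {
            if (requestedMinutes == null) {
                return defaultMinutes;
            }
            return Math.max(minMinutes, Math.min(maxMinutes, requestedMinutes));
        }
    }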
diff --git a/asdctool/src/main/resources/config/dataTypes.yml b/asdctool/src/main/resources/config/dataTypes.yml
index d768bffe78..43c7f0c844 100644
--- a/asdctool/src/main/resources/config/dataTypes.yml
+++ b/asdctool/src/main/resources/config/dataTypes.yml
@@ -114,12 +114,12 @@ org.openecomp.datatypes.heat.network.AddressPair:
type: string
description: MAC address
required: false
- status: supported
+ status: SUPPORTED
ip_address:
type: string
description: IP address
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.network.subnet.HostRoute:
derived_from: tosca.datatypes.Root
description: Host route info for the subnet
@@ -128,12 +128,12 @@ org.openecomp.datatypes.heat.network.subnet.HostRoute:
type: string
description: The destination for static route
required: false
- status: supported
+ status: SUPPORTED
nexthop:
type: string
description: The next hop for the destination
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.network.AllocationPool:
derived_from: tosca.datatypes.Root
@@ -143,12 +143,12 @@ org.openecomp.datatypes.heat.network.AllocationPool:
type: string
description: Start address for the allocation pool
required: false
- status: supported
+ status: SUPPORTED
end:
type: string
description: End address for the allocation pool
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.network.neutron.Subnet:
derived_from: tosca.datatypes.Root
@@ -158,18 +158,18 @@ org.openecomp.datatypes.heat.network.neutron.Subnet:
type: string
description: The ID of the tenant who owns the network
required: false
- status: supported
+ status: SUPPORTED
enable_dhcp:
type: boolean
description: Set to true if DHCP is enabled and false if DHCP is disabled
required: false
default: true
- status: supported
+ status: SUPPORTED
ipv6_address_mode:
type: string
description: IPv6 address mode
required: false
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- dhcpv6-stateful
@@ -179,7 +179,7 @@ org.openecomp.datatypes.heat.network.neutron.Subnet:
type: string
description: IPv6 RA (Router Advertisement) mode
required: false
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- dhcpv6-stateful
@@ -191,35 +191,35 @@ org.openecomp.datatypes.heat.network.neutron.Subnet:
required: false
default: {
}
- status: supported
+ status: SUPPORTED
entry_schema:
type: string
allocation_pools:
type: list
description: The start and end addresses for the allocation pools
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.network.AllocationPool
subnetpool:
type: string
description: The name or ID of the subnet pool
required: false
- status: supported
+ status: SUPPORTED
dns_nameservers:
type: list
description: A specified set of DNS name servers to be used
required: false
default: [
]
- status: supported
+ status: SUPPORTED
entry_schema:
type: string
host_routes:
type: list
description: The gateway IP address
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.network.subnet.HostRoute
ip_version:
@@ -227,7 +227,7 @@ org.openecomp.datatypes.heat.network.neutron.Subnet:
description: The gateway IP address
required: false
default: 4
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- '4'
@@ -236,24 +236,24 @@ org.openecomp.datatypes.heat.network.neutron.Subnet:
type: string
description: The name of the subnet
required: false
- status: supported
+ status: SUPPORTED
prefixlen:
type: integer
description: Prefix length for subnet allocation from subnet pool
required: false
- status: supported
+ status: SUPPORTED
constraints:
- greater_or_equal: 0
cidr:
type: string
description: The CIDR
required: false
- status: supported
+ status: SUPPORTED
gateway_ip:
type: string
description: The gateway IP address
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.novaServer.network.PortExtraProperties:
derived_from: tosca.datatypes.Root
@@ -263,35 +263,35 @@ org.openecomp.datatypes.heat.novaServer.network.PortExtraProperties:
type: boolean
description: Flag to enable/disable port security on the port
required: false
- status: supported
+ status: SUPPORTED
mac_address:
type: string
description: MAC address to give to this port
required: false
- status: supported
+ status: SUPPORTED
admin_state_up:
type: boolean
description: The administrative state of this port
required: false
default: true
- status: supported
+ status: SUPPORTED
qos_policy:
type: string
description: The name or ID of QoS policy to attach to this port
required: false
- status: supported
+ status: SUPPORTED
allowed_address_pairs:
type: list
description: Additional MAC/IP address pairs allowed to pass through the port
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.network.AddressPair
binding:vnic_type:
type: string
description: The vnic type to be bound on the neutron port
required: false
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- macvtap
@@ -303,7 +303,7 @@ org.openecomp.datatypes.heat.novaServer.network.PortExtraProperties:
required: false
default: {
}
- status: supported
+ status: SUPPORTED
entry_schema:
type: string
org.openecomp.datatypes.heat.novaServer.network.AddressInfo:
@@ -314,7 +314,7 @@ org.openecomp.datatypes.heat.novaServer.network.AddressInfo:
type: string
description: Port id
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.neutron.port.FixedIps:
derived_from: tosca.datatypes.Root
description: subnet/ip_address
@@ -323,12 +323,12 @@ org.openecomp.datatypes.heat.neutron.port.FixedIps:
type: string
description: Subnet in which to allocate the IP address for this port
required: false
- status: supported
+ status: SUPPORTED
ip_address:
type: string
description: IP address desired in the subnet for this port
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.FileInfo:
derived_from: tosca.datatypes.Root
description: Heat File Info
@@ -337,12 +337,12 @@ org.openecomp.datatypes.heat.FileInfo:
type: string
description: The required URI string (relative or absolute) which can be used to locate the file
required: true
- status: supported
+ status: SUPPORTED
file_type:
type: string
description: The type of the file
required: true
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- base
@@ -357,12 +357,12 @@ org.openecomp.datatypes.heat.contrail.network.rule.PortPairs:
type: string
description: Start port
required: false
- status: supported
+ status: SUPPORTED
end_port:
type: string
description: End port
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrail.network.rule.Rule:
derived_from: tosca.datatypes.Root
description: policy rule
@@ -371,45 +371,45 @@ org.openecomp.datatypes.heat.contrail.network.rule.Rule:
type: list
description: Source ports
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrail.network.rule.PortPairs
protocol:
type: string
description: Protocol
required: false
- status: supported
+ status: SUPPORTED
dst_addresses:
type: list
description: Destination addresses
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork
apply_service:
type: string
description: Service to apply
required: false
- status: supported
+ status: SUPPORTED
dst_ports:
type: list
description: Destination ports
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrail.network.rule.PortPairs
src_addresses:
type: list
description: Source addresses
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork
direction:
type: string
description: Direction
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrail.network.rule.RuleList:
derived_from: tosca.datatypes.Root
description: list of policy rules
@@ -418,7 +418,7 @@ org.openecomp.datatypes.heat.contrail.network.rule.RuleList:
type: list
description: Contrail network rule
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrail.network.rule.Rule
org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork:
@@ -429,7 +429,7 @@ org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork:
type: string
description: Virtual network
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.network.neutron.SecurityRules.Rule:
derived_from: tosca.datatypes.Root
@@ -439,12 +439,12 @@ org.openecomp.datatypes.heat.network.neutron.SecurityRules.Rule:
type: string
description: The remote group ID to be associated with this security group rule
required: false
- status: supported
+ status: SUPPORTED
protocol:
type: string
description: The protocol that is matched by the security group rule
required: false
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- tcp
@@ -455,7 +455,7 @@ org.openecomp.datatypes.heat.network.neutron.SecurityRules.Rule:
description: Ethertype of the traffic
required: false
default: IPv4
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- IPv4
@@ -465,7 +465,7 @@ org.openecomp.datatypes.heat.network.neutron.SecurityRules.Rule:
description: 'The maximum port number in the range that is matched by the
security group rule. '
required: false
- status: supported
+ status: SUPPORTED
constraints:
- in_range:
- 0
@@ -474,13 +474,13 @@ org.openecomp.datatypes.heat.network.neutron.SecurityRules.Rule:
type: string
description: The remote IP prefix (CIDR) to be associated with this security group rule
required: false
- status: supported
+ status: SUPPORTED
remote_mode:
type: string
description: Whether to specify a remote group or a remote IP prefix
required: false
default: remote_ip_prefix
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- remote_ip_prefix
@@ -490,7 +490,7 @@ org.openecomp.datatypes.heat.network.neutron.SecurityRules.Rule:
description: The direction in which the security group rule is applied
required: false
default: ingress
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- egress
@@ -499,7 +499,7 @@ org.openecomp.datatypes.heat.network.neutron.SecurityRules.Rule:
type: integer
description: The minimum port number in the range that is matched by the security group rule.
required: false
- status: supported
+ status: SUPPORTED
constraints:
- in_range:
- 0
@@ -512,13 +512,13 @@ org.openecomp.datatypes.heat.substitution.SubstitutionFiltering:
type: string
description: Substitute Service Template
required: true
- status: supported
+ status: SUPPORTED
index_value:
type: integer
description: Index value of the substitution service template runtime instance
required: false
default: 0
- status: supported
+ status: SUPPORTED
constraints:
- greater_or_equal: 0
count:
@@ -526,19 +526,19 @@ org.openecomp.datatypes.heat.substitution.SubstitutionFiltering:
description: Count
required: false
default: 1
- status: supported
+ status: SUPPORTED
scaling_enabled:
type: boolean
description: Indicates whether service scaling is enabled
required: false
default: true
- status: supported
+ status: SUPPORTED
mandatory:
type: boolean
description: Mandatory
required: false
default: true
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefDataSequence:
derived_from: tosca.datatypes.Root
description: network policy refs data sequence
@@ -547,12 +547,12 @@ org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefDataSequence:
type: integer
description: Network Policy ref data sequence Major
required: false
- status: supported
+ status: SUPPORTED
network_policy_refs_data_sequence_minor:
type: integer
description: Network Policy ref data sequence Minor
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefData:
derived_from: tosca.datatypes.Root
description: network policy refs data
@@ -561,7 +561,7 @@ org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefData:
type: org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefDataSequence
description: Network Policy ref data sequence
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnet:
derived_from: tosca.datatypes.Root
description: Network Ipam Ref Data Subnet
@@ -570,12 +570,12 @@ org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnet
type: string
description: Network ipam refs data ipam subnets ip prefix len
required: false
- status: supported
+ status: SUPPORTED
network_ipam_refs_data_ipam_subnets_subnet_ip_prefix:
type: string
description: Network ipam refs data ipam subnets ip prefix
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnetList:
derived_from: tosca.datatypes.Root
description: Network Ipam Ref Data Subnet List
@@ -584,12 +584,12 @@ org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnet
type: org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnet
description: Network ipam refs data ipam subnets
required: false
- status: supported
+ status: SUPPORTED
network_ipam_refs_data_ipam_subnets_addr_from_start:
type: string
description: Network ipam refs data ipam subnets addr from start
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.IpamRefData:
derived_from: tosca.datatypes.Root
description: Network Ipam Ref Data
@@ -598,7 +598,7 @@ org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.IpamRefData:
type: list
description: Network ipam refs data ipam subnets
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnetList
org.openecomp.datatypes.heat.contrailV2.network.rule.SrcVirtualNetwork:
@@ -609,7 +609,7 @@ org.openecomp.datatypes.heat.contrailV2.network.rule.SrcVirtualNetwork:
type: string
description: Source addresses Virtual network
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.network.rule.DstVirtualNetwork:
derived_from: tosca.datatypes.Root
description: destination addresses
@@ -618,7 +618,7 @@ org.openecomp.datatypes.heat.contrailV2.network.rule.DstVirtualNetwork:
type: string
description: Destination addresses Virtual network
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.network.rule.DstPortPairs:
derived_from: tosca.datatypes.Root
description: destination port pairs
@@ -627,12 +627,12 @@ org.openecomp.datatypes.heat.contrailV2.network.rule.DstPortPairs:
type: string
description: Start port
required: false
- status: supported
+ status: SUPPORTED
network_policy_entries_policy_rule_dst_ports_end_port:
type: string
description: End port
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.network.rule.SrcPortPairs:
derived_from: tosca.datatypes.Root
description: source port pairs
@@ -641,12 +641,12 @@ org.openecomp.datatypes.heat.contrailV2.network.rule.SrcPortPairs:
type: string
description: Start port
required: false
- status: supported
+ status: SUPPORTED
network_policy_entries_policy_rule_src_ports_end_port:
type: string
description: End port
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList:
derived_from: tosca.datatypes.Root
description: Action List
@@ -655,12 +655,12 @@ org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList:
type: string
description: Simple Action
required: false
- status: supported
+ status: SUPPORTED
network_policy_entries_policy_rule_action_list_apply_service:
type: list
description: Apply Service
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: string
org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList:
@@ -671,12 +671,12 @@ org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList:
type: string
description: Simple Action
required: false
- status: supported
+ status: SUPPORTED
network_policy_entries_policy_rule_action_list_apply_service:
type: list
description: Apply Service
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: string
org.openecomp.datatypes.heat.contrailV2.network.rule.Rule:
@@ -687,45 +687,45 @@ org.openecomp.datatypes.heat.contrailV2.network.rule.Rule:
type: list
description: Destination addresses
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrailV2.network.rule.DstVirtualNetwork
network_policy_entries_policy_rule_dst_ports:
type: list
description: Destination ports
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrailV2.network.rule.DstPortPairs
network_policy_entries_policy_rule_protocol:
type: string
description: Protocol
required: false
- status: supported
+ status: SUPPORTED
network_policy_entries_policy_rule_src_addresses:
type: list
description: Source addresses
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrailV2.network.rule.SrcVirtualNetwork
network_policy_entries_policy_rule_direction:
type: string
description: Direction
required: false
- status: supported
+ status: SUPPORTED
network_policy_entries_policy_rule_src_ports:
type: list
description: Source ports
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrailV2.network.rule.SrcPortPairs
network_policy_entries_policy_rule_action_list:
type: org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList
description: Action list
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.network.rule.RuleList:
derived_from: tosca.datatypes.Root
description: list of policy rules
@@ -734,7 +734,7 @@ org.openecomp.datatypes.heat.contrailV2.network.rule.RuleList:
type: list
description: Contrail network rule
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrailV2.network.rule.Rule
org.openecomp.datatypes.heat.network.contrail.port.StaticRoute:
@@ -745,17 +745,17 @@ org.openecomp.datatypes.heat.network.contrail.port.StaticRoute:
type: string
description: Route prefix
required: false
- status: supported
+ status: SUPPORTED
next_hop:
type: string
description: Next hop
required: false
- status: supported
+ status: SUPPORTED
next_hop_type:
type: string
description: Next hop type
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.network.contrail.AddressPair:
derived_from: tosca.datatypes.Root
description: Address Pair
@@ -764,7 +764,7 @@ org.openecomp.datatypes.heat.network.contrail.AddressPair:
type: string
description: Address mode active-active or active-standy
required: false
- status: supported
+ status: SUPPORTED
constraints:
- valid_values:
- active-active
@@ -773,12 +773,12 @@ org.openecomp.datatypes.heat.network.contrail.AddressPair:
type: string
description: IP address prefix
required: false
- status: supported
+ status: SUPPORTED
mac_address:
type: string
description: Mac address
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.network.contrail.InterfaceData:
derived_from: tosca.datatypes.Root
description: Interface Data
@@ -787,26 +787,26 @@ org.openecomp.datatypes.heat.network.contrail.InterfaceData:
type: list
description: An ordered list of static routes to be added to this interface
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.network.contrail.port.StaticRoute
virtual_network:
type: string
description: Virtual Network for this interface
required: true
- status: supported
+ status: SUPPORTED
allowed_address_pairs:
type: list
description: List of allowed address pair for this interface
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.network.contrail.AddressPair
ip_address:
type: string
description: IP for this interface
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.virtual.machine.interface.Properties:
derived_from: tosca.datatypes.Root
description: Virtual Machine Interface Properties.
@@ -815,7 +815,7 @@ org.openecomp.datatypes.heat.contrailV2.virtual.machine.interface.Properties:
type: string
description: Service Interface Type.
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.Root:
derived_from: tosca.datatypes.Root
description: >
@@ -1061,12 +1061,12 @@ org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair
type: string
description: IP Prefix.
required: false
- status: supported
+ status: SUPPORTED
ip_prefix_len:
type: integer
description: IP Prefix Len.
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.MacAddress:
derived_from: tosca.datatypes.Root
@@ -1076,7 +1076,7 @@ org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.MacAddress:
type: list
description: Mac Addresses List.
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: string
@@ -1088,7 +1088,7 @@ org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.Properties:
type: string
description: Sub Interface VLAN Tag.
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair:
derived_from: tosca.datatypes.Root
@@ -1098,17 +1098,17 @@ org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair
type: string
description: Address Mode.
required: false
- status: supported
+ status: SUPPORTED
ip:
type: org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPairIp
description: IP.
required: false
- status: supported
+ status: SUPPORTED
mac:
type: string
description: Mac.
required: false
- status: supported
+ status: SUPPORTED
org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPairs:
derived_from: tosca.datatypes.Root
@@ -1118,6 +1118,6 @@ org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair
type: list
description: Addresses pair List.
required: false
- status: supported
+ status: SUPPORTED
entry_schema:
type: org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair \ No newline at end of file
diff --git a/asdctool/src/main/resources/config/elasticsearch.yml b/asdctool/src/main/resources/config/elasticsearch.yml
deleted file mode 100644
index 38482e2b02..0000000000
--- a/asdctool/src/main/resources/config/elasticsearch.yml
+++ /dev/null
@@ -1,393 +0,0 @@
-
-elasticSearch.local: true
-elasticSearch.transportclient: false
-cluster.name: elasticsearch
-
-discovery.zen.ping.multicast.enabled: false
-discovery.zen.ping.unicast.enabled: true
-discovery.zen.ping.unicast.hosts: elasticsearch_host
-transport.client.initial_nodes:
- - elasticsearch_host:9300
-
-http.cors.enabled: true
-
-#plugin.types: "DeleteByQueryPlugin"
-
-##################### Elasticsearch Configuration Example #####################
-
-# This file contains an overview of various configuration settings,
-# targeted at operations staff. Application developers should
-# consult the guide at <http://elasticsearch.org/guide>.
-#
-# The installation procedure is covered at
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/setup.html>.
-#
-# Elasticsearch comes with reasonable defaults for most settings,
-# so you can try it out without bothering with configuration.
-#
-# Most of the time, these defaults are just fine for running a production
-# cluster. If you're fine-tuning your cluster, or wondering about the
-# effect of certain configuration option, please _do ask_ on the
-# mailing list or IRC channel [http://elasticsearch.org/community].
-
-# Any element in the configuration can be replaced with environment variables
-# by placing them in ${...} notation. For example:
-#
-# node.rack: ${RACK_ENV_VAR}
-
-# For information on supported formats and syntax for the config file, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/setup-configuration.html>
-
-
-################################### Cluster ###################################
-
-# Cluster name identifies your cluster for auto-discovery. If you're running
-# multiple clusters on the same network, make sure you're using unique names.
-#
-# cluster.name: elasticsearch
-
-
-#################################### Node #####################################
-
-# Node names are generated dynamically on startup, so you're relieved
-# from configuring them manually. You can tie this node to a specific name:
-#
-# node.name: "Franz Kafka"
-
-# Every node can be configured to allow or deny being eligible as the master,
-# and to allow or deny to store the data.
-#
-# Allow this node to be eligible as a master node (enabled by default):
-#
-# node.master: true
-#
-# Allow this node to store data (enabled by default):
-#
-# node.data: true
-
-# You can exploit these settings to design advanced cluster topologies.
-#
-# 1. You want this node to never become a master node, only to hold data.
-# This will be the "workhorse" of your cluster.
-#
-# node.master: false
-# node.data: true
-#
-# 2. You want this node to only serve as a master: to not store any data and
-# to have free resources. This will be the "coordinator" of your cluster.
-#
-# node.master: true
-# node.data: false
-#
-# 3. You want this node to be neither master nor data node, but
-# to act as a "search load balancer" (fetching data from nodes,
-# aggregating results, etc.)
-#
-# node.master: false
-# node.data: false
-
-# Use the Cluster Health API [http://localhost:9200/_cluster/health], the
-# Node Info API [http://localhost:9200/_nodes] or GUI tools
-# such as <http://www.elasticsearch.org/overview/marvel/>,
-# <http://github.com/karmi/elasticsearch-paramedic>,
-# <http://github.com/lukas-vlcek/bigdesk> and
-# <http://mobz.github.com/elasticsearch-head> to inspect the cluster state.
-
-# A node can have generic attributes associated with it, which can later be used
-# for customized shard allocation filtering, or allocation awareness. An attribute
-# is a simple key value pair, similar to node.key: value, here is an example:
-#
-# node.rack: rack314
-
-# By default, multiple nodes are allowed to start from the same installation location
-# to disable it, set the following:
-# node.max_local_storage_nodes: 1
-
-
-#################################### Index ####################################
-
-# You can set a number of options (such as shard/replica options, mapping
-# or analyzer definitions, translog settings, ...) for indices globally,
-# in this file.
-#
-# Note, that it makes more sense to configure index settings specifically for
-# a certain index, either when creating it or by using the index templates API.
-#
-# See <http://elasticsearch.org/guide/en/elasticsearch/reference/current/index-modules.html> and
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/indices-create-index.html>
-# for more information.
-
-# Set the number of shards (splits) of an index (5 by default):
-#
-# index.number_of_shards: 5
-
-# Set the number of replicas (additional copies) of an index (1 by default):
-#
-# index.number_of_replicas: 1
-
-# Note, that for development on a local machine, with small indices, it usually
-# makes sense to "disable" the distributed features:
-#
-index.number_of_shards: 1
-index.number_of_replicas: 0
-
-# These settings directly affect the performance of index and search operations
-# in your cluster. Assuming you have enough machines to hold shards and
-# replicas, the rule of thumb is:
-#
-# 1. Having more *shards* enhances the _indexing_ performance and allows to
-# _distribute_ a big index across machines.
-# 2. Having more *replicas* enhances the _search_ performance and improves the
-# cluster _availability_.
-#
-# The "number_of_shards" is a one-time setting for an index.
-#
-# The "number_of_replicas" can be increased or decreased anytime,
-# by using the Index Update Settings API.
-#
-# Elasticsearch takes care about load balancing, relocating, gathering the
-# results from nodes, etc. Experiment with different settings to fine-tune
-# your setup.
-
-# Use the Index Status API (<http://localhost:9200/A/_status>) to inspect
-# the index status.
-
-
-#################################### Paths ####################################
-path.home: /src/test/resources
-# Path to directory containing configuration (this file and logging.yml):
-#
-path.conf: /src/test/resources
-
-# Path to directory where to store index data allocated for this node.
-#
-path.data: target/esdata
-#
-# Can optionally include more than one location, causing data to be striped across
-# the locations (a la RAID 0) on a file level, favouring locations with most free
-# space on creation. For example:
-#
-# path.data: /path/to/data1,/path/to/data2
-
-# Path to temporary files:
-#
-path.work: /target/eswork
-
-# Path to log files:
-#
-path.logs: /target/eslogs
-
-# Path to where plugins are installed:
-#
-# path.plugins: /path/to/plugins
-
-
-#################################### Plugin ###################################
-
-# If a plugin listed here is not installed for current node, the node will not start.
-#
-# plugin.mandatory: mapper-attachments,lang-groovy
-
-
-################################### Memory ####################################
-
-# Elasticsearch performs poorly when JVM starts swapping: you should ensure that
-# it _never_ swaps.
-#
-# Set this property to true to lock the memory:
-#
-# bootstrap.mlockall: true
-
-# Make sure that the ES_MIN_MEM and ES_MAX_MEM environment variables are set
-# to the same value, and that the machine has enough memory to allocate
-# for Elasticsearch, leaving enough memory for the operating system itself.
-#
-# You should also make sure that the Elasticsearch process is allowed to lock
-# the memory, eg. by using `ulimit -l unlimited`.
-
-
-############################## Network And HTTP ###############################
-
-# Elasticsearch, by default, binds itself to the 0.0.0.0 address, and listens
-# on port [9200-9300] for HTTP traffic and on port [9300-9400] for node-to-node
-# communication. (the range means that if the port is busy, it will automatically
-# try the next port).
-
-# Set the bind address specifically (IPv4 or IPv6):
-#
-# network.bind_host: 192.168.0.1
-
-# Set the address other nodes will use to communicate with this node. If not
-# set, it is automatically derived. It must point to an actual IP address.
-#
-# network.publish_host: 192.168.0.1
-
-# Set both 'bind_host' and 'publish_host':
-#
-# network.host: 192.168.0.1
-
-# Set a custom port for the node to node communication (9300 by default):
-#
-# transport.tcp.port: 9300
-
-# Enable compression for all communication between nodes (disabled by default):
-#
-# transport.tcp.compress: true
-
-# Set a custom port to listen for HTTP traffic:
-#
-# http.port: 9200
-
-# Set a custom allowed content length:
-#
-# http.max_content_length: 100mb
-
-# Disable HTTP completely:
-#
-# http.enabled: false
-
-
-################################### Gateway ###################################
-
-# The gateway allows for persisting the cluster state between full cluster
-# restarts. Every change to the state (such as adding an index) will be stored
-# in the gateway, and when the cluster starts up for the first time,
-# it will read its state from the gateway.
-
-# There are several types of gateway implementations. For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-gateway.html>.
-
-# The default gateway type is the "local" gateway (recommended):
-#
-# gateway.type: local
-
-# Settings below control how and when to start the initial recovery process on
-# a full cluster restart (to reuse as much local data as possible when using shared
-# gateway).
-
-# Allow recovery process after N nodes in a cluster are up:
-#
-gateway.recover_after_nodes: 1
-
-# Set the timeout to initiate the recovery process, once the N nodes
-# from previous setting are up (accepts time value):
-#
-# gateway.recover_after_time: 5m
-
-# Set how many nodes are expected in this cluster. Once these N nodes
-# are up (and recover_after_nodes is met), begin recovery process immediately
-# (without waiting for recover_after_time to expire):
-#
-gateway.expected_nodes: 1
-
-
-############################# Recovery Throttling #############################
-
-# These settings allow to control the process of shards allocation between
-# nodes during initial recovery, replica allocation, rebalancing,
-# or when adding and removing nodes.
-
-# Set the number of concurrent recoveries happening on a node:
-#
-# 1. During the initial recovery
-#
-# cluster.routing.allocation.node_initial_primaries_recoveries: 4
-#
-# 2. During adding/removing nodes, rebalancing, etc
-#
-# cluster.routing.allocation.node_concurrent_recoveries: 2
-
-# Set to throttle throughput when recovering (eg. 100mb, by default 20mb):
-#
-# indices.recovery.max_bytes_per_sec: 20mb
-
-# Set to limit the number of open concurrent streams when
-# recovering a shard from a peer:
-#
-# indices.recovery.concurrent_streams: 5
-
-
-################################## Discovery ##################################
-
-# Discovery infrastructure ensures nodes can be found within a cluster
-# and master node is elected. Multicast discovery is the default.
-
-# Set to ensure a node sees N other master eligible nodes to be considered
-# operational within the cluster. Its recommended to set it to a higher value
-# than 1 when running more than 2 nodes in the cluster.
-#
-# discovery.zen.minimum_master_nodes: 1
-
-# Set the time to wait for ping responses from other nodes when discovering.
-# Set this option to a higher value on a slow or congested network
-# to minimize discovery failures:
-#
-# discovery.zen.ping.timeout: 3s
-
-# For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-discovery-zen.html>
-
-# Unicast discovery allows to explicitly control which nodes will be used
-# to discover the cluster. It can be used when multicast is not present,
-# or to restrict the cluster communication-wise.
-#
-# 1. Disable multicast discovery (enabled by default):
-#
-# discovery.zen.ping.multicast.enabled: false
-#
-# 2. Configure an initial list of master nodes in the cluster
-# to perform discovery when new nodes (master or data) are started:
-#
-# discovery.zen.ping.unicast.hosts: ["host1", "host2:port"]
-
-# EC2 discovery allows to use AWS EC2 API in order to perform discovery.
-#
-# You have to install the cloud-aws plugin for enabling the EC2 discovery.
-#
-# For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-discovery-ec2.html>
-#
-# See <http://elasticsearch.org/tutorials/elasticsearch-on-ec2/>
-# for a step-by-step tutorial.
-
-# GCE discovery allows to use Google Compute Engine API in order to perform discovery.
-#
-# You have to install the cloud-gce plugin for enabling the GCE discovery.
-#
-# For more information, see <https://github.com/elasticsearch/elasticsearch-cloud-gce>.
-
-# Azure discovery allows to use Azure API in order to perform discovery.
-#
-# You have to install the cloud-azure plugin for enabling the Azure discovery.
-#
-# For more information, see <https://github.com/elasticsearch/elasticsearch-cloud-azure>.
-
-################################## Slow Log ##################################
-
-# Shard level query and fetch threshold logging.
-
-#index.search.slowlog.threshold.query.warn: 10s
-#index.search.slowlog.threshold.query.info: 5s
-#index.search.slowlog.threshold.query.debug: 2s
-#index.search.slowlog.threshold.query.trace: 500ms
-
-#index.search.slowlog.threshold.fetch.warn: 1s
-#index.search.slowlog.threshold.fetch.info: 800ms
-#index.search.slowlog.threshold.fetch.debug: 500ms
-#index.search.slowlog.threshold.fetch.trace: 200ms
-
-#index.indexing.slowlog.threshold.index.warn: 10s
-#index.indexing.slowlog.threshold.index.info: 5s
-#index.indexing.slowlog.threshold.index.debug: 2s
-#index.indexing.slowlog.threshold.index.trace: 500ms
-
-################################## GC Logging ################################
-
-#monitor.jvm.gc.young.warn: 1000ms
-#monitor.jvm.gc.young.info: 700ms
-#monitor.jvm.gc.young.debug: 400ms
-
-#monitor.jvm.gc.old.warn: 10s
-#monitor.jvm.gc.old.info: 5s
-#monitor.jvm.gc.old.debug: 2s
-
diff --git a/asdctool/src/main/resources/config/groupTypes.yml b/asdctool/src/main/resources/config/groupTypes.yml
index 0c0abc9013..ce457e4add 100644
--- a/asdctool/src/main/resources/config/groupTypes.yml
+++ b/asdctool/src/main/resources/config/groupTypes.yml
@@ -6,12 +6,12 @@ org.openecomp.groups.heat.HeatStack:
type: string
description: Heat file which associate to this group/heat stack
required: true
- status: supported
+ status: SUPPORTED
description:
type: string
description: group description
required: true
- status: supported
+ status: SUPPORTED
org.openecomp.groups.VfModule:
derived_from: tosca.groups.Root
description: Grouped all heat resources which are in the same VF Module
@@ -21,7 +21,7 @@ org.openecomp.groups.VfModule:
description: Whether this module should be deployed before other modules
required: true
default: false
- status: supported
+ status: SUPPORTED
vf_module_label:
type: string
required: true
diff --git a/asdctool/src/main/resources/config/janusgraph.properties b/asdctool/src/main/resources/config/janusgraph.properties
index 5f22a08837..3e88b0d3c8 100644
--- a/asdctool/src/main/resources/config/janusgraph.properties
+++ b/asdctool/src/main/resources/config/janusgraph.properties
@@ -7,14 +7,14 @@ storage.connection-timeout=10000
storage.cassandra.keyspace=sdctitan
storage.cassandra.ssl.enabled=true
-storage.cassandra.ssl.truststore.location=C:\\gitWork\\vagrant-sdc-all-in-one\\mytmp.trust
+storage.cassandra.ssl.truststore.location=/var/lib/jetty/etc/truststore
storage.cassandra.ssl.truststore.password=Aa123456
storage.cassandra.read-consistency-level=LOCAL_QUORUM
storage.cassandra.write-consistency-level=LOCAL_QUORUM
storage.cassandra.replication-strategy-class=org.apache.cassandra.locator.NetworkTopologyStrategy
-storage.cassandra.replication-strategy-options=DC-sdc-iltlv633,1
-storage.cassandra.astyanax.local-datacenter=DC-sdc-iltlv633
+storage.cassandra.replication-strategy-options=DC-ILTLV2083,1
+storage.cassandra.astyanax.local-datacenter=DC-ILTLV2083
cache.db-cache = false
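Editor's note: the truststore location and datacenter names above are deployment-specific values. The asdctool utilities open the graph from a properties file like this one; a minimal, illustrative way to do so with the public JanusGraph API is shown below (the path is an example, and the tool resolves its own config directory at runtime).

    // Illustrative sketch only -- opens JanusGraph from the properties file edited above.
    import org.janusgraph.core.JanusGraph;
    import org.janusgraph.core.JanusGraphFactory;

    public final class OpenGraphSketch {
        public static void main(String[] args) {
            JanusGraph graph = JanusGraphFactory.open("config/janusgraph.properties"); // example path
            try {
                System.out.println("JanusGraph opened: " + graph.isOpen());
            } finally {
                graph.close(); // always release the connection to the Cassandra backend
            }
        }
    }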
diff --git a/asdctool/src/main/resources/config/tmp.trust b/asdctool/src/main/resources/config/tmp.trust
new file mode 100644
index 0000000000..f74b8f53cc
--- /dev/null
+++ b/asdctool/src/main/resources/config/tmp.trust
Binary files differ
diff --git a/asdctool/src/main/resources/elasticsearch.yml b/asdctool/src/main/resources/elasticsearch.yml
deleted file mode 100644
index 71ccdbb8f5..0000000000
--- a/asdctool/src/main/resources/elasticsearch.yml
+++ /dev/null
@@ -1,399 +0,0 @@
-
-cluster.name: elasticsearch
-
-discovery.zen.ping.multicast.enabled: false
-discovery.zen.ping.unicast.enabled: true
-discovery.zen.ping.unicast.hosts: elasticsearch_host
-
-http.cors.enabled: true
-
-path.home: "/home/vagrant/catalog-be/config"
-
-elasticSearch.transportclient: true
-
-transport.client.initial_nodes:
- - elasticsearch_host:9300
-
-#shield.user: asdc:Aa12345
-#shield.ssl.keystore.path: "/vagrant/install/resources/catalog-be/keystore/es-client.jks"
-#shield.ssl.keystore.password: Aa123456
-#shield.transport.ssl: true
-
-##################### Elasticsearch Configuration Example #####################
-
-# This file contains an overview of various configuration settings,
-# targeted at operations staff. Application developers should
-# consult the guide at <http://elasticsearch.org/guide>.
-#
-# The installation procedure is covered at
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/setup.html>.
-#
-# Elasticsearch comes with reasonable defaults for most settings,
-# so you can try it out without bothering with configuration.
-#
-# Most of the time, these defaults are just fine for running a production
-# cluster. If you're fine-tuning your cluster, or wondering about the
-# effect of certain configuration option, please _do ask_ on the
-# mailing list or IRC channel [http://elasticsearch.org/community].
-
-# Any element in the configuration can be replaced with environment variables
-# by placing them in ${...} notation. For example:
-#
-# node.rack: ${RACK_ENV_VAR}
-
-# For information on supported formats and syntax for the config file, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/setup-configuration.html>
-
-
-################################### Cluster ###################################
-
-# Cluster name identifies your cluster for auto-discovery. If you're running
-# multiple clusters on the same network, make sure you're using unique names.
-#
-# cluster.name: elasticsearch
-
-
-#################################### Node #####################################
-
-# Node names are generated dynamically on startup, so you're relieved
-# from configuring them manually. You can tie this node to a specific name:
-#
-# node.name: "Franz Kafka"
-
-# Every node can be configured to allow or deny being eligible as the master,
-# and to allow or deny to store the data.
-#
-# Allow this node to be eligible as a master node (enabled by default):
-#
-# node.master: true
-#
-# Allow this node to store data (enabled by default):
-#
-# node.data: true
-
-# You can exploit these settings to design advanced cluster topologies.
-#
-# 1. You want this node to never become a master node, only to hold data.
-# This will be the "workhorse" of your cluster.
-#
-# node.master: false
-# node.data: true
-#
-# 2. You want this node to only serve as a master: to not store any data and
-# to have free resources. This will be the "coordinator" of your cluster.
-#
-# node.master: true
-# node.data: false
-#
-# 3. You want this node to be neither master nor data node, but
-# to act as a "search load balancer" (fetching data from nodes,
-# aggregating results, etc.)
-#
-# node.master: false
-# node.data: false
-
-# Use the Cluster Health API [http://localhost:9200/_cluster/health], the
-# Node Info API [http://localhost:9200/_nodes] or GUI tools
-# such as <http://www.elasticsearch.org/overview/marvel/>,
-# <http://github.com/karmi/elasticsearch-paramedic>,
-# <http://github.com/lukas-vlcek/bigdesk> and
-# <http://mobz.github.com/elasticsearch-head> to inspect the cluster state.
-
-# A node can have generic attributes associated with it, which can later be used
-# for customized shard allocation filtering, or allocation awareness. An attribute
-# is a simple key value pair, similar to node.key: value, here is an example:
-#
-# node.rack: rack314
-
-# By default, multiple nodes are allowed to start from the same installation location
-# to disable it, set the following:
-# node.max_local_storage_nodes: 1
-
-
-#################################### Index ####################################
-
-# You can set a number of options (such as shard/replica options, mapping
-# or analyzer definitions, translog settings, ...) for indices globally,
-# in this file.
-#
-# Note, that it makes more sense to configure index settings specifically for
-# a certain index, either when creating it or by using the index templates API.
-#
-# See <http://elasticsearch.org/guide/en/elasticsearch/reference/current/index-modules.html> and
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/indices-create-index.html>
-# for more information.
-
-# Set the number of shards (splits) of an index (5 by default):
-#
-# index.number_of_shards: 5
-
-# Set the number of replicas (additional copies) of an index (1 by default):
-#
-# index.number_of_replicas: 1
-
-# Note, that for development on a local machine, with small indices, it usually
-# makes sense to "disable" the distributed features:
-#
-index.number_of_shards: 1
-index.number_of_replicas: 0
-
-# These settings directly affect the performance of index and search operations
-# in your cluster. Assuming you have enough machines to hold shards and
-# replicas, the rule of thumb is:
-#
-# 1. Having more *shards* enhances the _indexing_ performance and allows to
-# _distribute_ a big index across machines.
-# 2. Having more *replicas* enhances the _search_ performance and improves the
-# cluster _availability_.
-#
-# The "number_of_shards" is a one-time setting for an index.
-#
-# The "number_of_replicas" can be increased or decreased anytime,
-# by using the Index Update Settings API.
-#
-# Elasticsearch takes care about load balancing, relocating, gathering the
-# results from nodes, etc. Experiment with different settings to fine-tune
-# your setup.
-
-# Use the Index Status API (<http://localhost:9200/A/_status>) to inspect
-# the index status.
-
-
-#################################### Paths ####################################
-
-# Path to directory containing configuration (this file and logging.yml):
-#
-path.conf: /src/test/resources
-
-# Path to directory where to store index data allocated for this node.
-#
-path.data: target/esdata
-#
-# Can optionally include more than one location, causing data to be striped across
-# the locations (a la RAID 0) on a file level, favouring locations with most free
-# space on creation. For example:
-#
-# path.data: /path/to/data1,/path/to/data2
-
-# Path to temporary files:
-#
-path.work: /target/eswork
-
-# Path to log files:
-#
-path.logs: /target/eslogs
-
-# Path to where plugins are installed:
-#
-# path.plugins: /path/to/plugins
-
-
-#################################### Plugin ###################################
-
-# If a plugin listed here is not installed for current node, the node will not start.
-#
-# plugin.mandatory: mapper-attachments,lang-groovy
-
-
-################################### Memory ####################################
-
-# Elasticsearch performs poorly when JVM starts swapping: you should ensure that
-# it _never_ swaps.
-#
-# Set this property to true to lock the memory:
-#
-# bootstrap.mlockall: true
-
-# Make sure that the ES_MIN_MEM and ES_MAX_MEM environment variables are set
-# to the same value, and that the machine has enough memory to allocate
-# for Elasticsearch, leaving enough memory for the operating system itself.
-#
-# You should also make sure that the Elasticsearch process is allowed to lock
-# the memory, eg. by using `ulimit -l unlimited`.
-
-
-############################## Network And HTTP ###############################
-
-# Elasticsearch, by default, binds itself to the 0.0.0.0 address, and listens
-# on port [9200-9300] for HTTP traffic and on port [9300-9400] for node-to-node
-# communication. (the range means that if the port is busy, it will automatically
-# try the next port).
-
-# Set the bind address specifically (IPv4 or IPv6):
-#
-# network.bind_host: 192.168.0.1
-
-# Set the address other nodes will use to communicate with this node. If not
-# set, it is automatically derived. It must point to an actual IP address.
-#
-# network.publish_host: 192.168.0.1
-
-# Set both 'bind_host' and 'publish_host':
-#
-# network.host: 192.168.0.1
-
-# Set a custom port for the node to node communication (9300 by default):
-#
-# transport.tcp.port: 9300
-
-# Enable compression for all communication between nodes (disabled by default):
-#
-# transport.tcp.compress: true
-
-# Set a custom port to listen for HTTP traffic:
-#
-# http.port: 9200
-
-# Set a custom allowed content length:
-#
-# http.max_content_length: 100mb
-
-# Disable HTTP completely:
-#
-# http.enabled: false
-
-
-################################### Gateway ###################################
-
-# The gateway allows for persisting the cluster state between full cluster
-# restarts. Every change to the state (such as adding an index) will be stored
-# in the gateway, and when the cluster starts up for the first time,
-# it will read its state from the gateway.
-
-# There are several types of gateway implementations. For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-gateway.html>.
-
-# The default gateway type is the "local" gateway (recommended):
-#
-# gateway.type: local
-
-# Settings below control how and when to start the initial recovery process on
-# a full cluster restart (to reuse as much local data as possible when using shared
-# gateway).
-
-# Allow recovery process after N nodes in a cluster are up:
-#
-gateway.recover_after_nodes: 1
-
-# Set the timeout to initiate the recovery process, once the N nodes
-# from previous setting are up (accepts time value):
-#
-# gateway.recover_after_time: 5m
-
-# Set how many nodes are expected in this cluster. Once these N nodes
-# are up (and recover_after_nodes is met), begin recovery process immediately
-# (without waiting for recover_after_time to expire):
-#
-gateway.expected_nodes: 1
-
-
-############################# Recovery Throttling #############################
-
-# These settings allow to control the process of shards allocation between
-# nodes during initial recovery, replica allocation, rebalancing,
-# or when adding and removing nodes.
-
-# Set the number of concurrent recoveries happening on a node:
-#
-# 1. During the initial recovery
-#
-# cluster.routing.allocation.node_initial_primaries_recoveries: 4
-#
-# 2. During adding/removing nodes, rebalancing, etc
-#
-# cluster.routing.allocation.node_concurrent_recoveries: 2
-
-# Set to throttle throughput when recovering (eg. 100mb, by default 20mb):
-#
-# indices.recovery.max_bytes_per_sec: 20mb
-
-# Set to limit the number of open concurrent streams when
-# recovering a shard from a peer:
-#
-# indices.recovery.concurrent_streams: 5
-
-
-################################## Discovery ##################################
-
-# Discovery infrastructure ensures nodes can be found within a cluster
-# and master node is elected. Multicast discovery is the default.
-
-# Set to ensure a node sees N other master eligible nodes to be considered
-# operational within the cluster. Its recommended to set it to a higher value
-# than 1 when running more than 2 nodes in the cluster.
-#
-# discovery.zen.minimum_master_nodes: 1
-
-# Set the time to wait for ping responses from other nodes when discovering.
-# Set this option to a higher value on a slow or congested network
-# to minimize discovery failures:
-#
-# discovery.zen.ping.timeout: 3s
-
-# For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-discovery-zen.html>
-
-# Unicast discovery allows to explicitly control which nodes will be used
-# to discover the cluster. It can be used when multicast is not present,
-# or to restrict the cluster communication-wise.
-#
-# 1. Disable multicast discovery (enabled by default):
-#
-# discovery.zen.ping.multicast.enabled: false
-#
-# 2. Configure an initial list of master nodes in the cluster
-# to perform discovery when new nodes (master or data) are started:
-#
-# discovery.zen.ping.unicast.hosts: ["host1", "host2:port"]
-
-# EC2 discovery allows to use AWS EC2 API in order to perform discovery.
-#
-# You have to install the cloud-aws plugin for enabling the EC2 discovery.
-#
-# For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-discovery-ec2.html>
-#
-# See <http://elasticsearch.org/tutorials/elasticsearch-on-ec2/>
-# for a step-by-step tutorial.
-
-# GCE discovery allows to use Google Compute Engine API in order to perform discovery.
-#
-# You have to install the cloud-gce plugin for enabling the GCE discovery.
-#
-# For more information, see <https://github.com/elasticsearch/elasticsearch-cloud-gce>.
-
-# Azure discovery allows to use Azure API in order to perform discovery.
-#
-# You have to install the cloud-azure plugin for enabling the Azure discovery.
-#
-# For more information, see <https://github.com/elasticsearch/elasticsearch-cloud-azure>.
-
-################################## Slow Log ##################################
-
-# Shard level query and fetch threshold logging.
-
-#index.search.slowlog.threshold.query.warn: 10s
-#index.search.slowlog.threshold.query.info: 5s
-#index.search.slowlog.threshold.query.debug: 2s
-#index.search.slowlog.threshold.query.trace: 500ms
-
-#index.search.slowlog.threshold.fetch.warn: 1s
-#index.search.slowlog.threshold.fetch.info: 800ms
-#index.search.slowlog.threshold.fetch.debug: 500ms
-#index.search.slowlog.threshold.fetch.trace: 200ms
-
-#index.indexing.slowlog.threshold.index.warn: 10s
-#index.indexing.slowlog.threshold.index.info: 5s
-#index.indexing.slowlog.threshold.index.debug: 2s
-#index.indexing.slowlog.threshold.index.trace: 500ms
-
-################################## GC Logging ################################
-
-#monitor.jvm.gc.young.warn: 1000ms
-#monitor.jvm.gc.young.info: 700ms
-#monitor.jvm.gc.young.debug: 400ms
-
-#monitor.jvm.gc.old.warn: 10s
-#monitor.jvm.gc.old.info: 5s
-#monitor.jvm.gc.old.debug: 2s
-
diff --git a/asdctool/src/main/resources/es-resources/README.txt b/asdctool/src/main/resources/es-resources/README.txt
deleted file mode 100644
index a7006efa80..0000000000
--- a/asdctool/src/main/resources/es-resources/README.txt
+++ /dev/null
@@ -1,43 +0,0 @@
-ASDC elasticsearch tool
-========================
-
-This tool purpose is to ease and allow updating elasticsearch indices.
-
-In order to use the scripts, you need to verify Python is installed and to install the elasticsearc-py library:
- Verify pip is installed: $command -v pip
- if not installed:
- Download https://bootstrap.pypa.io/get-pip.py
- $python get-pip.py (see instruction: https://pip.pypa.io/en/latest/installing/#installing-with-get-pip-py)
- $pip install elasticsearch
-
-
-Tool contains:
- - index_ops.py
- This script includes operations on elasticsearch index:
-
- create index:
- $python index_ops.py -o create -a <elasticsearch hostname> -n <indexName> -f <index mapping file>
-
- delete index:
- $python index_ops.py -o delete -a <elasticsearch hostname> -n <indexName>
-
- copy index (assumes destination index already exists):
- $python index_ops.py -o move -a <elasticsearch hostname> -n <indexName> -t <toIndex>
-
-
- - file_utils.py
- This script includes operations on files
-
- - audit_migration_1602.py
- This script run full flow to migrate audit information from previous versions to ASDC 1602
- It has 2 inputs:
- 1. config_properties.py - this file holds configuration (hostname, index name, index mapping file etc.)
- 2. folder of fields mapping per elasticsearch type (map old field to new field)
- The flow of this script is as follow:
- * create temp index with correct index mapping
- * scan the audit index to get all records
- * manipulate fields data and insert it to temp index
- * delete audit index
- * create audit index with correct mapping
- * copy from temp index to newly created audit index
- * delete temp index \ No newline at end of file
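
The removed README documents the elasticsearch-py based index tooling that goes away with the move off Elasticsearch. For reference, a minimal sketch of the create-index flow it describes (host, index name and mapping file are placeholders; the delete-before-create mirrors index_ops.py further below):

import json
from elasticsearch import Elasticsearch

def create_index_from_mapping(host, index_name, mapping_file):
    client = Elasticsearch([host])
    with open(mapping_file) as fo:
        mapping = json.load(fo)
    # drop any stale index first, then create it with the supplied mapping
    if client.indices.exists(index=index_name):
        client.indices.delete(index=index_name)
    return client.indices.create(index=index_name, body=mapping)

# create_index_from_mapping("127.0.0.1", "temp_audit", "auditMappings.txt")
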
diff --git a/asdctool/src/main/resources/es-resources/auditMappings.txt b/asdctool/src/main/resources/es-resources/auditMappings.txt
deleted file mode 100644
index 7de77cccbd..0000000000
--- a/asdctool/src/main/resources/es-resources/auditMappings.txt
+++ /dev/null
@@ -1,169 +0,0 @@
-{ "settings": {}, "mappings":
-{
-"distributiondownloadevent":
-{ "properties": {
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "RESOURCE_URL": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CONSUMER_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }},
- "_all": { "enabled": true } },
- "auditinggetuebclusterevent":
-{ "properties": {
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CONSUMER_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }},
- "_all": { "enabled": true } },
- "distributionstatusevent":
-{ "properties": {
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "RESOURCE_URL": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "TOPIC_NAME":{ "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CONSUMER_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }},
- "_all": { "enabled": true } },
-"distributionengineevent":
-{ "properties": {
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "TOPIC_NAME":{ "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ROLE": { "include_in_all": true, "type": "string" },
- "API_KEY": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "D_ENV": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CONSUMER_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }},
- "_all": { "enabled": true } },
- "useraccessevent": {
- "properties": {
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER_NAME": { "include_in_all": true, "type": "string" }} ,
- "_all": { "enabled": true }},
- "resourceadminevent":
- { "properties": {
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CURR_VERSION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CURR_STATE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "MODIFIER_NAME": { "include_in_all": true, "type": "string" },
- "PREV_VERSION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "MODIFIER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "PREV_STATE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "RESOURCE_NAME": { "include_in_all": true, "type": "string" },
- "RESOURCE_TYPE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DPREV_STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DCURR_STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "COMMENT": { "include_in_all": true, "type": "string" },
- "ARTIFACT_NAME": { "include_in_all": true, "index": "not_analyzed", "type": "string" } },
- "_all": { "enabled": true }} ,
- "useradminevent":
- { "properties": {
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "MODIFIER_NAME": { "include_in_all": true, "type": "string" },
- "USER_EMAIL": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER_ROLE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER_AFTER_EMAIL": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER_BEFORE_ROLE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER_AFTER_ROLE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER_NAME": { "include_in_all": true, "type": "string" },
- "USER_BEFORE_NAME": { "include_in_all": true, "type": "string" },
- "USER_BEFORE_EMAIL": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "MODIFIER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER_AFTER_NAME": { "include_in_all": true, "type": "string" } },
- "_all": { "enabled": true } },
-"distributionnotificationevent":
- {"properties":{
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CURR_STATE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CURR_VERSION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "MODIFIER_NAME": { "include_in_all": true, "type": "string" },
- "MODIFIER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "RESOURCE_NAME": { "include_in_all": true, "type": "string" },
- "RESOURCE_TYPE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "TOPIC_NAME":{ "include_in_all": true, "index": "not_analyzed", "type": "string" }}},
-"categoryevent":
-{"properties":{
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "MODIFIER": { "include_in_all": true, "type": "string" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CATEGORY_NAME": { "include_in_all": true, "type": "string" },
- "SUB_CATEGORY_NAME": { "include_in_all": true, "type": "string" },
- "GROUPING_NAME": { "include_in_all": true, "type": "string" },
- "RESOURCE_TYPE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }},
- "_all": { "enabled": true } },
- "authevent": {
- "properties": {
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" },
- "DESC": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "URL": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "USER": { "include_in_all": true, "type": "string" } ,
- "AUTH_STATUS": { "include_in_all": true, "index": "not_analyzed","type": "string" } ,
- "REALM": { "include_in_all": true, "index": "not_analyzed","type": "string" }} ,
- "_all": { "enabled": true }},
- "consumerevent":
- {"properties":{
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "MODIFIER": { "include_in_all": true, "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "ECOMP_USER": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }},
- "_all": { "enabled": true } },
- "distributiondeployevent":
- { "properties": {
- "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "CURR_VERSION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "DESC": { "include_in_all": true, "type": "string" },
- "DID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "MODIFIER_NAME": { "include_in_all": true, "type": "string" },
- "MODIFIER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "RESOURCE_NAME": { "include_in_all": true, "type": "string" },
- "RESOURCE_TYPE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
- "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }}}}} \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/audit_migration_1602.py b/asdctool/src/main/resources/es-resources/audit_migration_1602.py
deleted file mode 100644
index 8b61ebfaf0..0000000000
--- a/asdctool/src/main/resources/es-resources/audit_migration_1602.py
+++ /dev/null
@@ -1,132 +0,0 @@
-import itertools
-import string
-import json
-from datetime import datetime
-from elasticsearch import Elasticsearch
-import elasticsearch
-import elasticsearch.helpers
-from elasticsearch.client import IndicesClient
-import sys, os
-from index_ops import createIndex, deleteIndex, copyIndex
-from config_properties import getGlobalVar
-from file_utils import readFileToJson
-
-def updateFieldNames(client, queryFrom, fromIndex, destIndex, addUTC):
- typesDir="types"
- typeFields = {}
- for filename in os.listdir(typesDir):
- print filename
- fieldNames=readFileToJson(typesDir+os.sep+filename)
-
- type=filename.split(".")[0]
- typeFields[type] = fieldNames
-
- client.indices.refresh(index=fromIndex)
- res = elasticsearch.helpers.scan(client, query=queryFrom, index=fromIndex)
-
- actions = []
- for i in res:
- res_type = i['_type']
- fieldNames = typeFields.get(res_type)
- if (fieldNames != None):
- action={}
- for field in i['_source']:
- updatedName=fieldNames.get(field)
- if (updatedName != None):
- if (field == 'timestamp' and addUTC == True):
- value+=" UTC"
- value=i['_source'].get(field)
- action[updatedName]=value
- else:
- action[field]=i['_source'].get(field)
- i['_source']=action
-
- i['_index']=destIndex
- i.pop('_id', None)
- actions.append(i)
-
- bulk_res = elasticsearch.helpers.bulk(client, actions)
- print "bulk response: ", bulk_res
-
-
-
-def updateAllrecordsWithUTC(client, queryFrom, fromIndex, destIndex):
-
- #scan indices
- client.indices.refresh(index=fromIndex)
- res = elasticsearch.helpers.scan(client, query=queryFrom, index=fromIndex)
-
- actions = []
- for i in res:
- print i
- i['_index']=destIndex
- i['_source']['TIMESTAMP']+=" UTC"
- actions.append(i)
-
- bulk_res = elasticsearch.helpers.bulk(client, actions)
- print "bulk response: ", bulk_res
-
-
-def printQueryResults(client, myQuery, indexName):
- client.indices.refresh(index=indexName)
- res = elasticsearch.helpers.scan(client, query=myQuery, index=indexName)
- for i in res:
- print i
-
-def main():
- print "start script for changing fields"
- print "================================="
-
- # initialize es
- es = Elasticsearch([getGlobalVar('host')])
-
- try:
- mapping=readFileToJson(getGlobalVar('mappingFileName'))
- res = createIndex(es, getGlobalVar('tempIndexName'), mapping)
- if (res != 0):
- print "script results in error"
- sys.exit(1)
-
- print "scan audit index and manipulate data"
- print "===================================="
-
- print "start time: ", datetime.now().time()
- updateFieldNames(es, getGlobalVar('matchAllQuery'), getGlobalVar('origIndexName'), getGlobalVar('tempIndexName'), getGlobalVar('addUTC'))
-
- print "re-create original index"
- print "========================="
- res = createIndex(es, getGlobalVar('origIndexName'), mapping)
- if (res != 0):
- print "script results in error"
- sys.exit(1)
-
- print "copy data from temp index to original"
- print "======================================="
- res = copyIndex(es, getGlobalVar('tempIndexName'), getGlobalVar('origIndexName'))
- if (res != 0):
- print "script results in error"
- sys.exit(1)
-
- print "delete temp index"
- print "=================="
- res = deleteIndex(es, getGlobalVar('tempIndexName'))
- if (res != 0):
- print "script results in error"
- sys.exit(1)
-
-
- print "end time: ", datetime.now().time()
-
- except Exception, error:
- print "An exception was thrown!"
- print str(error)
- return 2
-
-
-if __name__ == "__main__":
- main()
-
-
-
-
-
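
The removed migration script renames audit fields per event type while copying records into a temp index. A compact sketch of that renaming step, assuming the same scan/bulk helpers (field names and indices are illustrative; the value is read before the optional UTC suffix is appended):

import elasticsearch.helpers

def rename_fields(client, query, from_index, dest_index, field_map, add_utc=False):
    client.indices.refresh(index=from_index)
    actions = []
    for hit in elasticsearch.helpers.scan(client, query=query, index=from_index):
        renamed = {}
        for field, value in hit['_source'].items():
            # keep unmapped fields as-is, rename mapped ones, suffix the timestamp on request
            if field == 'timestamp' and add_utc:
                value += " UTC"
            renamed[field_map.get(field, field)] = value
        hit['_source'] = renamed
        hit['_index'] = dest_index
        hit.pop('_id', None)
        actions.append(hit)
    return elasticsearch.helpers.bulk(client, actions)
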
diff --git a/asdctool/src/main/resources/es-resources/config_properties.py b/asdctool/src/main/resources/es-resources/config_properties.py
deleted file mode 100644
index d0973001dc..0000000000
--- a/asdctool/src/main/resources/es-resources/config_properties.py
+++ /dev/null
@@ -1,11 +0,0 @@
-globalVars={
- "host": "127.0.0.1",
- "origIndexName": "temp_audit",
- "tempIndexName": "temp_audit2",
- "addUTC": False,
- "mappingFileName": "auditMappings.txt",
- "matchAllQuery":{"query": {"match_all": {}}}
-}
-
-def getGlobalVar(propertyName):
- return globalVars.get(propertyName) \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/file_utils.py b/asdctool/src/main/resources/es-resources/file_utils.py
deleted file mode 100644
index 743902084e..0000000000
--- a/asdctool/src/main/resources/es-resources/file_utils.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import itertools
-import string
-import json
-from datetime import datetime
-from elasticsearch import Elasticsearch
-import elasticsearch
-import elasticsearch.helpers
-from elasticsearch.client import IndicesClient
-import sys, os
-
-def readFileToJson(fileName):
- print "read file ", fileName
- fo=open(fileName)
- try:
- json_mapping=json.load(fo)
- fo.close()
- except ValueError:
- print "error in reading file " , fileName
- fo.close()
- raise
- return json_mapping
diff --git a/asdctool/src/main/resources/es-resources/index_ops.py b/asdctool/src/main/resources/es-resources/index_ops.py
deleted file mode 100644
index d1f3bb0021..0000000000
--- a/asdctool/src/main/resources/es-resources/index_ops.py
+++ /dev/null
@@ -1,151 +0,0 @@
-import itertools
-import string
-import json
-from datetime import datetime
-from elasticsearch import Elasticsearch
-import elasticsearch
-import elasticsearch.helpers
-from elasticsearch.client import IndicesClient, CatClient
-import sys, os, getopt
-from file_utils import readFileToJson
-from config_properties import getGlobalVar
-
-
-
-def createIndex(client, indexName, createBody):
- try:
- print "start createIndex"
- if (client == None):
- client = Elasticsearch(['localhost'])
- esIndexClient = IndicesClient(client)
- res = deleteIndex(client, indexName)
- if (res != 0):
- print "operation failed"
- return 2
- create_res=elasticsearch.client.IndicesClient.create(esIndexClient, index=indexName, body=createBody)
- print "create index response: ", create_res
- if (create_res['acknowledged'] != True):
- print "failed to create index"
- return 1
- else:
- print "index ",indexName, " created successfully"
- return 0
- except Exception, error:
- print "An exception was thrown!"
- print str(error)
- return 2
-
-
-def deleteIndex(client, indexName):
- try:
- print "start deleteIndex"
- if (client == None):
- client = Elasticsearch(['localhost'])
- esIndexClient = IndicesClient(client)
- isExists=elasticsearch.client.IndicesClient.exists(esIndexClient, indexName)
- if ( isExists == True ):
- delete_res=elasticsearch.client.IndicesClient.delete(esIndexClient, index=indexName)
- if (delete_res['acknowledged'] != True):
- print "failed to delete index"
- return 1
- else:
- print "index ",indexName, " deleted"
- return 0
- else:
- print "index not found - assume already deleted"
- return 0
- except Exception, error:
- print "An exception was thrown!"
- print str(error)
- return 2
-
-def copyIndex(client, fromIndex, toIndex):
- try:
- print "start copyIndex"
- if (client == None):
- client = Elasticsearch(['localhost'])
- client.indices.refresh(index=fromIndex)
- count=client.search(fromIndex, search_type='count')
- print "original index count: ",count
- docNum, docErrors = elasticsearch.helpers.reindex(client, fromIndex, toIndex)
- print "copy result: ", docNum, docErrors
- if (docNum != count['hits']['total']):
- print "Failed to copy all documents. expected: ", count['hits']['total'], " actual: ", docNum
- return 1
- # if (len(docErrors) != 0):
- # print "copy returned with errors"
- # print docErrors
- # return 1
- return 0
- except Exception, error:
- print "An exception was thrown!"
- print str(error)
- return 2
-
-
-def usage():
- print 'USAGE: ', sys.argv[0], '-o <operation : create | delete | move> -n <indexName> -a <address> -f <mappingFile (for create)> -t <toIndex (for move operation)>'
-
-
-
-def main(argv):
- print "start script with ", len(sys.argv), 'arguments.'
- print "=============================================="
-
- try:
- opts, args = getopt.getopt(argv, "h:o:a:n:f:t:", ["operation","address","indexName","file","toIndex"])
- except getopt.GetoptError:
- usage()
- sys.exit(2)
-
- host = None
- for opt, arg in opts:
- print opt, arg
- if opt == '-h':
- usage()
- sys.exit(2)
- elif opt in ('-f', '--file'):
- mapping=readFileToJson(arg)
- elif opt in ('-a', '--address'):
- host=arg
- elif opt in ('-o', '--operation'):
- operation=arg
- elif opt in ('-n', '--indexName'):
- indexName=arg
- elif opt in ('-t', '--toIndex'):
- destIndexName=arg
-
- if (operation == None):
- usage()
- sys.exit(2)
- elif (host == None):
- print "address is mandatory argument"
- usage()
- sys.exit(2)
- elif operation == 'create':
- print "create new index ", indexName
- client = Elasticsearch([{'host': host, 'timeout':5}] )
- res = createIndex(client, indexName, mapping)
-
- elif operation == 'delete':
- print "delete index ", indexName
- client = Elasticsearch([{'host': host, 'timeout':5}] )
- res = deleteIndex(client, indexName)
-
- elif operation == 'move':
- print "move index ", indexName, " to ", destIndexName
- client = Elasticsearch([{'host': host, 'timeout':5}] )
- res = copyIndex(client, indexName, destIndexName)
- else:
- usage()
- exit(2)
- if res != 0:
- print "ERROR: operation Failed"
- exit(1)
-
-
-
-if __name__ == "__main__":
- main(sys.argv[1:])
-
-
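
index_ops.py wrapped the create/delete/copy operations behind a small CLI. A usage sketch of the copy path with the same elasticsearch-py helper (host and index names are placeholders):

from elasticsearch import Elasticsearch
import elasticsearch.helpers

client = Elasticsearch([{'host': '127.0.0.1', 'timeout': 5}])
# copy every document from the temp index back into the audit index, as the "move" operation did
docs_copied, errors = elasticsearch.helpers.reindex(client, 'temp_audit2', 'temp_audit')
print(docs_copied, errors)
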
diff --git a/asdctool/src/main/resources/es-resources/types/auditinggetuebclusterevent.txt b/asdctool/src/main/resources/es-resources/types/auditinggetuebclusterevent.txt
deleted file mode 100644
index b7e9435f97..0000000000
--- a/asdctool/src/main/resources/es-resources/types/auditinggetuebclusterevent.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-{ "action": "ACTION",
- "timestamp": "TIMESTAMP",
- "requestId": "REQUEST_ID",
- "serviceInstanceId": "SERVICE_INSTANCE_ID",
- "desc": "DESC",
- "status": "STATUS",
- "consumerId": "CONSUMER_ID"
-} \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributiondeployevent.txt b/asdctool/src/main/resources/es-resources/types/distributiondeployevent.txt
deleted file mode 100644
index a74f0370e6..0000000000
--- a/asdctool/src/main/resources/es-resources/types/distributiondeployevent.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "action": "ACTION",
- "timestamp": "TIMESTAMP",
- "requestId": "REQUEST_ID",
- "serviceInstanceId": "SERVICE_INSTANCE_ID",
- "desc": "DESC",
- "status": "STATUS",
- "currVersion": "CURR_VERSION",
- "distributionId": "DID",
- "modifierName": "MODIFIER_NAME",
- "modifierUid": "MODIFIER_UID",
- "resourceName": "RESOURCE_NAME",
- "resourceType": "RESOURCE_TYPE"
-} \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributiondownloadevent.txt b/asdctool/src/main/resources/es-resources/types/distributiondownloadevent.txt
deleted file mode 100644
index 879c4c4231..0000000000
--- a/asdctool/src/main/resources/es-resources/types/distributiondownloadevent.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-{ "action": "ACTION",
- "timestamp": "TIMESTAMP",
- "requestId": "REQUEST_ID",
- "serviceInstanceId": "SERVICE_INSTANCE_ID",
- "desc": "DESC",
- "status": "STATUS",
- "resourceUrl": "RESOURCE_URL",
- "consumerId": "CONSUMER_ID"
-} \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributionengineevent.txt b/asdctool/src/main/resources/es-resources/types/distributionengineevent.txt
deleted file mode 100644
index a261042720..0000000000
--- a/asdctool/src/main/resources/es-resources/types/distributionengineevent.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "action": "ACTION",
- "timestamp": "TIMESTAMP",
- "requestId": "REQUEST_ID",
- "serviceInstanceId": "SERVICE_INSTANCE_ID",
- "desc": "DESC",
- "status": "STATUS",
- "consumerId": "CONSUMER_ID",
- "role": "ROLE",
- "topicName": "TOPIC_NAME",
- "apiKey": "API_KEY",
- "environmentName": "D_ENV"
-} \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributionnotificationevent.txt b/asdctool/src/main/resources/es-resources/types/distributionnotificationevent.txt
deleted file mode 100644
index 6375ead9bb..0000000000
--- a/asdctool/src/main/resources/es-resources/types/distributionnotificationevent.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "action": "ACTION",
- "timestamp": "TIMESTAMP",
- "requestId": "REQUEST_ID",
- "serviceInstanceId": "SERVICE_INSTANCE_ID",
- "desc": "DESC",
- "status": "STATUS",
- "currVersion": "CURR_VERSION",
- "currState": "CURR_STATE",
- "distributionId": "DID",
- "modifierName": "MODIFIER_NAME",
- "modifierUid": "MODIFIER_UID",
- "resourceName": "RESOURCE_NAME",
- "resourceType": "RESOURCE_TYPE",
- "topicName": "TOPIC_NAME"
-} \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributionstatusevent.txt b/asdctool/src/main/resources/es-resources/types/distributionstatusevent.txt
deleted file mode 100644
index 8fed9dd0c0..0000000000
--- a/asdctool/src/main/resources/es-resources/types/distributionstatusevent.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "action": "ACTION",
- "timestamp": "TIMESTAMP",
- "requestId": "REQUEST_ID",
- "serviceInstanceId": "SERVICE_INSTANCE_ID",
- "desc": "DESC",
- "status": "STATUS",
- "resourceUrl": "RESOURCE_URL",
- "consumerId": "CONSUMER_ID",
- "distributionId": "DID",
- "topicName": "TOPIC_NAME"
-} \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/resourceadminevent.txt b/asdctool/src/main/resources/es-resources/types/resourceadminevent.txt
deleted file mode 100644
index 4631aa3367..0000000000
--- a/asdctool/src/main/resources/es-resources/types/resourceadminevent.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-{
- "action": "ACTION",
- "timestamp": "TIMESTAMP",
- "requestId": "REQUEST_ID",
- "serviceInstanceId": "SERVICE_INSTANCE_ID",
- "desc": "DESC",
- "status": "STATUS",
- "currVersion": "CURR_VERSION",
- "currState": "CURR_STATE",
- "distributionId": "DID",
- "modifierName": "MODIFIER_NAME",
- "modifierUid": "MODIFIER_UID",
- "prevVersion": "PREV_VERSION",
- "prevState": "PREV_STATE",
- "resourceName": "RESOURCE_NAME",
- "resourceType": "RESOURCE_TYPE",
- "dPrevStatus": "DPREV_STATUS",
- "dCurrStatus": "DCURR_STATUS",
- "comment": "COMMENT",
- "artifactName": "ARTIFACT_NAME"
-} \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/useraccessevent.txt b/asdctool/src/main/resources/es-resources/types/useraccessevent.txt
deleted file mode 100644
index ebd27b55e3..0000000000
--- a/asdctool/src/main/resources/es-resources/types/useraccessevent.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "action": "ACTION",
- "timestamp": "TIMESTAMP",
- "requestId": "REQUEST_ID",
- "serviceInstanceId": "SERVICE_INSTANCE_ID",
- "desc": "DESC",
- "status": "STATUS",
- "userUid": "USER_UID",
- "userName": "USER_NAME"
-} \ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/useradminevent.txt b/asdctool/src/main/resources/es-resources/types/useradminevent.txt
deleted file mode 100644
index 15e0d9bdca..0000000000
--- a/asdctool/src/main/resources/es-resources/types/useradminevent.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "action": "ACTION",
- "timestamp": "TIMESTAMP",
- "requestId": "REQUEST_ID",
- "serviceInstanceId": "SERVICE_INSTANCE_ID",
- "desc": "DESC",
- "status": "STATUS",
- "modifierName": "MODIFIER_NAME",
- "modifierUid": "MODIFIER_UID",
- "userUid": "USER_UID",
- "userName": "USER_NAME",
- "userEmail": "USER_EMAIL",
- "userRole": "USER_ROLE",
- "userBeforeName": "USER_BEFORE_NAME",
- "userBeforeEmail": "USER_BEFORE_EMAIL",
- "userBeforeRole": "USER_BEFORE_ROLE",
- "userAfterName": "USER_AFTER_NAME",
- "userAfterEmail": "USER_AFTER_EMAIL",
- "userAfterRole": "USER_AFTER_ROLE"
-} \ No newline at end of file
diff --git a/asdctool/src/main/resources/scripts/esToCassandraMigration.sh b/asdctool/src/main/resources/scripts/esToCassandraMigration.sh
deleted file mode 100644
index 383904c661..0000000000
--- a/asdctool/src/main/resources/scripts/esToCassandraMigration.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-CURRENT_DIR=`pwd`
-BASEDIR=$(dirname $0)
-
-if [ ${BASEDIR:0:1} = "/" ]
-then
- FULL_PATH=$BASEDIR
-else
- FULL_PATH=$CURRENT_DIR/$BASEDIR
-fi
-
-source ${FULL_PATH}/baseOperation.sh
-
-mainClass="org.openecomp.sdc.asdctool.main.EsToCassandraDataMigrationMenu"
-
-command="java $JVM_LOG_FILE -cp $JARS $mainClass es-to-cassndra-migration $@"
-echo $command
-
-$command
-result=$?
-
-echo "***********************************"
-echo "***** $result *********************"
-echo "***********************************"
-
-exit $result
-
-
diff --git a/asdctool/src/main/resources/scripts/esToCassandraMigrationExportOnly.sh b/asdctool/src/main/resources/scripts/esToCassandraMigrationExportOnly.sh
deleted file mode 100644
index 2c8e346f30..0000000000
--- a/asdctool/src/main/resources/scripts/esToCassandraMigrationExportOnly.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-CURRENT_DIR=`pwd`
-BASEDIR=$(dirname $0)
-
-if [ ${BASEDIR:0:1} = "/" ]
-then
- FULL_PATH=$BASEDIR
-else
- FULL_PATH=$CURRENT_DIR/$BASEDIR
-fi
-
-source ${FULL_PATH}/baseOperation.sh
-
-mainClass="org.openecomp.sdc.asdctool.main.EsToCassandraDataMigrationMenu"
-
-command="java $JVM_LOG_FILE -cp $JARS $mainClass es-to-cassndra-migration-export-only $@"
-echo $command
-
-$command
-result=$?
-
-echo "***********************************"
-echo "***** $result *********************"
-echo "***********************************"
-
-exit $result
-
-
diff --git a/asdctool/src/main/resources/scripts/esToCassandraMigrationImportOnly.sh b/asdctool/src/main/resources/scripts/esToCassandraMigrationImportOnly.sh
deleted file mode 100644
index 9ce3ca8aae..0000000000
--- a/asdctool/src/main/resources/scripts/esToCassandraMigrationImportOnly.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-CURRENT_DIR=`pwd`
-BASEDIR=$(dirname $0)
-
-if [ ${BASEDIR:0:1} = "/" ]
-then
- FULL_PATH=$BASEDIR
-else
- FULL_PATH=$CURRENT_DIR/$BASEDIR
-fi
-
-source ${FULL_PATH}/baseOperation.sh
-
-mainClass="org.openecomp.sdc.asdctool.main.EsToCassandraDataMigrationMenu"
-
-command="java $JVM_LOG_FILE -cp $JARS $mainClass es-to-cassndra-migration-import-only $@"
-echo $command
-
-$command
-result=$?
-
-echo "***********************************"
-echo "***** $result *********************"
-echo "***********************************"
-
-exit $result
-
-
diff --git a/asdctool/src/main/resources/scripts/getConsumers.sh b/asdctool/src/main/resources/scripts/getConsumers.sh
deleted file mode 100644
index d02aac629d..0000000000
--- a/asdctool/src/main/resources/scripts/getConsumers.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-
-##############################
-# Get list of SDC consumers
-##############################
-
-
-CURRENT_DIR=`pwd`
-BASEDIR=$(dirname $0)
-
-if [ ${BASEDIR:0:1} = "/" ]
-then
- FULL_PATH=$BASEDIR
-else
- FULL_PATH=$CURRENT_DIR/$BASEDIR
-fi
-
-source ${FULL_PATH}/baseOperation.sh
-
-mainClass="org.openecomp.sdc.asdctool.main.GetConsumersMenu"
-
-command="java $JVM_LOG_FILE -Xmx1024M -cp $JARS $mainClass $@"
-echo $command
-
-$command
-result=$?
-
-
-
-echo "***********************************"
-echo "***** $result *********************"
-echo "***********************************"
-
-exit $result
-
-
-
diff --git a/asdctool/src/main/resources/scripts/python/user/exportUsers.py b/asdctool/src/main/resources/scripts/python/user/exportUsers.py
index 9e695ad8fd..ed7515cc3e 100644
--- a/asdctool/src/main/resources/scripts/python/user/exportUsers.py
+++ b/asdctool/src/main/resources/scripts/python/user/exportUsers.py
@@ -40,7 +40,8 @@ def getUsers(scheme, beHost, bePort, adminUser):
c.setopt(pycurl.HTTPHEADER, ['Content-Type: application/json', 'Accept: application/json', adminHeader])
if scheme == 'https':
- c.setopt(c.SSL_VERIFYPEER, 0)
+ c.setopt(pycurl.SSL_VERIFYPEER, 0)
+ c.setopt(pycurl.SSL_VERIFYHOST, 0)
res = c.perform()
#print(res)
diff --git a/asdctool/src/main/resources/scripts/python/user/importUsers.py b/asdctool/src/main/resources/scripts/python/user/importUsers.py
index 984b75bd4c..82ddec5139 100644
--- a/asdctool/src/main/resources/scripts/python/user/importUsers.py
+++ b/asdctool/src/main/resources/scripts/python/user/importUsers.py
@@ -70,7 +70,8 @@ def getUser(scheme, beHost, bePort, user):
c.setopt(c.WRITEFUNCTION, lambda x: None)
if scheme == 'https':
- c.setopt(c.SSL_VERIFYPEER, 0)
+ c.setopt(pycurl.SSL_VERIFYPEER, 0)
+ c.setopt(pycurl.SSL_VERIFYHOST, 0)
res = c.perform()
@@ -111,7 +112,8 @@ def createUser(scheme, beHost, bePort, user, adminUser):
c.setopt(c.WRITEFUNCTION, lambda x: None)
if scheme == 'https':
- c.setopt(c.SSL_VERIFYPEER, 0)
+ c.setopt(pycurl.SSL_VERIFYPEER, 0)
+ c.setopt(pycurl.SSL_VERIFYHOST, 0)
#print("before perform")
res = c.perform()
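
Both user scripts now disable host-name verification alongside peer verification when talking to the backend over HTTPS. A self-contained sketch of the shared pattern (URL and headers below are placeholders, not values from the scripts):

from io import BytesIO
import pycurl

def fetch(scheme, be_host, be_port, path):
    buf = BytesIO()
    c = pycurl.Curl()
    c.setopt(pycurl.URL, "%s://%s:%s%s" % (scheme, be_host, be_port, path))
    c.setopt(pycurl.HTTPHEADER, ['Content-Type: application/json', 'Accept: application/json'])
    c.setopt(pycurl.WRITEFUNCTION, buf.write)
    if scheme == 'https':
        # skip both certificate-chain and host-name checks, as the updated scripts do
        c.setopt(pycurl.SSL_VERIFYPEER, 0)
        c.setopt(pycurl.SSL_VERIFYHOST, 0)
    c.perform()
    c.close()
    return buf.getvalue()
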
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java
index 6193e7e27c..017126e86b 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/UtilsTest.java
@@ -20,9 +20,9 @@
package org.openecomp.sdc.asdctool;
-import org.janusgraph.core.JanusGraph;
import org.apache.commons.configuration.Configuration;
import org.apache.tinkerpop.gremlin.structure.Element;
+import org.janusgraph.core.JanusGraph;
import org.junit.Assert;
import org.junit.Test;
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/cli/CLIToolTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/cli/CLIToolTest.java
index 6c15b61a4e..9a5a8b3d9e 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/cli/CLIToolTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/cli/CLIToolTest.java
@@ -21,17 +21,17 @@
package org.openecomp.sdc.asdctool.cli;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
public class CLIToolTest {
private static final String OPT = "t";
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/cli/SpringCLIToolTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/cli/SpringCLIToolTest.java
deleted file mode 100644
index c91a694918..0000000000
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/cli/SpringCLIToolTest.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * ONAP SDC
- * ================================================================================
- * Copyright (C) 2019 Samsung. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END============================================
- * ===================================================================
- */
-
-package org.openecomp.sdc.asdctool.cli;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import org.junit.Test;
-import org.springframework.context.annotation.Configuration;
-
-public class SpringCLIToolTest {
-
- private static final String DIR_NAME = "test-dir";
-
- private SpringCLITool impl = new SpringCLIToolImplTest();
-
- @Test
- public void testInit() {
- // when
- final CLIToolData init = impl.init(new String[]{"-c", DIR_NAME});
-
- // then
- assertEquals(DIR_NAME, init.getCommandLine().getOptionValue("c"));
- assertTrue(init.getSpringApplicationContext().containsBean("config"));
- }
-
- private static class SpringCLIToolImplTest extends SpringCLITool {
- @Override
- protected Class<?> getSpringConfigurationClass() {
- return Config.class;
- }
-
- @Override
- protected String commandName() {
- return "run";
- }
- }
-}
-
-@Configuration
-class Config {
-}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfigurationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfigurationTest.java
index 1ee1c4a700..bc1c87d398 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfigurationTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfigurationTest.java
@@ -20,8 +20,6 @@
package org.openecomp.sdc.asdctool.configuration;
-import static org.mockito.Mockito.mock;
-
import org.junit.Test;
import org.openecomp.sdc.asdctool.impl.ArtifactUuidFix;
import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
@@ -30,7 +28,8 @@ import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.tosca.CsarUtils;
import org.openecomp.sdc.be.tosca.ToscaExportHandler;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
+
+import static org.mockito.Mockito.mock;
public class ArtifactUUIDFixConfigurationTest {
@@ -38,6 +37,9 @@ public class ArtifactUUIDFixConfigurationTest {
return new ArtifactUUIDFixConfiguration();
}
+ private static ToscaExportHandler toscaExportHandler;
+ private static CsarUtils csarUtils;
+
@Test
public void testArtifactUuidFix() throws Exception {
ArtifactUUIDFixConfiguration testSubject;
@@ -47,9 +49,7 @@ public class ArtifactUUIDFixConfigurationTest {
testSubject = createTestSubject();
JanusGraphDao janusGraphDao = mock(JanusGraphDao.class);
ToscaOperationFacade toscaOperationFacade = mock(ToscaOperationFacade.class);
- ToscaExportHandler toscaExportHandler = mock(ToscaExportHandler.class);
ArtifactCassandraDao artifactCassandraDao = mock(ArtifactCassandraDao.class);
- CsarUtils csarUtils = mock(CsarUtils.class);
result = testSubject.artifactUuidFix(janusGraphDao, toscaOperationFacade,
toscaExportHandler, artifactCassandraDao, csarUtils);
@@ -64,13 +64,4 @@ public class ArtifactUUIDFixConfigurationTest {
testSubject = createTestSubject();
}
- @Test
- public void testMapper() throws Exception {
- ArtifactUUIDFixConfiguration testSubject;
- PropertiesFactoryBean result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.mapper();
- }
}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfigurationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfigurationTest.java
deleted file mode 100644
index cbc9d37ca1..0000000000
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfigurationTest.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.configuration;
-
-import org.junit.Test;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphGenericDao;
-import org.openecomp.sdc.be.model.operations.impl.ConsumerOperation;
-
-public class GetConsumersConfigurationTest {
-
- private GetConsumersConfiguration createTestSubject() {
- return new GetConsumersConfiguration();
- }
-
- @Test
- public void testConsumerOperation() throws Exception {
- GetConsumersConfiguration testSubject;
- JanusGraphGenericDao janusGraphGenericDao = null;
- ConsumerOperation result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.consumerOperation(janusGraphGenericDao);
- }
-}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMockTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMockTest.java
deleted file mode 100644
index 42713d7248..0000000000
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMockTest.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
-
-import fj.data.Either;
-import org.junit.Test;
-import org.openecomp.sdc.be.dao.api.ResourceUploadStatus;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
-
-import java.util.List;
-
-public class ESCatalogDAOMockTest {
-
- private ESCatalogDAOMock createTestSubject() {
- return new ESCatalogDAOMock();
- }
-
- @Test
- public void testAddToIndicesMap() throws Exception {
- ESCatalogDAOMock testSubject;
- String typeName = "";
- String indexName = "";
-
- // default test
- testSubject = createTestSubject();
- testSubject.addToIndicesMap(typeName, indexName);
- }
-
- @Test
- public void testWriteArtifact() throws Exception {
- ESCatalogDAOMock testSubject;
- ESArtifactData artifactData = null;
-
- // default test
- testSubject = createTestSubject();
- testSubject.writeArtifact(artifactData);
- }
-
- @Test
- public void testGetArtifact() throws Exception {
- ESCatalogDAOMock testSubject;
- String id = "";
- Either<ESArtifactData, ResourceUploadStatus> result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.getArtifact(id);
- }
-
- @Test
- public void testGetArtifacts() throws Exception {
- ESCatalogDAOMock testSubject;
- String[] ids = new String[] { "" };
- Either<List<ESArtifactData>, ResourceUploadStatus> result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.getArtifacts(ids);
- }
-
- @Test
- public void testDeleteArtifact() throws Exception {
- ESCatalogDAOMock testSubject;
- String id = "";
-
- // default test
- testSubject = createTestSubject();
- testSubject.deleteArtifact(id);
- }
-
- @Test
- public void testDeleteAllArtifacts() throws Exception {
- ESCatalogDAOMock testSubject;
-
- // default test
- testSubject = createTestSubject();
- testSubject.deleteAllArtifacts();
- }
-}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMockTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMockTest.java
deleted file mode 100644
index 6c589fc482..0000000000
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMockTest.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
-
-import org.junit.Test;
-
-public class ElasticSearchClientMockTest {
-
- private ElasticSearchClientMock createTestSubject() {
- return new ElasticSearchClientMock();
- }
-
- @Test
- public void testInitialize() throws Exception {
- ElasticSearchClientMock testSubject;
-
- // default test
- testSubject = createTestSubject();
- testSubject.initialize();
- }
-
- @Test
- public void testSetClusterName() throws Exception {
- ElasticSearchClientMock testSubject;
- String clusterName = "";
-
- // default test
- testSubject = createTestSubject();
- testSubject.setClusterName(clusterName);
- }
-
- @Test
- public void testSetLocal() throws Exception {
- ElasticSearchClientMock testSubject;
- String strIsLocal = "";
-
- // default test
- testSubject = createTestSubject();
- testSubject.setLocal(strIsLocal);
- }
-
- @Test
- public void testSetTransportClient() throws Exception {
- ElasticSearchClientMock testSubject;
- String strIsTransportclient = "";
-
- // default test
- testSubject = createTestSubject();
- testSubject.setTransportClient(strIsTransportclient);
- }
-}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfigurationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfigurationTest.java
deleted file mode 100644
index 868e1e270a..0000000000
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfigurationTest.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
-
-import org.junit.Test;
-import org.openecomp.sdc.be.dao.api.ICatalogDAO;
-import org.openecomp.sdc.be.dao.api.IEsHealthCheckDao;
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
-
-public class ElasticSearchMocksConfigurationTest {
-
- private ElasticSearchMocksConfiguration createTestSubject() {
- return new ElasticSearchMocksConfiguration();
- }
-
- @Test
- public void testElasticSearchClientMock() throws Exception {
- ElasticSearchMocksConfiguration testSubject;
- ElasticSearchClient result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.elasticSearchClientMock();
- }
-
- @Test
- public void testEsCatalogDAOMock() throws Exception {
- ElasticSearchMocksConfiguration testSubject;
- ICatalogDAO result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.esCatalogDAOMock();
- }
-
- @Test
- public void testEsHealthCheckDaoMock() throws Exception {
- ElasticSearchMocksConfiguration testSubject;
- IEsHealthCheckDao result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.esHealthCheckDaoMock();
- }
-}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java
index 0ea4484ba4..8d599e587a 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFixTest.java
@@ -20,8 +20,8 @@
package org.openecomp.sdc.asdctool.impl;
-import org.janusgraph.core.JanusGraphVertex;
import fj.data.Either;
+import org.janusgraph.core.JanusGraphVertex;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentMatchers;
@@ -40,16 +40,26 @@ import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
-import org.openecomp.sdc.be.model.*;
+import org.openecomp.sdc.be.model.ArtifactDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.ComponentParametersView;
+import org.openecomp.sdc.be.model.DistributionStatusEnum;
+import org.openecomp.sdc.be.model.GroupDefinition;
+import org.openecomp.sdc.be.model.GroupInstance;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.Service;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
+import org.openecomp.sdc.be.resources.data.DAOArtifactData;
import org.openecomp.sdc.common.api.ArtifactTypeEnum;
import org.openecomp.sdc.common.api.Constants;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.ArrayList;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
@@ -185,11 +195,13 @@ public class ArtifactUuidFixTest {
.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll)).thenReturn(Either.left(list));
when(toscaOperationFacade.getToscaElement(ArgumentMatchers.eq(graphVertex.getUniqueId()),any(ComponentParametersView.class)))
.thenReturn(Either.left(service));
- byte[] payload = "value".getBytes();
- ESArtifactData esArtifactData =new ESArtifactData();
- esArtifactData.setDataAsArray(payload);
- Either<ESArtifactData, CassandraOperationStatus> artifactfromESres = Either.left(esArtifactData);
+ DAOArtifactData artifactData = new DAOArtifactData();
+ byte[] data = "value".getBytes();
+ ByteBuffer bufferData = ByteBuffer.wrap(data);
+ artifactData.setData(bufferData);
+
+ Either<DAOArtifactData, CassandraOperationStatus> artifactfromESres = Either.left(artifactData);
when(artifactCassandraDao.getArtifact(anyString())).thenReturn(artifactfromESres);
result = test.doFix(fixComponent, runMode);
assertEquals(false, result);
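
The hunk above migrates the stub from ESArtifactData with a raw byte array to DAOArtifactData with a ByteBuffer payload. A minimal self-contained sketch of the same stubbing pattern, assuming only the accessors visible in the hunk (setData taking a ByteBuffer, getArtifact returning an Either):

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import fj.data.Either;
import java.nio.ByteBuffer;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
import org.openecomp.sdc.be.resources.data.DAOArtifactData;

public class DaoArtifactDataStubSketch {

    // Builds the payload the way the migrated test does: the raw bytes
    // are wrapped in a ByteBuffer before being handed to the entity.
    static DAOArtifactData payload(String value) {
        DAOArtifactData artifactData = new DAOArtifactData();
        artifactData.setData(ByteBuffer.wrap(value.getBytes()));
        return artifactData;
    }

    // Stubs the Cassandra DAO so any artifact lookup returns the payload.
    static ArtifactCassandraDao stubbedDao(String value) {
        ArtifactCassandraDao dao = mock(ArtifactCassandraDao.class);
        Either<DAOArtifactData, CassandraOperationStatus> found = Either.left(payload(value));
        when(dao.getArtifact(anyString())).thenReturn(found);
        return dao;
    }
}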
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentInstanceRowTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentInstanceRowTest.java
index 8b8f9893f6..184bc332d3 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentInstanceRowTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentInstanceRowTest.java
@@ -19,11 +19,11 @@
*/
package org.openecomp.sdc.asdctool.impl;
+import org.junit.Test;
+
import static com.google.code.beanmatchers.BeanMatchers.hasValidGettersAndSetters;
import static org.junit.Assert.assertThat;
-import org.junit.Test;
-
public class ComponentInstanceRowTest {
@Test
public void shouldHaveValidGettersAndSetters() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentRowTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentRowTest.java
index e5c139c698..fb78a4e294 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentRowTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ComponentRowTest.java
@@ -19,11 +19,11 @@
*/
package org.openecomp.sdc.asdctool.impl;
+import org.junit.Test;
+
import static com.google.code.beanmatchers.BeanMatchers.hasValidGettersAndSetters;
import static org.junit.Assert.assertThat;
-import org.junit.Test;
-
public class ComponentRowTest {
@Test
public void shouldHaveValidGettersAndSetters() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/DataMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/DataMigrationTest.java
deleted file mode 100644
index 3472721dc6..0000000000
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/DataMigrationTest.java
+++ /dev/null
@@ -1,371 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-/*
-
- * Copyright (c) 2018 AT&T Intellectual Property.
-
- *
-
- * Licensed under the Apache License, Version 2.0 (the "License");
-
- * you may not use this file except in compliance with the License.
-
- * You may obtain a copy of the License at
-
- *
-
- * http://www.apache.org/licenses/LICENSE-2.0
-
- *
-
- * Unless required by applicable law or agreed to in writing, software
-
- * distributed under the License is distributed on an "AS IS" BASIS,
-
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-
- * See the License for the specific language governing permissions and
-
- * limitations under the License.
-
- */
-package org.openecomp.sdc.asdctool.impl;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.junit.MockitoJUnitRunner;
-import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.schema.Table;
-import org.openecomp.sdc.be.resources.data.auditing.*;
-import org.openecomp.sdc.common.datastructure.AuditingFieldsKey;
-
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.openecomp.sdc.common.datastructure.AuditingFieldsKey.*;
-
-@RunWith(MockitoJUnitRunner.class)
-public class DataMigrationTest {
- private final static String DESCRIPTION = "OK";
- private final static String STATUS = "200";
- private final static String SERVICE_INSTANCE_ID = "SERVICE_INSTANCE_ID";
- private final static String MODIFIER = "MODIFIER";
- private final static String REQUEST_ID = "REQUEST_ID";
- private final static String USER = "USER";
- private final static String USER_BEFORE = "USER_BEFORE";
- private final static String USER_AFTER = "USER_AFTER";
- private final static String ARTIFACT_UUID = "ARTIFACT_UUID";
-
- private final static String PREV_STATE = "PREV_STATE";
- private final static String CURR_STATE = "CURR_STATE";
- private final static String PREV_VERSION = "PREV_VERSION";
- private final static String CURR_VERSION = "CURR_VERSION";
- private final static String DPREV_STATUS = "DPREV_STATUS";
- private final static String DCURR_STATUS = "CURR_STATUS";
- private final static String INVARIANT_UUID = "INVARIANT_UUID";
- private final static String ARTIFACT_DATA = "ARTIFACT_DATA";
- private final static String COMMENT = "COMMENT";
- private final static String DISTRIBUTION_ID = "DISTRIBUTION_ID";
- private final static String TOSCA_NODE_TYPE = "TOSCA_NODE_TYPE";
- private final static String CONSUMER_ID = "CONSUMER_ID";
- private final static String RESOURCE_URL = "RESOURCE_URL";
- private final static String ENV_ID = "ENV_ID";
- private final static String VNF_WORKLOAD_CONTEXT = "VNF_WORKLOAD_CONTEXT";
- private final static String TENANT = "TENANT";
- private final static String RESOURCE_NAME = "RESOURCE_NAME";
- private final static String RESOURCE_TYPE = "RESOURCE_TYPE";
- private final static String AUTH_URL = "AUTH_URL";
- private final static String AUTH_RELM = "AUTH_RELM";
- private final static String TOPIC_NAME = "TOPIC_NAME";
-
- private final static String dateFormatPattern = "yyyy-MM-dd HH:mm:ss.SSS z";
-
- private static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(dateFormatPattern);
-
- private static ArtifactCassandraDao artifactCassandraDao = mock(ArtifactCassandraDao.class);
-
- private static AuditCassandraDao auditCassandraDaoMock = mock(AuditCassandraDao.class);
-
- private static DataMigration dataMigration = new DataMigration(auditCassandraDaoMock, artifactCassandraDao);
-
- private final static String ES_STRING = "{\"" + AuditingFieldsKey.AUDIT_ACTION + "\":\"%s\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_NAME + "\":\"" + RESOURCE_NAME + "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_TOSCA_NODE_TYPE + "\":\"" + TOSCA_NODE_TYPE +
- "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_PREV_VERSION + "\":\"" + PREV_VERSION + "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_PREV_STATE + "\":\"" + PREV_STATE +
- "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_TYPE + "\":\"" + RESOURCE_TYPE + "\", \"" + AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID + "\":\"" + SERVICE_INSTANCE_ID +
- "\", \"" + AuditingFieldsKey.AUDIT_INVARIANT_UUID + "\":\"" + INVARIANT_UUID + "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION + "\":\"" + CURR_VERSION +
- "\", \"" + AuditingFieldsKey.AUDIT_RESOURCE_CURR_STATE + "\":\"" + CURR_STATE + "\", \"" + AuditingFieldsKey.AUDIT_MODIFIER_UID + "\":\"" + MODIFIER +
- "\", \"" + AuditingFieldsKey.AUDIT_DESC + "\":\"" + DESCRIPTION + "\", \"" + AuditingFieldsKey.AUDIT_STATUS + "\":\"" + STATUS +
- "\", \"" + AuditingFieldsKey.AUDIT_REQUEST_ID + "\":\"" + REQUEST_ID + "\", \"" + AuditingFieldsKey.AUDIT_CURR_ARTIFACT_UUID + "\":\"" + ARTIFACT_UUID +
- "\", \"" + AuditingFieldsKey.AUDIT_PREV_ARTIFACT_UUID + "\":\"" + ARTIFACT_UUID + "\", \"" + AuditingFieldsKey.AUDIT_ARTIFACT_DATA + "\":\"" + ARTIFACT_DATA +
- "\", \"" + AuditingFieldsKey.AUDIT_TIMESTAMP + "\":\"%s\"}";
-
-
- private String timestampStr;
-
- private HashMap<AuditingFieldsKey, String> dataMap = new HashMap<>();
-
- @Before
- public void setUp() {
- dataMap.put(AUDIT_DESC, DESCRIPTION);
- dataMap.put(AUDIT_STATUS, STATUS);
- dataMap.put(AUDIT_REQUEST_ID, REQUEST_ID);
- dataMap.put(AUDIT_SERVICE_INSTANCE_ID, SERVICE_INSTANCE_ID);
- dataMap.put(AUDIT_MODIFIER_UID, MODIFIER);
- dataMap.put(AUDIT_USER_BEFORE, USER_BEFORE);
- dataMap.put(AUDIT_USER_UID, USER);
- dataMap.put(AUDIT_USER_AFTER, USER_AFTER);
- dataMap.put(AUDIT_AUTH_URL, AUTH_URL);
- dataMap.put(AUDIT_AUTH_REALM, AUTH_RELM);
- dataMap.put(AUDIT_PREV_ARTIFACT_UUID, ARTIFACT_UUID);
- dataMap.put(AUDIT_CURR_ARTIFACT_UUID, ARTIFACT_UUID);
- dataMap.put(AUDIT_RESOURCE_PREV_STATE, PREV_STATE);
- dataMap.put(AUDIT_RESOURCE_PREV_VERSION, PREV_VERSION);
- dataMap.put(AUDIT_RESOURCE_CURR_STATE, CURR_STATE);
- dataMap.put(AUDIT_RESOURCE_CURR_VERSION, CURR_VERSION);
- dataMap.put(AUDIT_RESOURCE_DPREV_STATUS, DPREV_STATUS);
- dataMap.put(AUDIT_RESOURCE_DCURR_STATUS, DCURR_STATUS);
- dataMap.put(AUDIT_INVARIANT_UUID, INVARIANT_UUID);
- dataMap.put(AUDIT_ARTIFACT_DATA, ARTIFACT_DATA);
- dataMap.put(AUDIT_RESOURCE_COMMENT, COMMENT);
- dataMap.put(AUDIT_DISTRIBUTION_ID, DISTRIBUTION_ID);
- dataMap.put(AUDIT_RESOURCE_TOSCA_NODE_TYPE, TOSCA_NODE_TYPE);
- dataMap.put(AUDIT_DISTRIBUTION_CONSUMER_ID, CONSUMER_ID);
- dataMap.put(AUDIT_RESOURCE_URL, RESOURCE_URL);
- dataMap.put(AUDIT_DISTRIBUTION_ENVIRONMENT_ID, ENV_ID);
- dataMap.put(AUDIT_DISTRIBUTION_VNF_WORKLOAD_CONTEXT, VNF_WORKLOAD_CONTEXT);
- dataMap.put(AUDIT_DISTRIBUTION_TENANT, TENANT);
- dataMap.put(AUDIT_RESOURCE_NAME, RESOURCE_NAME);
- dataMap.put(AUDIT_RESOURCE_TYPE, RESOURCE_TYPE);
- timestampStr = simpleDateFormat.format(new Date());
- dataMap.put(AUDIT_TIMESTAMP, timestampStr);
- dataMap.put(AUDIT_DISTRIBUTION_TOPIC_NAME, TOPIC_NAME);
-
- }
-
- @Test
- public void createUserAdminEvent() {
- dataMap.put(AUDIT_ACTION, AuditingActionEnum.ADD_USER.getName());
- AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.USER_ADMIN_EVENT);
- assertThat(AuditingActionEnum.ADD_USER.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- verifyUserAdminEvent((UserAdminEvent) event);
- }
-
- @Test
- public void createResourceAdminEvent() {
- dataMap.put(AUDIT_ACTION, AuditingActionEnum.UPDATE_RESOURCE_METADATA.getName());
- AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.RESOURCE_ADMIN_EVENT);
- assertThat(AuditingActionEnum.UPDATE_RESOURCE_METADATA.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- verifyResourceAdminEvent((ResourceAdminEvent)event);
- }
-
- @Test
- public void createDistributionNotificationEvent() {
- dataMap.put(AUDIT_ACTION, AuditingActionEnum.DISTRIBUTION_NOTIFY.getName());
- AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.DISTRIBUTION_NOTIFICATION_EVENT);
- assertThat(AuditingActionEnum.DISTRIBUTION_NOTIFY.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- verifyDistributionNotificationEvent((DistributionNotificationEvent)event);
- }
-
- @Test
- public void createEventWhenSomeFieldValuesNotSet() {
- dataMap.clear();
- dataMap.put(AUDIT_ACTION, AuditingActionEnum.AUTH_REQUEST.getName());
- AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.AUTH_EVENT);
- assertThat(AuditingActionEnum.AUTH_REQUEST.getName()).isEqualTo(event.getAction());
- assertThat(event.getStatus()).isNull();
- assertThat(event.getDesc()).isNull();
- assertThat(event.getRequestId()).isNull();
- }
-
- @Test
- public void createAuthEvent() {
- dataMap.put(AUDIT_ACTION, AuditingActionEnum.AUTH_REQUEST.getName());
- AuditingGenericEvent event = dataMigration.createAuditEvent(dataMap, Table.AUTH_EVENT);
- assertThat(AuditingActionEnum.AUTH_REQUEST.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, false);
- verifyAuthEvent((AuthEvent) event);
- }
-
- @Test
- public void createImportResourceEventFromEsObject() throws IOException{
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr), Table.RESOURCE_ADMIN_EVENT);
- assertThat(AuditingActionEnum.IMPORT_RESOURCE.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- verifyResourceAdminEvent((ResourceAdminEvent)event);
- }
-
- @Test
- public void createGetUserListEventFromEsObject() throws IOException{
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.GET_USERS_LIST.getName(), timestampStr),
- Table.GET_USERS_LIST_EVENT);
- assertThat(AuditingActionEnum.GET_USERS_LIST.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, false);
- assertThat(((GetUsersListEvent)event).getModifier()).isEqualTo(MODIFIER);
- }
-
- @Test
- public void createEventFromEsFailedWhenActionDoesNotExist() throws IOException {
- dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr),
- Table.CONSUMER_EVENT);
- }
-
- @Test(expected = NullPointerException.class)
- public void createRecordWhenJsonIsEmpty() throws IOException{
- dataMigration.createAuditRecordForCassandra("{}",
- Table.CONSUMER_EVENT);
- }
-
- @Test
- public void createEventFromUEBCluster() throws IOException
- {
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr),
- Table.DISTRIBUTION_GET_UEB_CLUSTER_EVENT);
- assertThat(AuditingActionEnum.GET_UEB_CLUSTER.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- }
-
- @Test
- public void createEventFromDistEngine() throws IOException
- {
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr),
- Table.DISTRIBUTION_ENGINE_EVENT);
- assertThat(AuditingActionEnum.IMPORT_RESOURCE.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- }
-
- @Test
- public void createEventFromDistStatus() throws IOException
- {
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr),
- Table.DISTRIBUTION_STATUS_EVENT);
- assertThat(AuditingActionEnum.DISTRIBUTION_STATUS.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- }
-
- @Test
- public void createEventFromCategory() throws IOException
- {
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr),
- Table.CATEGORY_EVENT);
- assertThat(AuditingActionEnum.IMPORT_RESOURCE.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- }
-
- @Test
- public void createFromCategoryHierarchy() throws IOException
- {
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr),
- Table.GET_CATEGORY_HIERARCHY_EVENT);
- assertThat(AuditingActionEnum.GET_CATEGORY_HIERARCHY.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, false);
- }
-
- @Test
- public void createEventFromUserAccess() throws IOException
- {
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr),
- Table.USER_ACCESS_EVENT);
- assertThat(AuditingActionEnum.USER_ACCESS.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- }
-
- @Test
- public void createEventFromDistDwnld() throws IOException
- {
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr),
- Table.DISTRIBUTION_DOWNLOAD_EVENT);
- assertThat(AuditingActionEnum.DISTRIBUTION_ARTIFACT_DOWNLOAD.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- }
-
- @Test
- public void createEventFromDistDeploy() throws IOException
- {
- AuditingGenericEvent event = dataMigration.createAuditRecordForCassandra(String.format(ES_STRING, AuditingActionEnum.IMPORT_RESOURCE.getName(), timestampStr),
- Table.DISTRIBUTION_DEPLOY_EVENT);
- assertThat(AuditingActionEnum.DISTRIBUTION_DEPLOY.getName()).isEqualTo(event.getAction());
- verifyCommonData(event, true);
- }
-
- private void verifyCommonData(AuditingGenericEvent event, boolean isServiceInstanceProvided) {
- assertThat(STATUS).isEqualTo(event.getStatus());
- if (isServiceInstanceProvided) {
- assertThat(SERVICE_INSTANCE_ID).isEqualTo(event.getServiceInstanceId());
- }
- else {
- assertThat(event.getServiceInstanceId()).isNull();
- }
- assertThat(DESCRIPTION).isEqualTo(event.getDesc());
- assertThat(REQUEST_ID).isEqualTo(event.getRequestId());
- }
-
- private void verifyUserAdminEvent(UserAdminEvent event) {
- assertThat(USER_AFTER).isEqualTo(event.getUserAfter());
- assertThat(USER_BEFORE).isEqualTo(event.getUserBefore());
- verifyTimestamp(event.getTimestamp1());
- }
-
- private void verifyAuthEvent(AuthEvent event) {
- assertThat(USER).isEqualTo(event.getUser());
- assertThat(AUTH_URL).isEqualTo(event.getUrl());
- assertThat(event.getAuthStatus()).isNull();
- assertThat(AUTH_RELM).isEqualTo(event.getRealm());
- }
-
- private void verifyTimestamp(Date date) {
- assertThat(timestampStr).isEqualTo(simpleDateFormat.format(date));
- }
-
- private void verifyResourceAdminEvent(ResourceAdminEvent event) {
- assertThat(CURR_STATE).isEqualTo(event.getCurrState());
- assertThat(CURR_VERSION).isEqualTo(event.getCurrVersion());
- assertThat(ARTIFACT_UUID).isEqualTo(event.getCurrArtifactUUID());
- assertThat(PREV_STATE).isEqualTo(event.getPrevState());
- assertThat(PREV_VERSION).isEqualTo(event.getPrevVersion());
- assertThat(ARTIFACT_UUID).isEqualTo(event.getPrevArtifactUUID());
- assertThat(INVARIANT_UUID).isEqualTo(event.getInvariantUUID());
- assertThat(ARTIFACT_DATA).isEqualTo(event.getArtifactData());
- assertThat(RESOURCE_NAME).isEqualTo(event.getResourceName());
- assertThat(RESOURCE_TYPE).isEqualTo(event.getResourceType());
- verifyTimestamp(event.getTimestamp1());
- assertThat(TOSCA_NODE_TYPE).isEqualTo( event.getToscaNodeType());
- }
-
- private void verifyDistributionNotificationEvent(DistributionNotificationEvent event) {
- assertThat(CURR_STATE).isEqualTo(event.getCurrState());
- assertThat(CURR_VERSION).isEqualTo(event.getCurrVersion());
- assertThat(TOPIC_NAME).isEqualTo(event.getTopicName());
- assertThat(DISTRIBUTION_ID).isEqualTo(event.getDid());
- assertThat(ENV_ID).isEqualTo(event.getEnvId());
- assertThat(VNF_WORKLOAD_CONTEXT).isEqualTo(event.getVnfWorkloadContext());
- assertThat(TENANT).isEqualTo(event.getTenant());
- verifyTimestamp(event.getTimestamp1());
- }
-
-}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfigTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfigTest.java
deleted file mode 100644
index f1f161b2db..0000000000
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfigTest.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.impl;
-
-import static org.mockito.Mockito.mock;
-
-import org.junit.Test;
-import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
-import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
-
-import static org.mockito.Mockito.mock;
-
-public class EsToCassandraDataMigrationConfigTest {
-
- private EsToCassandraDataMigrationConfig createTestSubject() {
- return new EsToCassandraDataMigrationConfig();
- }
-
- @Test
- public void testDataMigration() throws Exception {
- EsToCassandraDataMigrationConfig testSubject;
- DataMigration result;
- AuditCassandraDao auditCassandraDaoMock = mock(AuditCassandraDao.class);
- ArtifactCassandraDao artifactCassandraDaoMock = mock(ArtifactCassandraDao.class);
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.dataMigration(auditCassandraDaoMock, artifactCassandraDaoMock);
- }
-
- @Test
- public void testArtifactCassandraDao() throws Exception {
- EsToCassandraDataMigrationConfig testSubject;
- ArtifactCassandraDao result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.artifactCassandraDao(mock(CassandraClient.class));
- }
-
- @Test
- public void testAuditCassandraDao() throws Exception {
- EsToCassandraDataMigrationConfig testSubject;
- AuditCassandraDao result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.auditCassandraDao(mock(CassandraClient.class));
- }
-
- @Test
- public void testCassandraClient() throws Exception {
- EsToCassandraDataMigrationConfig testSubject;
- CassandraClient result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.cassandraClient();
- }
-
- @Test
- public void testSdcSchemaFilesCassandraDao() throws Exception {
- EsToCassandraDataMigrationConfig testSubject;
- SdcSchemaFilesCassandraDao result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.sdcSchemaFilesCassandraDao(mock(CassandraClient.class));
- }
-}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java
index 14b288b8f5..221d74cbe6 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidatorTest.java
@@ -20,13 +20,13 @@
package org.openecomp.sdc.asdctool.impl;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
import org.junit.Test;
import java.nio.file.NoSuchFileException;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
public class GraphJsonValidatorTest {
private GraphJsonValidator createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java
index a758526e48..87eb40d10b 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/GraphMLConverterTest.java
@@ -20,9 +20,9 @@
package org.openecomp.sdc.asdctool.impl;
-import org.janusgraph.core.JanusGraph;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.tinkerpop.gremlin.structure.Element;
+import org.janusgraph.core.JanusGraph;
import org.junit.Test;
import java.util.List;
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ProductLogicTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ProductLogicTest.java
index 41381ed055..f0845fe8f4 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ProductLogicTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/ProductLogicTest.java
@@ -20,9 +20,10 @@
package org.openecomp.sdc.asdctool.impl;
-import static org.junit.Assert.assertFalse;
import org.junit.Test;
+import static org.junit.Assert.assertFalse;
+
public class ProductLogicTest {
private ProductLogic createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandlerTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandlerTest.java
index 8dbb194c70..9e6b2e424e 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandlerTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandlerTest.java
@@ -26,9 +26,9 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.anyMap;
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGeneratorTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGeneratorTest.java
index 3be90e83b2..c297c23218 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGeneratorTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGeneratorTest.java
@@ -17,22 +17,7 @@
package org.openecomp.sdc.asdctool.impl.internal.tool;
-import static org.junit.Assert.assertEquals;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.when;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Scanner;
-
+import fj.data.Either;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -55,8 +40,6 @@ import org.openecomp.sdc.be.model.ArtifactDefinition;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
-
-import fj.data.Either;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.tosca.CsarUtils;
import org.openecomp.sdc.be.tosca.ToscaExportHandler;
@@ -66,6 +49,22 @@ import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Scanner;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.when;
+
@RunWith(MockitoJUnitRunner.class)
public class CsarGeneratorTest {
@@ -319,6 +318,8 @@ public class CsarGeneratorTest {
.replace("\t\t\n", "\n")
.replace("\t\n", "\n")
.replace("\t\t", "\t")
+ .replace("\t\r", "")
+ .replace("\r", "")
.split("\n", -1);
}
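
The two added replace calls strip carriage returns before the generated output is split on "\n", so Windows-style line endings no longer leak into the compared lines. A standalone sketch of the same normalization chain; the class and method names here are illustrative, not from the source:

public final class OutputNormalizerSketch {

    private OutputNormalizerSketch() {
    }

    // Mirrors the replace chain in the test: strip tabs that precede a
    // newline, collapse double tabs, remove carriage returns, then split
    // while keeping trailing empty strings (the -1 limit).
    static String[] normalize(String raw) {
        return raw
                .replace("\t\t\n", "\n")
                .replace("\t\n", "\n")
                .replace("\t\t", "\t")
                .replace("\t\r", "")
                .replace("\r", "")
                .split("\n", -1);
    }

    public static void main(String[] args) {
        String windowsStyle = "line one\r\nline two\t\r\n";
        for (String line : normalize(windowsStyle)) {
            System.out.println("[" + line + "]");
        }
    }
}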
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandlerTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandlerTest.java
index b7e8eae560..b9cf317e70 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandlerTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandlerTest.java
@@ -29,8 +29,8 @@
*/
package org.openecomp.sdc.asdctool.impl.internal.tool;
-import org.janusgraph.core.JanusGraphVertex;
import fj.data.Either;
+import org.janusgraph.core.JanusGraphVertex;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentMatchers;
@@ -44,6 +44,7 @@ import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.jsonjanusgraph.datamodel.TopologyTemplate;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.TopologyTemplateOperation;
+
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.HashMap;
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java
index da3c858170..ee41d626b2 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBLTest.java
@@ -20,16 +20,16 @@
package org.openecomp.sdc.asdctool.impl.validator;
-import static org.mockito.Mockito.mock;
-
-import java.util.ArrayList;
import org.junit.Test;
import org.openecomp.sdc.asdctool.impl.validator.executers.NodeToscaArtifactsValidatorExecuter;
-
-import java.util.LinkedList;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+import java.util.ArrayList;
+import java.util.LinkedList;
+
+import static org.mockito.Mockito.mock;
+
public class ArtifactToolBLTest {
private ArtifactToolBL createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java
index 57c4d9c02e..f7f8307638 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBLTest.java
@@ -20,15 +20,14 @@
package org.openecomp.sdc.asdctool.impl.validator;
-import static org.mockito.Mockito.mock;
-
-import java.util.ArrayList;
import org.junit.Test;
import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceValidatorExecuter;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import java.util.ArrayList;
import java.util.LinkedList;
-import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+
+import static org.mockito.Mockito.mock;
public class ValidationToolBLTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java
index 201d307cea..31f6424e28 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfigurationTest.java
@@ -20,13 +20,14 @@
package org.openecomp.sdc.asdctool.impl.validator.config;
-import static org.mockito.Mockito.mock;
-
-import java.util.ArrayList;
import org.junit.Test;
import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL;
import org.openecomp.sdc.asdctool.impl.validator.ValidationToolBL;
-import org.openecomp.sdc.asdctool.impl.validator.executers.*;
+import org.openecomp.sdc.asdctool.impl.validator.executers.NodeToscaArtifactsValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceToscaArtifactsValidatorExecutor;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.VFToscaArtifactValidatorExecutor;
+import org.openecomp.sdc.asdctool.impl.validator.executers.VfValidatorExecuter;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ArtifactValidationUtils;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ServiceArtifactValidationTask;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.VfArtifactValidationTask;
@@ -35,10 +36,15 @@ import org.openecomp.sdc.be.dao.JanusGraphClientStrategy;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
import org.openecomp.sdc.be.dao.impl.HealingPipelineDao;
-import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.janusgraph.JanusGraphClient;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.DerivedNodeTypeResolver;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.*;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.GroupsOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.NodeTypeOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.TopologyTemplateOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+
+import java.util.ArrayList;
import static org.mockito.Mockito.mock;
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java
index a54463bc52..176a0ca79c 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuterTest.java
@@ -20,21 +20,21 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import static org.mockito.Mockito.mock;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+import org.testng.Assert;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
-import org.testng.Assert;
+
+import static org.mockito.Mockito.mock;
public class ArtifactValidatorExecuterTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuterTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuterTest.java
index a5f816c25c..e84b89af71 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuterTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuterTest.java
@@ -20,12 +20,12 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import static org.mockito.Mockito.mock;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+import static org.mockito.Mockito.mock;
+
public class NodeToscaArtifactsValidatorExecuterTest {
private NodeToscaArtifactsValidatorExecuter createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutorTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutorTest.java
index 9c0571cbb8..51f1cc63da 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutorTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutorTest.java
@@ -20,12 +20,12 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import static org.mockito.Mockito.mock;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+import static org.mockito.Mockito.mock;
+
public class ServiceToscaArtifactsValidatorExecutorTest {
private ServiceToscaArtifactsValidatorExecutor createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuterTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuterTest.java
index 6b243b359e..16e36369e6 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuterTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuterTest.java
@@ -20,11 +20,11 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import static org.mockito.Mockito.mock;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import static org.mockito.Mockito.mock;
+
public class ServiceValidatorExecuterTest {
private ServiceValidatorExecuter createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuterTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuterTest.java
index 41c04c37ec..6e3c15d58d 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuterTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuterTest.java
@@ -20,12 +20,12 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import static org.mockito.Mockito.mock;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import static org.mockito.Mockito.mock;
+
public class TopologyTemplateValidatorExecuterTest {
private TopologyTemplateValidatorExecuter createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutorTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutorTest.java
index 1dfd353087..bf9436336d 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutorTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutorTest.java
@@ -20,12 +20,12 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import static org.mockito.Mockito.mock;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+import static org.mockito.Mockito.mock;
+
public class VFToscaArtifactValidatorExecutorTest {
private VFToscaArtifactValidatorExecutor createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuterTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuterTest.java
index 688f4ca108..457c9b0d19 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuterTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuterTest.java
@@ -20,14 +20,15 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
-import static org.mockito.Mockito.mock;
-
-import java.util.ArrayList;
-import java.util.List;
import org.junit.Test;
import org.openecomp.sdc.asdctool.impl.validator.tasks.VfValidationTask;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.mockito.Mockito.mock;
+
public class VfValidatorExecuterTest {
private VfValidatorExecuter createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java
index 50e5f87305..b5ce1abca0 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtilsTest.java
@@ -21,22 +21,7 @@
package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.when;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.stream.IntStream;
-
+import fj.data.Either;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -44,26 +29,37 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
-
import org.mockito.junit.MockitoJUnitRunner;
import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManagerHelper;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
-
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.MapArtifactDataDefinition;
import org.openecomp.sdc.be.model.jsonjanusgraph.datamodel.TopologyTemplate;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.TopologyTemplateOperation;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
-
-import fj.data.Either;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.modules.junit4.PowerMockRunnerDelegate;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.stream.IntStream;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.when;
+
@RunWith(PowerMockRunner.class)
@PowerMockRunnerDelegate(MockitoJUnitRunner.class)
@PrepareForTest({ReportManager.class})
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ServiceArtifactValidationTaskTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ServiceArtifactValidationTaskTest.java
index d55f42131a..0aab99237b 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ServiceArtifactValidationTaskTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ServiceArtifactValidationTaskTest.java
@@ -20,11 +20,10 @@
package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
-import static org.mockito.Mockito.mock;
-
import org.junit.Test;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-import org.testng.Assert;
+
+import static org.mockito.Mockito.mock;
public class ServiceArtifactValidationTaskTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTaskTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTaskTest.java
index a02f80a785..0dba9569ac 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTaskTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTaskTest.java
@@ -20,12 +20,12 @@
package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
-import static org.mockito.Mockito.mock;
-
import org.junit.Test;
import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import static org.mockito.Mockito.mock;
+
public class VfArtifactValidationTaskTest {
private VfArtifactValidationTask createTestSubject() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerHelper.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerHelper.java
index 9bcffbbf24..c832c47df4 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerHelper.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerHelper.java
@@ -20,6 +20,8 @@
package org.openecomp.sdc.asdctool.impl.validator.utils;
+import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
+
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
@@ -27,8 +29,6 @@ import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
-import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
-
public class ReportManagerHelper {
private ReportManagerHelper() {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java
index c3cc3069bf..4926c1dee1 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManagerTest.java
@@ -21,17 +21,6 @@
package org.openecomp.sdc.asdctool.impl.validator.utils;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.mockito.Mockito.when;
-
-import java.io.File;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -44,6 +33,17 @@ import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.modules.junit4.PowerMockRunnerDelegate;
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.when;
+
@RunWith(PowerMockRunner.class)
@PowerMockRunnerDelegate(MockitoJUnitRunner.class)
@PrepareForTest({ReportManager.class})
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/main/ExportImportMenuTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/main/ExportImportMenuTest.java
deleted file mode 100644
index 497e116a57..0000000000
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/main/ExportImportMenuTest.java
+++ /dev/null
@@ -1,272 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 Nokia Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.openecomp.sdc.asdctool.main;
-
-import static org.junit.Assert.assertEquals;
-
-import java.nio.file.NoSuchFileException;
-import java.security.Permission;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.contrib.java.lang.system.ExpectedSystemExit;
-import org.junit.contrib.java.lang.system.SystemOutRule;
-
-public class ExportImportMenuTest{
-
- private static final String EXPORT_USAGE = "Usage: export <janusgraph.properties> <output directory>\n";
- private static final String EXPORT_AS_GRAPH_ML_USAGE = "Usage: export-as-graph-ml <janusgraph.properties> <output directory>\n";
- private static final String IMPORT_USAGE = "Usage: import <janusgraph.properties> <graph file location>\n";
- private static final String EXPORT_USERS_USAGE = "Usage: exportusers <janusgraph.properties> <output directory>\n";
- private static final String EXPORT_WITH_REPORT_USAGE = "Usage: export-as-graph-ml-with-data-report <janusgraph.properties> <output directory>\n";
- private static final String DATA_REPORT_USAGE = "Usage: get-data-report-from-graph-ml <full path of .graphml file>\n";
- private static final String VALIDATE_JSON_USAGE = "Usage: validate-json <export graph path>\n";
- private static final String FIND_PROBLEM_USAGE = "Usage: findproblem <janusgraph.properties> <graph file location>\n";
- private static final String USAGE = DATA_REPORT_USAGE + EXPORT_USAGE + EXPORT_AS_GRAPH_ML_USAGE + EXPORT_USERS_USAGE
- + EXPORT_WITH_REPORT_USAGE + FIND_PROBLEM_USAGE + IMPORT_USAGE + VALIDATE_JSON_USAGE;
- private static final String PARAM_3 = "param3";
- private static final String PARAM_2 = "param2";
- private static final String EXPORT = "export";
- private static final String EXPORT_AS_GRAPH_ML = "export-as-graph-ml";
- private static final String NONEXISTENT = "nonexistent";
- private static final String IMPORT = "import";
- private static final String EXPORT_USERS = "exportusers";
- private static final String DATA_REPORT = "get-data-report-from-graph-ml";
- private static final String FIND_PROBLEM = "findproblem";
- private static final String VALIDATE_JSON = "validate-json";
- private static final String EXPORT_WITH_REPORT = "export-as-graph-ml-with-data-report";
-
- @Rule
- public final SystemOutRule systemOutRule = new SystemOutRule().enableLog();
-
- @Rule
- public final ExpectedSystemExit exit = ExpectedSystemExit.none();
-
- @Test
- public void testOfMainWithInvalidLengthOfArgs() throws Exception {
- String [] args = {};
- exit.expectSystemExitWithStatus(1);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfMainWithInvalidLengthOfArgs() {
- String [] args = {};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, USAGE);
- }
-
- @Test
- public void testOfMainWithDefaultOperation() throws Exception {
- String [] args = {NONEXISTENT};
- exit.expectSystemExitWithStatus(1);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfOfMainWithDefaultOperation() {
- String [] args = {NONEXISTENT};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, USAGE);
- }
-
- @Test
- public void testOfMainWithExportOperationAndInvalidNoArgs() throws Exception {
- String [] args = {EXPORT};
- exit.expectSystemExitWithStatus(1);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfMainWithExportOperationAndInvalidNoArgs(){
- String [] args = {EXPORT};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, EXPORT_USAGE);
- }
-
- @Test
- public void testOfMainWithExportOperationAndValidNoArgs() throws Exception {
- String [] args = {EXPORT, PARAM_2, PARAM_3};
- exit.expectSystemExitWithStatus(2);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfMainWithExportUsersOperationAndInvalidNoArgs(){
- String [] args = {EXPORT_USERS};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, EXPORT_USERS_USAGE);
- }
-
- @Test
- public void testOfMainWithExportUsersOperationAndValidNoArgs() throws Exception {
- String [] args = {EXPORT_USERS, PARAM_2, PARAM_3};
- exit.expectSystemExitWithStatus(2);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfMainWithImportOperationAndInvalidNoArgs(){
- String [] args = {IMPORT};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, IMPORT_USAGE);
- }
-
- @Test
- public void testOfMainWithImportOperationAndValidNoArgs() throws Exception {
- String [] args = {IMPORT, PARAM_2, PARAM_3};
- exit.expectSystemExitWithStatus(2);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfMainWithDataReportOperationAndInvalidNoArgs(){
- String [] args = {DATA_REPORT};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, DATA_REPORT_USAGE);
- }
-
- @Test
- public void testOfMainWithDataReportOperationAndValidNoArgs() throws Exception {
- String [] args = {DATA_REPORT, PARAM_2};
- exit.expectSystemExitWithStatus(2);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfMainWithExportAsGraphMLOperationAndInvalidNoArgs(){
- String [] args = {EXPORT_AS_GRAPH_ML};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, EXPORT_AS_GRAPH_ML_USAGE);
- }
-
- @Test
- public void testMainWithExportAsGraphMLOperationAndInvalidNoArgs() throws Exception {
- String [] args = {EXPORT_AS_GRAPH_ML};
- exit.expectSystemExitWithStatus(1);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOfMainWithExportAsGraphMLOperationAndValidNoArgs() throws Exception {
- String [] args = {EXPORT_AS_GRAPH_ML, PARAM_2, PARAM_3};
- exit.expectSystemExitWithStatus(2);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfMainWithFindProblemOperationAndInvalidNoArgs(){
- String [] args = {FIND_PROBLEM};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, FIND_PROBLEM_USAGE);
- }
-
- @Test
- public void testMainWithFindProblemOperationAndInvalidNoArgs() throws Exception {
- String [] args = {FIND_PROBLEM};
- exit.expectSystemExitWithStatus(1);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOfMainWithFindProblemOperationAndValidNoArgs() throws Exception {
- String [] args = {FIND_PROBLEM, PARAM_2, PARAM_3};
- exit.expectSystemExitWithStatus(2);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfMainWithExportWithReportOperationAndInvalidNoArgs(){
- String [] args = {EXPORT_WITH_REPORT};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, EXPORT_WITH_REPORT_USAGE);
- }
-
- @Test
- public void testMainWithExportWithReportOperationAndInvalidNoArgs() throws Exception {
- String [] args = {EXPORT_WITH_REPORT};
- exit.expectSystemExitWithStatus(1);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOfMainWithExportWithReportOperationAndValidNoArgs() throws Exception {
- String [] args = {EXPORT_WITH_REPORT, PARAM_2, PARAM_3};
- exit.expectSystemExitWithStatus(2);
- ExportImportMenu.main(args);
- }
-
- @Test
- public void testOutputOfMainWithValidateJsonOperationAndInvalidNoArgs(){
- String [] args = {VALIDATE_JSON};
- callMainWithoutSystemExit(args);
- String log = systemOutRule.getLog();
- assertEquals(log, VALIDATE_JSON_USAGE);
- }
-
- @Test
- public void testMainWithValidateJsonOperationAndInvalidNoArgs() throws Exception {
- String [] args = {VALIDATE_JSON};
- exit.expectSystemExitWithStatus(1);
- ExportImportMenu.main(args);
- }
-
- @Test(expected = NoSuchFileException.class)
- public void testOfMainWithValidateJsonOperationAndValidNoArgs() throws Exception {
- String [] args = {VALIDATE_JSON, PARAM_2, PARAM_3};
- ExportImportMenu.main(args);
- }
-
- private void callMainWithoutSystemExit(String[] params) {
-
- class NoExitException extends RuntimeException {}
-
- SecurityManager securityManager = System.getSecurityManager();
- System.setSecurityManager(new SecurityManager(){
-
- @Override
- public void checkPermission(Permission permission) {
- }
-
- @Override
- public void checkPermission(Permission permission, Object o) {
- }
-
- @Override
- public void checkExit(int status) {
- super.checkExit(status);
- throw new NoExitException();
- }
- });
- try {
- ExportImportMenu.main(params);
- }catch (Exception ignore){}
- System.setSecurityManager(securityManager);
- }
-
-}
\ No newline at end of file
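
[Editor's note — illustrative sketch, not part of the patch] The deleted ExportImportMenuTest guarded against JVM termination by installing a temporary SecurityManager whose checkExit throws instead of exiting, as seen in callMainWithoutSystemExit above. A minimal, self-contained version of that pattern, with hypothetical names, looks like this:

    // Illustrative sketch only; not part of the commit. Assumes JUnit 4 on the classpath.
    import java.security.Permission;

    import org.junit.Assert;
    import org.junit.Test;

    public class ExitInterceptionSketch {

        // Thrown instead of letting the JVM terminate when code under test calls System.exit.
        private static class NoExitException extends RuntimeException {
            private final int status;
            NoExitException(int status) { this.status = status; }
            int getStatus() { return status; }
        }

        // Runs the given action with a SecurityManager that converts System.exit into an exception.
        private static int captureExitStatus(Runnable action) {
            SecurityManager previous = System.getSecurityManager();
            System.setSecurityManager(new SecurityManager() {
                @Override public void checkPermission(Permission permission) { /* allow everything */ }
                @Override public void checkPermission(Permission permission, Object context) { /* allow everything */ }
                @Override public void checkExit(int status) { throw new NoExitException(status); }
            });
            try {
                action.run();
                return 0; // action returned without calling System.exit
            } catch (NoExitException e) {
                return e.getStatus();
            } finally {
                System.setSecurityManager(previous);
            }
        }

        @Test
        public void exitStatusIsCaptured() {
            int status = captureExitStatus(() -> System.exit(1));
            Assert.assertEquals(1, status);
        }
    }
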
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfigTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfigTest.java
index ac5fa6eda3..3669fa970d 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfigTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfigTest.java
@@ -26,9 +26,7 @@ import org.openecomp.sdc.asdctool.migration.dao.MigrationTasksDao;
import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver;
import org.openecomp.sdc.asdctool.migration.resolver.SpringBeansMigrationResolver;
import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
-import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
import static org.mockito.Mockito.mock;
@@ -82,13 +80,5 @@ public class MigrationSpringConfigTest {
result = testSubject.migrationTasksDao(mock(CassandraClient.class));
}
- @Test
- public void testMapper() throws Exception {
- MigrationSpringConfig testSubject;
- PropertiesFactoryBean result;
- // default test
- testSubject = createTestSubject();
- result = testSubject.mapper();
- }
}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java
index 2de6fc82f7..a9172a331b 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationToolTest.java
@@ -35,7 +35,9 @@ import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
public class SdcMigrationToolTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java
index 231d4dae95..81583033a6 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/scanner/ClassScanner.java
@@ -26,7 +26,11 @@ import org.openecomp.sdc.asdctool.migration.core.MigrationException;
import java.io.File;
import java.lang.reflect.Modifier;
import java.net.URL;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
/**
* scan and instantiate classes of given type in the class path
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java
index 5e6bab2a4e..ddcbb8bdc2 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoServiceTest.java
@@ -51,18 +51,11 @@ public class SdcRepoServiceTest {
@Test
public void testGetLatestVersion_noMinorVersionForCurrentVersion() {
- when(migrationTasksDaoMock.getLatestMinorVersion(DBVersion.CURRENT_VERSION.getMajor())).thenReturn(null);
+ when(migrationTasksDaoMock.getLatestMajorVersion()).thenReturn(DBVersion.DEFAULT_VERSION.getMajor());
+ when(migrationTasksDaoMock.getLatestMinorVersion(migrationTasksDaoMock.getLatestMajorVersion())).thenReturn(BigInteger.valueOf(0));
DBVersion latestDBVersion = testInstance.getLatestDBVersion();
- assertEquals(latestDBVersion.getMajor(), DBVersion.CURRENT_VERSION.getMajor());
- assertEquals(latestDBVersion.getMinor(), BigInteger.valueOf(Integer.MIN_VALUE));
- }
-
- @Test
- public void testGetLatestVersion() {
- when(migrationTasksDaoMock.getLatestMinorVersion(DBVersion.CURRENT_VERSION.getMajor())).thenReturn(BigInteger.TEN);
- DBVersion latestDBVersion = testInstance.getLatestDBVersion();
- assertEquals(latestDBVersion.getMajor(), DBVersion.CURRENT_VERSION.getMajor());
- assertEquals(latestDBVersion.getMinor(), BigInteger.TEN);
+ assertEquals(latestDBVersion.getMajor(), DBVersion.DEFAULT_VERSION.getMajor());
+ assertEquals(latestDBVersion.getMinor(), BigInteger.valueOf(0));
}
@Test
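
[Editor's note — illustrative fragment, not part of the patch] The revised stubbing above calls the already-stubbed getLatestMajorVersion() inside the when(...) that configures getLatestMinorVersion(...). An equivalent and arguably clearer arrangement, assuming the same mocks and static imports as the surrounding test, extracts the value first:

    // Fragment meant to be read in the context of the test above; assumes migrationTasksDaoMock,
    // DBVersion and the Mockito/BigInteger imports already present there.
    BigInteger major = DBVersion.DEFAULT_VERSION.getMajor();
    when(migrationTasksDaoMock.getLatestMajorVersion()).thenReturn(major);
    when(migrationTasksDaoMock.getLatestMinorVersion(major)).thenReturn(BigInteger.valueOf(0));
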
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java
index e45e8989fd..138abf3c83 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java
@@ -59,13 +59,13 @@ public class MigrationTasksTest {
@Test
public void testNoTaskWithVersionGreaterThanCurrentVersion() throws Exception {
- Set<Migration> migrationsWithVersionsGreaterThanCurrent = migrations.stream().filter(mig -> mig.getVersion().compareTo(DBVersion.CURRENT_VERSION) > 0)
+ Set<Migration> migrationsWithVersionsGreaterThanCurrent = migrations.stream().filter(mig -> mig.getVersion().compareTo(DBVersion.DEFAULT_VERSION) > 0)
.collect(Collectors.toSet());
if (!migrationsWithVersionsGreaterThanCurrent.isEmpty()) {
- Assert.fail(String.format("migrations tasks %s have version which is greater than DBVersion.CURRENT_VERSION %s. did you forget to update current version?",
+ Assert.fail(String.format("migrations tasks %s have version which is greater than DBVersion.DEFAULT_VERSION %s. did you forget to update current version?",
getMigrationsNameAsString(migrationsWithVersionsGreaterThanCurrent),
- DBVersion.CURRENT_VERSION.toString()));
+ DBVersion.DEFAULT_VERSION.toString()));
}
}
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java
index 8e1b482344..45212c3c6b 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandlerTest.java
@@ -33,7 +33,9 @@ import java.io.IOException;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.class)
public class XlsOutputHandlerTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java
index 64e06ac5fb..8763a1bf68 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java
@@ -51,11 +51,17 @@ import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.*;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.ComponentParametersView;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.User;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
-import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.be.model.operations.impl.CsarOperation;
+import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.http.client.api.HttpRequestHandler;
import org.openecomp.sdc.exception.ResponseFormat;
@@ -71,7 +77,10 @@ import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class UpgradeMigration1710Test {
@@ -89,7 +98,7 @@ public class UpgradeMigration1710Test {
@InjectMocks
private UpgradeMigration1710 migration = new UpgradeMigration1710();
@Mock
- private IUserAdminOperation userAdminOperation;
+ private UserAdminOperation userAdminOperation;
@Mock
private ToscaOperationFacade toscaOperationFacade;
@Mock
@@ -117,6 +126,7 @@ public class UpgradeMigration1710Test {
private static ConfigurationManager configurationManager;
private static List<String> resources = Stream.of("org.openecomp.resource.cp.extCP").collect(Collectors.toList());
private static Map<String, List<String>> resourcesForUpgrade;
+ private static Configuration.EnvironmentContext environmentContext = new Configuration.EnvironmentContext();
private Resource resource;
private Service service;
@@ -148,6 +158,8 @@ public class UpgradeMigration1710Test {
configurationManager.getConfiguration().setMaxDeleteComponents(5);
configurationManager.getConfiguration().setEnableAutoHealing(true);
configurationManager.getConfiguration().setToscaConformanceLevel("5.0");
+ environmentContext.setDefaultValue("General_Revenue-Bearing");
+ configurationManager.getConfiguration().setEnvironmentContext(environmentContext);
HashMap<String, List<String>> resourcesForUpgrade = new HashMap();
resourcesForUpgrade.put("5.0", Lists.newArrayList("port"));
configurationManager.getConfiguration().setResourcesForUpgrade(resourcesForUpgrade);
@@ -169,8 +181,6 @@ public class UpgradeMigration1710Test {
when(responseFormat.getFormattedMessage())
.thenReturn("");
- when(componentUtils.getResponseFormat(any(ActionStatus.class), any()))
- .thenReturn(responseFormat);
when(componentUtils.convertFromStorageResponse(any(), any())).thenCallRealMethod();
mockChangeComponentState();
}
@@ -525,7 +535,7 @@ public class UpgradeMigration1710Test {
when(janusGraphDao.getByCriteria(any(), any(), any(), any()))
.thenReturn(Either.left(components));
- when(janusGraphDao.getParentVertecies(any(GraphVertex.class), any(EdgeLabelEnum.class), any(JsonParseFlagEnum.class)))
+ when(janusGraphDao.getParentVertices(any(GraphVertex.class), any(EdgeLabelEnum.class), any(JsonParseFlagEnum.class)))
//1th node to upgrade
.thenReturn(Either.left(components))
//parent of the 1th node - stop recursion
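
[Editor's note — illustrative sketch, not part of the patch] The chained .thenReturn(...) calls above rely on Mockito's consecutive stubbing: each invocation of the stubbed method hands out the next configured value, and the last value repeats thereafter, which is what stops the recursion in this test. A minimal, self-contained demonstration:

    // Illustrative sketch only; not part of the commit.
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.Iterator;

    public class ConsecutiveStubbingSketch {
        public static void main(String[] args) {
            @SuppressWarnings("unchecked")
            Iterator<String> it = mock(Iterator.class);

            // First call returns "child", second call returns "parent",
            // and every later call keeps returning "parent".
            when(it.next()).thenReturn("child").thenReturn("parent");

            System.out.println(it.next()); // child
            System.out.println(it.next()); // parent
            System.out.println(it.next()); // parent
        }
    }
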
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigrationTest.java
index e07da1f102..2223d2847d 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigrationTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigrationTest.java
@@ -23,8 +23,8 @@ package org.openecomp.sdc.asdctool.migration.tasks.mig1802;
import org.junit.Test;
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
-import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.dao.janusgraph.JanusGraphClient;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.TopologyTemplateOperation;
public class SdcCatalogMigrationTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigrationTest.java
index d657625479..071a30fea2 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigrationTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigrationTest.java
@@ -20,8 +20,6 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
-import static org.junit.Assert.assertThat;
-import java.math.BigInteger;
import org.hamcrest.core.Is;
import org.hamcrest.core.IsNull;
import org.junit.Before;
@@ -32,6 +30,10 @@ import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
+import java.math.BigInteger;
+
+import static org.junit.Assert.assertThat;
+
public class ForwardPathMigrationTest {
ForwardPathMigration forwardPathMigration = null;
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigrationTest.java
index 820ea05392..d11f70c4bb 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigrationTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigrationTest.java
@@ -20,8 +20,6 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
-import static org.junit.Assert.assertThat;
-import java.math.BigInteger;
import org.hamcrest.core.Is;
import org.hamcrest.core.IsNull;
import org.junit.Before;
@@ -34,6 +32,10 @@ import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
+import java.math.BigInteger;
+
+import static org.junit.Assert.assertThat;
+
@RunWith(MockitoJUnitRunner.class)
public class ResourceLifecycleMigrationTest {
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigrationTest.java
index e40c4146f6..4721b1eb02 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigrationTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigrationTest.java
@@ -52,6 +52,8 @@ public class SDCInstancesMigrationTest{
private JanusGraphDao janusGraphDao;
@Mock
private NodeTemplateOperation nodeTemplateOperation;
+ @Mock
+ GraphVertex topologyTemplateVertex;
@Test
@@ -79,10 +81,13 @@ public class SDCInstancesMigrationTest{
vertexOrig.setJson(jsonComposition);
vertexOrig.setType(ComponentTypeEnum.SERVICE);
list.add(vertexOrig);
-
+
+
+ when(janusGraphDao.getVertexById(any())).thenReturn(Either.left(vertexOrig));
when(janusGraphDao.getByCriteria(any(), any(), any(), any() )).thenReturn(Either.left(list));
when(nodeTemplateOperation.createInstanceEdge(vertexOrig, instance)).thenReturn(StorageOperationStatus.OK);
-
+ when(janusGraphDao.commit()).thenReturn(JanusGraphOperationStatus.OK);
+
MigrationResult migrate = instancesMigration.migrate();
MigrationStatus migrationStatus = migrate.getMigrationStatus();
assertEquals(MigrationStatus.COMPLETED, migrationStatus);
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SdcArchiveMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SdcArchiveMigrationTest.java
index 51bbf705ad..03695367b0 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SdcArchiveMigrationTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SdcArchiveMigrationTest.java
@@ -20,8 +20,6 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
-import static org.junit.Assert.assertThat;
-import java.math.BigInteger;
import org.hamcrest.core.Is;
import org.hamcrest.core.IsNull;
import org.junit.Before;
@@ -32,6 +30,10 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.openecomp.sdc.asdctool.migration.core.DBVersion;
import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import java.math.BigInteger;
+
+import static org.junit.Assert.assertThat;
+
@RunWith(MockitoJUnitRunner.class)
public class SdcArchiveMigrationTest {
@Mock
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigrationTest.java
new file mode 100644
index 0000000000..328c5f3459
--- /dev/null
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigrationTest.java
@@ -0,0 +1,231 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
+
+import fj.data.Either;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.elements.GroupDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.PropertyDataDefinition;
+import org.openecomp.sdc.be.model.GroupTypeDefinition;
+import org.openecomp.sdc.be.model.PropertyDefinition;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyMap;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class SdcGroupsMigrationTest {
+
+ @Mock
+ private JanusGraphDao janusGraphDao;
+ @Mock
+ private GroupTypeOperation groupTypeOperation;
+ @Mock
+ private GraphVertex topologyTemplateVertex;
+ @Mock
+ private GraphVertex groupsVertex;
+
+ @InjectMocks
+ private SdcGroupsMigration groupsMigration;
+
+ @Before
+ public void setUp() {
+ groupsMigration = new SdcGroupsMigration(janusGraphDao, groupTypeOperation);
+ when(janusGraphDao.getVertexById(any())).thenReturn(Either.left(topologyTemplateVertex));
+ }
+
+ @Test
+ public void handleOneContainerWhenErrorHappened() {
+ when(janusGraphDao.getChildVertex(any(GraphVertex.class), eq(EdgeLabelEnum.GROUPS), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.right(JanusGraphOperationStatus.MATCH_NOT_FOUND));
+ StorageOperationStatus status = groupsMigration.handleOneContainer(topologyTemplateVertex);
+ assertEquals(StorageOperationStatus.MATCH_NOT_FOUND, status);
+ }
+
+ @Test
+ public void handleOneContainerWhenNoGroups() {
+ when(janusGraphDao.getChildVertex(any(GraphVertex.class), eq(EdgeLabelEnum.GROUPS), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.right(JanusGraphOperationStatus.NOT_FOUND));
+ StorageOperationStatus status = groupsMigration.handleOneContainer(topologyTemplateVertex);
+ assertEquals(StorageOperationStatus.OK, status);
+ }
+
+ @Test
+ public void handleOneContainerWhenGroupsShouldNotBeUpdated() {
+ when(janusGraphDao.getChildVertex(any(GraphVertex.class), eq(EdgeLabelEnum.GROUPS), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(groupsVertex));
+
+ doReturn(buildGroupInstancesMap(new String[] {"org.openecomp.groups.heat.HeatStack", "org.openecomp.groups.VfModule"}, new String[]{}))
+ .when(groupsVertex).getJson();
+ StorageOperationStatus status = groupsMigration.handleOneContainer(topologyTemplateVertex);
+ assertEquals(StorageOperationStatus.OK, status);
+ verify(janusGraphDao, times(0)).commit();
+
+ }
+
+ @Test
+ public void handleOneContainerWhenGroupsShouldBeUpdated() {
+ mockLatestGroupMapCreating();
+ mockUpgradeHappyFlow();
+ StorageOperationStatus status = groupsMigration.handleOneContainer(topologyTemplateVertex);
+ assertEquals(StorageOperationStatus.OK, status);
+ verify(janusGraphDao, times(1)).commit();
+ verify(janusGraphDao, times(0)).rollback();
+ }
+
+ private void mockUpgradeHappyFlow() {
+ when(janusGraphDao.getChildVertex(any(GraphVertex.class), eq(EdgeLabelEnum.GROUPS), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(groupsVertex));
+ when(janusGraphDao.updateVertex(any(GraphVertex.class))).thenReturn(Either.left(groupsVertex));
+ when(janusGraphDao.commit()).thenReturn(JanusGraphOperationStatus.OK);
+ doReturn(buildGroupInstancesMap(new String[] {"org.openecomp.groups.NetworkCollection", "org.openecomp.groups.VfcInstanceGroup"},
+ new String[] {"old1", "old2"}))
+ .when(groupsVertex).getJson();
+ }
+
+ @Test
+ public void handleOneContainerWhenGroupsAlreadyUpdated() {
+ mockLatestGroupMapCreating();
+ when(janusGraphDao.getChildVertex(any(GraphVertex.class), eq(EdgeLabelEnum.GROUPS), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(groupsVertex));
+ doReturn(buildGroupInstancesMap(new String[] {"org.openecomp.groups.NetworkCollection"},
+ new String[] {"a", "b", "c", "d"}))
+ .when(groupsVertex).getJson();
+ StorageOperationStatus status = groupsMigration.handleOneContainer(topologyTemplateVertex);
+ assertEquals(StorageOperationStatus.OK, status);
+ verify(janusGraphDao, times(0)).commit();
+ verify(janusGraphDao, times(1)).rollback();
+ }
+
+ @Test
+ public void handleOneContainerWhenExceptionIsThrown() {
+ when(janusGraphDao.getChildVertex(any(GraphVertex.class), eq(EdgeLabelEnum.GROUPS), eq(JsonParseFlagEnum.ParseAll)))
+ .thenThrow(new RuntimeException());
+ StorageOperationStatus status = groupsMigration.handleOneContainer(topologyTemplateVertex);
+ assertEquals(StorageOperationStatus.GENERAL_ERROR, status);
+ verify(janusGraphDao, times(0)).commit();
+ verify(janusGraphDao, times(1)).rollback();
+
+ }
+
+ // A temp remark for this test - following Commit hash: 08595ad21b0c409c69e3902232f5575963199e3e [ASDC-641] – Migration workaround for deployment artifact timeout. Reviewer: Lior.
+// @Test
+// public void migrateWhenExceptionIsThrown() {
+// List<GraphVertex> vertexList = new ArrayList<>();
+// vertexList.add(topologyTemplateVertex);
+// mockLatestGroupMapCreating();
+// when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.TOPOLOGY_TEMPLATE), eq(null), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+// .thenReturn(Either.left(vertexList));
+// when(janusGraphDao.getChildVertex(any(GraphVertex.class), eq(EdgeLabelEnum.GROUPS), eq(JsonParseFlagEnum.ParseAll)))
+// .thenThrow(new RuntimeException());
+// assertEquals(MigrationResult.MigrationStatus.FAILED, groupsMigration.migrate().getMigrationStatus());
+// verify(janusGraphDao, times(0)).commit();
+// verify(janusGraphDao, times(1)).rollback();
+//
+// }
+
+ @Test
+ public void migrateWhenGroupsShouldBeUpdated() {
+ List<GraphVertex> vertexList = new ArrayList<>();
+ vertexList.add(topologyTemplateVertex);
+ mockLatestGroupMapCreating();
+ when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.TOPOLOGY_TEMPLATE), eq(null), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(vertexList));
+ mockUpgradeHappyFlow();
+
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, groupsMigration.migrate().getMigrationStatus());
+ verify(janusGraphDao, times(1)).commit();
+ verify(janusGraphDao, times(0)).rollback();
+ }
+
+
+ private Map<String, GroupDataDefinition> buildGroupInstancesMap(String[] groupTypes, String [] propertyNames) {
+ Map<String, GroupDataDefinition> groupsMap = new HashMap<>();
+ for (String type: groupTypes) {
+ GroupDataDefinition gr = new GroupDataDefinition();
+ gr.setType(type);
+
+ gr.setProperties(createInstanceProperties(propertyNames));
+ groupsMap.put(gr.getType(), gr);
+ }
+ return groupsMap;
+ }
+
+ private void mockLatestGroupMapCreating() {
+ doReturn(Either.left(createTypeDefinition(new String[] {"a", "b", "c", "d"})))
+ .when(groupTypeOperation).getLatestGroupTypeByType(eq(SdcGroupsMigration.GroupsForUpgrade.NW_COLLECTION_GROUP_NAME.getToscaType()), eq(false));
+ doReturn(Either.left(createTypeDefinition(new String[] {"l", "m", "n", "o", "p"})))
+ .when(groupTypeOperation).getLatestGroupTypeByType(eq(SdcGroupsMigration.GroupsForUpgrade.VFC_INSTANCE_GROUP_NAME.getToscaType()), eq(false));
+ groupsMigration.loadLatestGroupTypeDefinitions();
+
+ }
+
+ private GroupTypeDefinition createTypeDefinition(String[] propertyNames) {
+ GroupTypeDefinition typeDefinition = new GroupTypeDefinition();
+ typeDefinition.setProperties(createTypeProperties(propertyNames));
+ return typeDefinition;
+ }
+
+ private List<PropertyDefinition> createTypeProperties(String[] propertyNames) {
+ List<PropertyDefinition> propertyDefinitionList = new ArrayList<>();
+ for (String name: propertyNames) {
+ PropertyDefinition propertyDefinition = new PropertyDefinition();
+ propertyDefinition.setName(name);
+ propertyDefinitionList.add(propertyDefinition);
+ }
+ return propertyDefinitionList;
+ }
+
+ private List<PropertyDataDefinition> createInstanceProperties(String[] propertyNames) {
+ List<PropertyDataDefinition> propertyDefinitionList = new ArrayList<>();
+ for (String name: propertyNames) {
+ PropertyDefinition propertyDefinition = new PropertyDefinition();
+ propertyDefinition.setName(name);
+ propertyDefinitionList.add(propertyDefinition);
+ }
+ return propertyDefinitionList;
+ }
+
+}
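
[Editor's note — illustrative sketch, not part of the patch] The new test stubs groupsVertex.getJson() with doReturn(...).when(...) rather than when(...).thenReturn(...). For plain mocks the two spellings are interchangeable; the stub-then-call form matters mainly for spies, where the call-then-stub form would execute the real method during setup. The sketch below uses a hypothetical Vertex interface rather than the SDC GraphVertex class:

    // Illustrative sketch only; not part of the commit. "Vertex" is a stand-in interface.
    import static org.mockito.Mockito.doReturn;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.spy;
    import static org.mockito.Mockito.when;

    import java.util.HashMap;
    import java.util.Map;

    public class StubbingStylesSketch {

        interface Vertex {
            Map<String, Object> getJson();
        }

        static class FileBackedVertex implements Vertex {
            @Override
            public Map<String, Object> getJson() {
                throw new IllegalStateException("would hit real storage");
            }
        }

        public static void main(String[] args) {
            // On a plain mock, both spellings behave the same.
            Vertex mockVertex = mock(Vertex.class);
            when(mockVertex.getJson()).thenReturn(new HashMap<>());
            doReturn(new HashMap<>()).when(mockVertex).getJson();

            // On a spy, when(spyVertex.getJson()) would invoke the real method first,
            // so the stub-then-call form is the safe one.
            Vertex spyVertex = spy(new FileBackedVertex());
            doReturn(new HashMap<>()).when(spyVertex).getJson();

            System.out.println(mockVertex.getJson().size() + " / " + spyVertex.getJson().size());
        }
    }
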
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigrationTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigrationTest.java
new file mode 100644
index 0000000000..172b8a63f2
--- /dev/null
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigrationTest.java
@@ -0,0 +1,213 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
+
+import fj.data.Either;
+import org.assertj.core.util.Lists;
+import org.assertj.core.util.Maps;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.CompositionDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.enums.JsonConstantKeysEnum;
+
+import java.util.HashMap;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.anyMap;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+@RunWith(MockitoJUnitRunner.class)
+public class SdcResourceIconMigrationTest {
+ @Mock
+ private JanusGraphDao janusGraphDao;
+
+ @Mock
+ private GraphVertex graphVertex;
+
+ @Mock
+ private GraphVertex topologyTemplateVertex;
+
+ @Mock
+ private CompositionDataDefinition compositionDataDefinition;
+
+ @Mock
+ private ComponentInstanceDataDefinition componentInstanceDataDefinition;
+
+ @InjectMocks
+ private SdcResourceIconMigration iconMigration;
+
+ @Before
+ public void setUp() {
+ iconMigration = new SdcResourceIconMigration(janusGraphDao);
+ when(janusGraphDao.getVertexById(any())).thenReturn(Either.left(topologyTemplateVertex));
+ when(janusGraphDao.commit()).thenReturn(JanusGraphOperationStatus.OK);
+ }
+
+
+ @Test
+ public void migrationFailedWhenNoNodeTypeDefined() {
+ when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.NODE_TYPE), anyMap(), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(Lists.emptyList()));
+ assertEquals(MigrationResult.MigrationStatus.FAILED, iconMigration.migrate().getMigrationStatus());
+ }
+
+ @Test
+ public void resourceIsNotUpdatedIfNotVL() {
+ //iconMigration.handleOneContainer(graphVertex);
+ mockInstancesNotFoundFlow();
+
+ iconMigration.updateNodeTypeIconAndStoreInMap(ResourceTypeEnum.VL);
+ assertFalse(iconMigration.updateIconInsideInstance(componentInstanceDataDefinition));
+ }
+
+
+
+ @Test
+ public void resourceIsUpdatedIfCP() {
+ //iconMigration.handleOneContainer(graphVertex);
+ mockInstancesFoundFlow();
+ when(janusGraphDao.updateVertex(any(GraphVertex.class))).thenReturn(Either.left(graphVertex));
+ iconMigration.updateNodeTypeIconAndStoreInMap(ResourceTypeEnum.CP);
+ assertTrue(iconMigration.updateIconInsideInstance(componentInstanceDataDefinition));
+ }
+
+ @Test
+ public void migrateWhenIconsAreUpdated() {
+ mockInstancesFoundFlow();
+ when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.TOPOLOGY_TEMPLATE), eq(null), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(Lists.newArrayList(topologyTemplateVertex)));
+ when(compositionDataDefinition.getComponentInstances()).thenReturn(Maps.newHashMap("a", componentInstanceDataDefinition));
+ doReturn(Maps.newHashMap(JsonConstantKeysEnum.COMPOSITION.getValue(), compositionDataDefinition)).when(topologyTemplateVertex).getJson();
+ when(janusGraphDao.updateVertex(any(GraphVertex.class))).thenReturn(Either.left(graphVertex));
+
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, iconMigration.migrate().getMigrationStatus());
+ verify(janusGraphDao, times(3)).commit();
+ }
+
+ @Test
+ public void migrateWhenIconsNotUpdated() {
+ mockInstancesNotFoundFlow();
+ when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.TOPOLOGY_TEMPLATE), eq(null), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(Lists.newArrayList(topologyTemplateVertex)));
+ when(compositionDataDefinition.getComponentInstances()).thenReturn(Maps.newHashMap("a", componentInstanceDataDefinition));
+ doReturn(Maps.newHashMap(JsonConstantKeysEnum.COMPOSITION.getValue(), compositionDataDefinition)).when(topologyTemplateVertex).getJson();
+
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, iconMigration.migrate().getMigrationStatus());
+ verify(janusGraphDao, times(2)).commit();
+ }
+
+ @Test
+ public void migrateWhenNoInstancesFound() {
+ mockInstancesNotFoundFlow();
+ when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.TOPOLOGY_TEMPLATE), eq(null), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(Lists.newArrayList(topologyTemplateVertex)));
+
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, iconMigration.migrate().getMigrationStatus());
+ verify(janusGraphDao, times(2)).commit();
+ }
+
+ // A temp remark for this test - following Commit hash: 08595ad21b0c409c69e3902232f5575963199e3e [ASDC-641] – Migration workaround for deployment artifact timeout. Reviewer: Lior.
+// @Test
+// public void migrationFailedWhenInstanceVertexUpdateFailed() {
+// mockInstancesFoundFlow();
+// when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.TOPOLOGY_TEMPLATE), eq(null), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+// .thenReturn(Either.left(Lists.newArrayList(topologyTemplateVertex)));
+// when(compositionDataDefinition.getComponentInstances()).thenReturn(Maps.newHashMap("a", componentInstanceDataDefinition));
+// doReturn(Maps.newHashMap(JsonConstantKeysEnum.COMPOSITION.getValue(), compositionDataDefinition)).when(topologyTemplateVertex).getJson();
+// when(janusGraphDao.updateVertex(any(GraphVertex.class))).thenReturn(Either.left(graphVertex))
+// .thenReturn(Either.left(graphVertex))
+// .thenReturn(Either.right(JanusGraphOperationStatus.GENERAL_ERROR));
+//
+// assertEquals(MigrationResult.MigrationStatus.FAILED, iconMigration.migrate().getMigrationStatus());
+// verify(janusGraphDao, times(2)).commit();
+// }
+
+ @Test
+ public void migrationCompletedWhenVertexJsonIsEmpty() {
+ mockInstancesFoundFlow();
+ when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.TOPOLOGY_TEMPLATE), eq(null), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(Lists.newArrayList(topologyTemplateVertex)));
+ doReturn(new HashMap<>()).when(topologyTemplateVertex).getJson();
+ when(janusGraphDao.updateVertex(any(GraphVertex.class))).thenReturn(Either.left(graphVertex));
+
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, iconMigration.migrate().getMigrationStatus());
+ verify(janusGraphDao, times(2)).commit();
+ }
+
+ @Test
+ public void migrationCompletedWhenVertexJsonIsNull() {
+ mockInstancesFoundFlow();
+ when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.TOPOLOGY_TEMPLATE), eq(null), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(Lists.newArrayList(topologyTemplateVertex)));
+ doReturn(null).when(topologyTemplateVertex).getJson();
+ when(janusGraphDao.updateVertex(any(GraphVertex.class))).thenReturn(Either.left(graphVertex));
+
+ assertEquals(MigrationResult.MigrationStatus.COMPLETED, iconMigration.migrate().getMigrationStatus());
+ verify(janusGraphDao, times(2)).commit();
+ }
+
+ @Test
+ public void migrationFailedWhenTypeUpdateFailed() {
+ mockInstancesFoundFlow();
+ when(janusGraphDao.updateVertex(any(GraphVertex.class))).thenReturn(Either.right(JanusGraphOperationStatus.GENERAL_ERROR));
+ assertEquals(MigrationResult.MigrationStatus.FAILED, iconMigration.migrate().getMigrationStatus());
+ verify(janusGraphDao, times(0)).commit();
+ }
+
+ private void mockInstancesNotFoundFlow() {
+ List<GraphVertex> nodeTypeVertexList = Lists.newArrayList(graphVertex);
+ when(graphVertex.getMetadataProperty(GraphPropertyEnum.NAME)).thenReturn("vl1");
+ when(componentInstanceDataDefinition.getComponentName()).thenReturn("other");
+ when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.NODE_TYPE), anyMap(), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(nodeTypeVertexList));
+ when(janusGraphDao.updateVertex(any(GraphVertex.class))).thenReturn(Either.left(graphVertex));
+ }
+
+ private void mockInstancesFoundFlow() {
+ when(graphVertex.getMetadataProperty(GraphPropertyEnum.NAME)).thenReturn(String.valueOf("cp1"));
+ when(componentInstanceDataDefinition.getComponentName()).thenReturn("cp1");
+ when(janusGraphDao.getByCriteria(eq(VertexTypeEnum.NODE_TYPE), anyMap(), anyMap(), eq(JsonParseFlagEnum.ParseAll)))
+ .thenReturn(Either.left(Lists.newArrayList(graphVertex)));
+ }
+
+
+}
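
[Editor's note — illustrative sketch, not part of the patch] Throughout these new tests, DAO results are stubbed as Functional Java Either values: the success payload on the left, the JanusGraphOperationStatus on the right. The self-contained sketch below uses String stand-ins for the vertex and status types to show how such values are built and read back:

    // Illustrative sketch only; not part of the commit. Uses fj.data.Either from the
    // Functional Java library already imported by the tests above.
    import fj.data.Either;

    import java.util.Collections;
    import java.util.List;

    public class EitherStubValuesSketch {
        public static void main(String[] args) {
            // Success path: the DAO would return the found vertices on the left.
            Either<List<String>, String> found = Either.left(Collections.singletonList("vertex-1"));
            if (found.isLeft()) {
                System.out.println("left value: " + found.left().value());
            }

            // Failure path: the DAO would return an operation status on the right.
            Either<List<String>, String> notFound = Either.right("NOT_FOUND");
            if (notFound.isRight()) {
                System.out.println("right value: " + notFound.right().value());
            }
        }
    }
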
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/utils/ReportWriterTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/utils/ReportWriterTest.java
index 95e50b723e..f8e2c5ca5d 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/utils/ReportWriterTest.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/utils/ReportWriterTest.java
@@ -34,6 +34,7 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
+
import java.io.IOException;
@RunWith(MockitoJUnitRunner.class)