author | LaMont, William (wl2432) <wl2432@us.att.com> | 2019-01-17 13:48:07 -0500
committer | Kajur, Harish (vk250x) <vk250x@att.com> | 2019-01-22 10:46:02 -0500
commit | 090957c44f428e13f4f0898f27e017196ad86fda (patch)
tree | 6f5fa96d5ffc17f9c3d6f81d8e0245a209036214 /src/main
parent | b81d8172dc1d09acb535b78740e27bfaf0c7dc6d (diff)
merge of v15 changes to onap
Issue-ID: AAI-2088
Change-Id: I588499af58d0e799d860b2b4362da7c48050fafd
Signed-off-by: LaMont, William (wl2432) <wl2432@us.att.com>
Signed-off-by: Kajur, Harish (vk250x) <vk250x@att.com>
Diffstat (limited to 'src/main')
64 files changed, 11869 insertions, 2010 deletions
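The first hunk below, in docker-entrypoint.sh, swaps the pre-Java-8 PermGen sizing flags (-XX:PermSize/-XX:MaxPermSize, which Java 8+ ignores with a warning) for the Metaspace equivalent. A minimal sketch, not part of this commit, of how one could confirm from inside the container which of these flags the JVM actually received (JvmFlagCheck is a hypothetical class; the management API is standard):

import java.lang.management.ManagementFactory;

// Prints the launch flags the running JVM received, e.g. to verify that
// -XX:MaxMetaspaceSize took effect after the entrypoint change.
public class JvmFlagCheck {
    public static void main(String[] args) {
        ManagementFactory.getRuntimeMXBean().getInputArguments()
                .stream()
                .filter(arg -> arg.contains("Metaspace") || arg.contains("Perm"))
                .forEach(System.out::println);
    }
}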
diff --git a/src/main/docker/docker-entrypoint.sh b/src/main/docker/docker-entrypoint.sh
index 79adc2b..76c8d5e 100644
--- a/src/main/docker/docker-entrypoint.sh
+++ b/src/main/docker/docker-entrypoint.sh
@@ -79,8 +79,7 @@ fi;
 MIN_HEAP_SIZE=${MIN_HEAP_SIZE:-512m};
 MAX_HEAP_SIZE=${MAX_HEAP_SIZE:-1024m};
-MAX_PERM_SIZE=${MAX_PERM_SIZE:-512m};
-PERM_SIZE=${PERM_SIZE:-512m};
+MAX_METASPACE_SIZE=${MAX_METASPACE_SIZE:-512m};
 JAVA_CMD="exec gosu aaiadmin java";
@@ -91,8 +90,7 @@ JVM_OPTS="${JVM_OPTS} -Xmx${MAX_HEAP_SIZE}";
 JVM_OPTS="${JVM_OPTS} -XX:+PrintGCDetails";
 JVM_OPTS="${JVM_OPTS} -XX:+PrintGCTimeStamps";
-JVM_OPTS="${JVM_OPTS} -XX:MaxPermSize=${MAX_PERM_SIZE}";
-JVM_OPTS="${JVM_OPTS} -XX:PermSize=${PERM_SIZE}";
+JVM_OPTS="${JVM_OPTS} -XX:MaxMetaspaceSize=${MAX_METASPACE_SIZE}";
 JVM_OPTS="${JVM_OPTS} -server";
 JVM_OPTS="${JVM_OPTS} -XX:NewSize=512m";
@@ -127,6 +125,7 @@ JAVA_OPTS="${JAVA_OPTS} -DAAI_BUILD_VERSION=${AAI_BUILD_VERSION}";
 JAVA_OPTS="${JAVA_OPTS} -Djava.security.egd=file:/dev/./urandom";
 JAVA_OPTS="${JAVA_OPTS} -Dlogback.configurationFile=./resources/logback.xml";
 JAVA_OPTS="${JAVA_OPTS} -Dloader.path=$APP_HOME/resources";
+JAVA_OPTS="${JAVA_OPTS} -Dgroovy.use.classvalue=true";
 JAVA_OPTS="${JAVA_OPTS} ${POST_JAVA_OPTS}";
 JAVA_MAIN_JAR=$(ls lib/aai-graphadmin*.jar);
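The next diff reworks GraphAdminApp startup: PropertyPasswordConfiguration is registered via app.addInitializers(...) so it runs before the Spring context refreshes, and SpringApplication.run is now wrapped so schema-service failures are translated to an AAIException and logged before the rethrow. A minimal sketch of an initializer of the same shape (ExampleInitializer is hypothetical; the interface is standard Spring):

import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;

// Runs before the context refreshes, which is why GraphAdminApp registers
// it with app.addInitializers(...) ahead of app.run(args).
public class ExampleInitializer
        implements ApplicationContextInitializer<ConfigurableApplicationContext> {
    @Override
    public void initialize(ConfigurableApplicationContext ctx) {
        System.out.println("context initializing, profiles active: "
                + ctx.getEnvironment().getActiveProfiles().length);
    }
}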
diff --git a/src/main/java/org/onap/aai/GraphAdminApp.java b/src/main/java/org/onap/aai/GraphAdminApp.java
index aa9c457..554a014 100644
--- a/src/main/java/org/onap/aai/GraphAdminApp.java
+++ b/src/main/java/org/onap/aai/GraphAdminApp.java
@@ -19,13 +19,21 @@
  */
 package org.onap.aai;
+import com.att.eelf.configuration.Configuration;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
+import java.util.Properties;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LogFormatTools;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.nodes.NodeIngestor;
 import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.ExceptionTranslator;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
@@ -55,6 +63,7 @@ import java.util.UUID;
 "org.onap.aai.interceptors",
 "org.onap.aai.datasnapshot",
 "org.onap.aai.datagrooming",
+ "org.onap.aai.dataexport",
 "org.onap.aai.datacleanup"
 })
 @EnableAsync
@@ -65,6 +74,9 @@ public class GraphAdminApp {
 public static final String APP_NAME = "GraphAdmin";
 private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(GraphAdminApp.class);
+ private static final String FROMAPPID = "AAI-GA";
+ private static final String TRANSID = UUID.randomUUID().toString();
+
 @Autowired
 private Environment env;
@@ -74,7 +86,10 @@ public class GraphAdminApp {
 @PostConstruct
 private void initialize(){
 loadDefaultProps();
+ initializeLoggingContext();
+ }
+ private static void initializeLoggingContext() {
 LoggingContext.save();
 LoggingContext.component("init");
 LoggingContext.partnerName("NA");
@@ -92,11 +107,27 @@ public class GraphAdminApp {
 public static void main(String[] args) throws Exception {
 loadDefaultProps();
- SpringApplication app = new SpringApplication(GraphAdminApp.class);
- app.setRegisterShutdownHook(true);
- app.addInitializers(new PropertyPasswordConfiguration());
- Environment env = app.run(args).getEnvironment();
+ ErrorLogHelper.loadProperties();
+ initializeLoggingContext();
+
+ Environment env =null;
+ AAIConfig.init();
+ try {
+ SpringApplication app = new SpringApplication(GraphAdminApp.class);
+ app.setRegisterShutdownHook(true);
+ app.addInitializers(new PropertyPasswordConfiguration());
+ env = app.run(args).getEnvironment();
+ }
+ catch(Exception ex){
+ AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(ex);
+ LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ LOGGER.error("Problems starting GraphAdminApp "+aai.getMessage());
+ ErrorLogHelper.logException(aai);
+ ErrorLogHelper.logError(aai.getCode(), ex.getMessage() + ", resolve and restart GraphAdmin");
+ throw aai;
+ }
 LOGGER.info(
 "Application '{}' is running on {}!" ,
 env.getProperty("spring.application.name"),
@@ -106,7 +137,7 @@ public class GraphAdminApp {
 // to the SchemaGenerator needs the bean and during the constructor
 // the Spring Context is not yet initialized
- AAIConfig.init();
+ AAIGraph.getInstance();
 System.setProperty("org.onap.aai.graphadmin.started", "true");
@@ -116,6 +147,8 @@ public class GraphAdminApp {
 System.out.println("GraphAdmin Microservice Started");
 }
+
+
 public static void loadDefaultProps(){
 if(System.getProperty("AJSC_HOME") == null){
@@ -126,4 +159,5 @@ public class GraphAdminApp {
 System.setProperty("BUNDLECONFIG_DIR", "src/main/resources");
 }
 }
+
 }
diff --git a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
index a281d70..f4372c1 100644
--- a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
+++ b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
@@ -1,302 +1,302 @@
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.aai.datacleanup;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.nio.file.attribute.FileTime;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
-
-import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.logging.ErrorLogHelper;
-import org.onap.aai.util.AAIConfig;
-import org.onap.aai.util.AAIConstants;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.stereotype.Component;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-
-@Component
-@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
-public class DataCleanupTasks {
-
- private static final EELFLogger logger = EELFManager.getInstance().getLogger(DataCleanupTasks.class);
- private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMdd");
- /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.
- logDir is the {project_home}/logs
- archiveDir is the ARCHIVE directory where the files will be stored after 5 days.
- ageZip is the number of days after which the file will be moved to the ARCHIVE folder.
- ageDelete is the number of days after which the data files will be deleted i.e after 30 days.
- */
- @Scheduled(cron = "${datagroomingcleanup.cron}" )
- public void dataGroomingCleanup() throws AAIException, Exception {
-
- logger.info("Started cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));
-
- try {
- String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
- String dataGroomingDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataGrooming";
- String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
- String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";
- File path = new File(dataGroomingDir);
- File archivepath = new File(archiveDir);
- File dataGroomingPath = new File(dataGroomingArcDir);
-
- logger.info("The logDir is " + logDir);
- logger.info("The dataGroomingDir is " + dataGroomingDir);
- logger.info("The archiveDir is " + archiveDir );
- logger.info("The dataGroomingArcDir is " + dataGroomingArcDir );
-
- boolean exists = directoryExists(logDir);
- logger.info("Directory" + logDir + "exists: " + exists);
- if(!exists)
- logger.error("The directory" + logDir +"does not exists");
-
- Integer ageZip = AAIConfig.getInt("aai.datagrooming.agezip");
- Integer ageDelete = AAIConfig.getInt("aai.datagrooming.agedelete");
-
- Date newAgeZip = getZipDate(ageZip);
-
- //Iterate through the dataGroomingDir
- File[] listFiles = path.listFiles();
- if(listFiles != null) {
- for(File listFile : listFiles) {
- if (listFile.toString().contains("ARCHIVE")){
- continue;
- }
- if(listFile.isFile()){
- logger.info("The file name in dataGrooming: " +listFile.getName());
- Date fileCreateDate = fileCreationMonthDate(listFile);
- logger.info("The fileCreateDate in dataGrooming is " + fileCreateDate);
- if( fileCreateDate.compareTo(newAgeZip) < 0) {
- archive(listFile,archiveDir,dataGroomingArcDir);
- }
- }
- }
- }
-
- Date newAgeDelete = getZipDate(ageDelete);
- //Iterate through the archive/dataGrooming dir
- File[] listFilesArchive = dataGroomingPath.listFiles();
- if(listFilesArchive != null) {
- for(File listFileArchive : listFilesArchive) {
- if(listFileArchive.isFile()) {
- logger.info("The file name in ARCHIVE/dataGrooming: " +listFileArchive.getName());
- Date fileCreateDate = fileCreationMonthDate(listFileArchive);
- logger.info("The fileCreateDate in ARCHIVE/dataGrooming is " + fileCreateDate);
- if(fileCreateDate.compareTo(newAgeDelete) < 0) {
- delete(listFileArchive);
- }
- }
- }
- }
- }
- catch (Exception e) {
- ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
- logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
- throw e;
- }
- }
-
- /**
- * This method checks if the directory exists
- * @param DIR
- *
- */
- public boolean directoryExists(String dir) {
- File path = new File(dir);
- boolean exists = path.exists();
- return exists;
- }
-
- public Date getZipDate(Integer days) throws Exception {
- return getZipDate(days, new Date());
- }
-
- public Date getZipDate(Integer days, Date date) throws Exception{
-
- Calendar cal = Calendar.getInstance();
- logger.info("The current date is " + date );
- cal.setTime(date);
- cal.add(Calendar.DATE, -days);
- Date newAgeZip = cal.getTime();
- logger.info("The newAgeDate is " +newAgeZip);
- return newAgeZip;
- }
-
-
- public Date fileCreationMonthDate (File file) throws Exception {
-
- BasicFileAttributes attr = Files.readAttributes(file.toPath(),
- BasicFileAttributes.class);
- FileTime time = attr.creationTime();
- String formatted = simpleDateFormat.format( new Date( time.toMillis() ) );
- Date d = simpleDateFormat.parse(formatted);
- return d;
- }
-
- /**
- * This method will zip the files and add it to the archive folder
- * Checks if the archive folder exists, if not then creates one
- * After adding the file to archive folder it deletes the file from the filepath
- * @throws AAIException
- * @throws Exception
- */
- public void archive(File file, String archiveDir, String afterArchiveDir) throws AAIException, Exception {
-
- logger.info("Inside the archive folder");
- String filename = file.getName();
- logger.info("file name is " +filename);
- File archivepath = new File(archiveDir);
-
- String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename;
-
- File dataGroomingPath = new File(afterArchiveDir);
-
- boolean exists = directoryExists(archiveDir);
- logger.info("Directory" + archiveDir + "exists: " + exists);
- if(!exists) {
- logger.error("The directory" + archiveDir +"does not exists so will create a new archive folder");
- //Create an archive folder if does not exists
- boolean flag = dataGroomingPath.mkdirs();
- if(!flag)
- logger.error("Failed to create ARCHIVE folder");
- }
- try(FileOutputStream outputstream = new FileOutputStream(zipFile + ".gz");
- ZipOutputStream zoutputstream = new ZipOutputStream(outputstream);
- FileInputStream inputstream = new FileInputStream(file)) {
- ZipEntry ze = new ZipEntry(file.getName());
- zoutputstream.putNextEntry(ze);
- byte[] buffer = new byte[1024];
- int len;
- while ((len = inputstream.read(buffer)) > 0) {
- zoutputstream.write(buffer,0,len);
- }
- //close all the sources
- zoutputstream.closeEntry();
- //Delete the file after been added to archive folder
- delete(file);
- logger.info("The file archived is " + file + " at " + afterArchiveDir );
- }
- catch (IOException e) {
- ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup " + e.getStackTrace());
- logger.info("AAI_4000", "Exception running cron job for DataCleanup", e);
- throw e;
- }
- }
-
- /**
- * This method will delete all the files from the archive folder that are older than 60 days
- * @param file
- */
- public static void delete(File file) {
-
- logger.info("Deleting the file " + file);
- boolean deleteStatus = file.delete();
- if(!deleteStatus){
- logger.error("Failed to delete the file" +file);
- }
- }
-
- /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.
- logDir is the {project_home}/logs
- archiveDir is the ARCHIVE directory where the files will be stored after 5 days.
- ageZip is the number of days after which the file will be moved to the ARCHIVE folder.
- ageDelete is the number of days after which the data files will be deleted i.e after 30 days.
-*/
- @Scheduled(cron = "${datasnapshotcleanup.cron}" )
- public void dataSnapshotCleanup() throws AAIException, Exception {
-
- logger.info("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
-
- try {
- String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
- String dataSnapshotDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots";
- String archiveDir = dataSnapshotDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
- String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";
- File path = new File(dataSnapshotDir);
- File archivepath = new File(archiveDir);
- File dataSnapshotPath = new File(dataSnapshotArcDir);
-
- logger.info("The logDir is " + logDir);
- logger.info("The dataSnapshotDir is " + dataSnapshotDir);
- logger.info("The archiveDir is " + archiveDir );
- logger.info("The dataSnapshotArcDir is " + dataSnapshotArcDir );
-
- boolean exists = directoryExists(logDir);
- logger.info("Directory" + logDir + "exists: " + exists);
- if(!exists)
- logger.error("The directory" + logDir +"does not exists");
-
- Integer ageZipSnapshot = AAIConfig.getInt("aai.datasnapshot.agezip");
- Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datasnapshot.agedelete");
-
- Date newAgeZip = getZipDate(ageZipSnapshot);
-
- //Iterate through the dataGroomingDir
- File[] listFiles = path.listFiles();
- if(listFiles != null) {
- for(File listFile : listFiles) {
- if (listFile.toString().contains("ARCHIVE")){
- continue;
- }
- if(listFile.isFile()){
- logger.info("The file name in dataSnapshot: " +listFile.getName());
- Date fileCreateDate = fileCreationMonthDate(listFile);
- logger.info("The fileCreateDate in dataSnapshot is " + fileCreateDate);
- if( fileCreateDate.compareTo(newAgeZip) < 0) {
- archive(listFile,archiveDir,dataSnapshotArcDir);
- }
- }
- }
- }
-
- Date newAgeDelete = getZipDate(ageDeleteSnapshot);
- //Iterate through the archive/dataSnapshots dir
- File[] listFilesArchive = dataSnapshotPath.listFiles();
- if(listFilesArchive != null) {
- for(File listFileArchive : listFilesArchive) {
- if(listFileArchive.isFile()) {
- logger.info("The file name in ARCHIVE/dataSnapshot: " +listFileArchive.getName());
- Date fileCreateDate = fileCreationMonthDate(listFileArchive);
- logger.info("The fileCreateDate in ARCHIVE/dataSnapshot is " + fileCreateDate);
- if(fileCreateDate.compareTo(newAgeDelete) < 0) {
- delete(listFileArchive);
- }
- }
- }
- }
- }
- catch (Exception e) {
- ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
- logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
- throw e;
- }
- }
-}
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.datacleanup;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.nio.file.attribute.FileTime;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+@Component
+@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
+public class DataCleanupTasks {
+
+ private static final EELFLogger logger = EELFManager.getInstance().getLogger(DataCleanupTasks.class);
+ private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMdd");
+ /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.
+ logDir is the {project_home}/logs
+ archiveDir is the ARCHIVE directory where the files will be stored after 5 days.
+ ageZip is the number of days after which the file will be moved to the ARCHIVE folder.
+ ageDelete is the number of days after which the data files will be deleted i.e after 30 days.
+ */
+ @Scheduled(cron = "${datagroomingcleanup.cron}" )
+ public void dataGroomingCleanup() throws AAIException, Exception {
+
+ logger.info("Started cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));
+
+ try {
+ String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
+ String dataGroomingDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataGrooming";
+ String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
+ String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";
+ File path = new File(dataGroomingDir);
+ File archivepath = new File(archiveDir);
+ File dataGroomingPath = new File(dataGroomingArcDir);
+
+ logger.info("The logDir is " + logDir);
+ logger.info("The dataGroomingDir is " + dataGroomingDir);
+ logger.info("The archiveDir is " + archiveDir );
+ logger.info("The dataGroomingArcDir is " + dataGroomingArcDir );
+
+ boolean exists = directoryExists(logDir);
+ logger.info("Directory" + logDir + "exists: " + exists);
+ if(!exists)
+ logger.error("The directory" + logDir +"does not exists");
+
+ Integer ageZip = AAIConfig.getInt("aai.datagrooming.agezip");
+ Integer ageDelete = AAIConfig.getInt("aai.datagrooming.agedelete");
+
+ Date newAgeZip = getZipDate(ageZip);
+
+ //Iterate through the dataGroomingDir
+ File[] listFiles = path.listFiles();
+ if(listFiles != null) {
+ for(File listFile : listFiles) {
+ if (listFile.toString().contains("ARCHIVE")){
+ continue;
+ }
+ if(listFile.isFile()){
+ logger.info("The file name in dataGrooming: " +listFile.getName());
+ Date fileCreateDate = fileCreationMonthDate(listFile);
+ logger.info("The fileCreateDate in dataGrooming is " + fileCreateDate);
+ if( fileCreateDate.compareTo(newAgeZip) < 0) {
+ archive(listFile,archiveDir,dataGroomingArcDir);
+ }
+ }
+ }
+ }
+
+ Date newAgeDelete = getZipDate(ageDelete);
+ //Iterate through the archive/dataGrooming dir
+ File[] listFilesArchive = dataGroomingPath.listFiles();
+ if(listFilesArchive != null) {
+ for(File listFileArchive : listFilesArchive) {
+ if(listFileArchive.isFile()) {
+ logger.info("The file name in ARCHIVE/dataGrooming: " +listFileArchive.getName());
+ Date fileCreateDate = fileCreationMonthDate(listFileArchive);
+ logger.info("The fileCreateDate in ARCHIVE/dataGrooming is " + fileCreateDate);
+ if(fileCreateDate.compareTo(newAgeDelete) < 0) {
+ delete(listFileArchive);
+ }
+ }
+ }
+ }
+ }
+ catch (Exception e) {
+ ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
+ logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
+ throw e;
+ }
+ }
+
+ /**
+ * This method checks if the directory exists
+ * @param DIR
+ *
+ */
+ public boolean directoryExists(String dir) {
+ File path = new File(dir);
+ boolean exists = path.exists();
+ return exists;
+ }
+
+ public Date getZipDate(Integer days) throws Exception {
+ return getZipDate(days, new Date());
+ }
+
+ public Date getZipDate(Integer days, Date date) throws Exception{
+
+ Calendar cal = Calendar.getInstance();
+ logger.info("The current date is " + date );
+ cal.setTime(date);
+ cal.add(Calendar.DATE, -days);
+ Date newAgeZip = cal.getTime();
+ logger.info("The newAgeDate is " +newAgeZip);
+ return newAgeZip;
+ }
+
+
+ public Date fileCreationMonthDate (File file) throws Exception {
+
+ BasicFileAttributes attr = Files.readAttributes(file.toPath(),
+ BasicFileAttributes.class);
+ FileTime time = attr.creationTime();
+ String formatted = simpleDateFormat.format( new Date( time.toMillis() ) );
+ Date d = simpleDateFormat.parse(formatted);
+ return d;
+ }
+
+ /**
+ * This method will zip the files and add it to the archive folder
+ * Checks if the archive folder exists, if not then creates one
+ * After adding the file to archive folder it deletes the file from the filepath
+ * @throws AAIException
+ * @throws Exception
+ */
+ public void archive(File file, String archiveDir, String afterArchiveDir) throws AAIException, Exception {
+
+ logger.info("Inside the archive folder");
+ String filename = file.getName();
+ logger.info("file name is " +filename);
+ File archivepath = new File(archiveDir);
+
+ String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename;
+
+ File dataGroomingPath = new File(afterArchiveDir);
+
+ boolean exists = directoryExists(archiveDir);
+ logger.info("Directory" + archiveDir + "exists: " + exists);
+ if(!exists) {
+ logger.error("The directory" + archiveDir +"does not exists so will create a new archive folder");
+ //Create an archive folder if does not exists
+ boolean flag = dataGroomingPath.mkdirs();
+ if(!flag)
+ logger.error("Failed to create ARCHIVE folder");
+ }
+ try(FileOutputStream outputstream = new FileOutputStream(zipFile + ".gz");
+ ZipOutputStream zoutputstream = new ZipOutputStream(outputstream);
+ FileInputStream inputstream = new FileInputStream(file)) {
+ ZipEntry ze = new ZipEntry(file.getName());
+ zoutputstream.putNextEntry(ze);
+ byte[] buffer = new byte[1024];
+ int len;
+ while ((len = inputstream.read(buffer)) > 0) {
+ zoutputstream.write(buffer,0,len);
+ }
+ //close all the sources
+ zoutputstream.closeEntry();
+ //Delete the file after been added to archive folder
+ delete(file);
+ logger.info("The file archived is " + file + " at " + afterArchiveDir );
+ }
+ catch (IOException e) {
+ ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup " + e.getStackTrace());
+ logger.info("AAI_4000", "Exception running cron job for DataCleanup", e);
+ throw e;
+ }
+ }
+
+ /**
+ * This method will delete all the files from the archive folder that are older than 60 days
+ * @param file
+ */
+ public static void delete(File file) {
+
+ logger.info("Deleting the file " + file);
+ boolean deleteStatus = file.delete();
+ if(!deleteStatus){
+ logger.error("Failed to delete the file" +file);
+ }
+ }
+
+ /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.
+ logDir is the {project_home}/logs
+ archiveDir is the ARCHIVE directory where the files will be stored after 5 days.
+ ageZip is the number of days after which the file will be moved to the ARCHIVE folder.
+ ageDelete is the number of days after which the data files will be deleted i.e after 30 days.
+*/
+ @Scheduled(cron = "${datasnapshotcleanup.cron}" )
+ public void dataSnapshotCleanup() throws AAIException, Exception {
+
+ logger.info("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
+
+ try {
+ String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
+ String dataSnapshotDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots";
+ String archiveDir = dataSnapshotDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
+ String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";
+ File path = new File(dataSnapshotDir);
+ File archivepath = new File(archiveDir);
+ File dataSnapshotPath = new File(dataSnapshotArcDir);
+
+ logger.info("The logDir is " + logDir);
+ logger.info("The dataSnapshotDir is " + dataSnapshotDir);
+ logger.info("The archiveDir is " + archiveDir );
+ logger.info("The dataSnapshotArcDir is " + dataSnapshotArcDir );
+
+ boolean exists = directoryExists(logDir);
+ logger.info("Directory" + logDir + "exists: " + exists);
+ if(!exists)
+ logger.error("The directory" + logDir +"does not exists");
+
+ Integer ageZipSnapshot = AAIConfig.getInt("aai.datasnapshot.agezip");
+ Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datasnapshot.agedelete");
+
+ Date newAgeZip = getZipDate(ageZipSnapshot);
+
+ //Iterate through the dataGroomingDir
+ File[] listFiles = path.listFiles();
+ if(listFiles != null) {
+ for(File listFile : listFiles) {
+ if (listFile.toString().contains("ARCHIVE")){
+ continue;
+ }
+ if(listFile.isFile()){
+ logger.info("The file name in dataSnapshot: " +listFile.getName());
+ Date fileCreateDate = fileCreationMonthDate(listFile);
+ logger.info("The fileCreateDate in dataSnapshot is " + fileCreateDate);
+ if( fileCreateDate.compareTo(newAgeZip) < 0) {
+ archive(listFile,archiveDir,dataSnapshotArcDir);
+ }
+ }
+ }
+ }
+
+ Date newAgeDelete = getZipDate(ageDeleteSnapshot);
+ //Iterate through the archive/dataSnapshots dir
+ File[] listFilesArchive = dataSnapshotPath.listFiles();
+ if(listFilesArchive != null) {
+ for(File listFileArchive : listFilesArchive) {
+ if(listFileArchive.isFile()) {
+ logger.info("The file name in ARCHIVE/dataSnapshot: " +listFileArchive.getName());
+ Date fileCreateDate = fileCreationMonthDate(listFileArchive);
+ logger.info("The fileCreateDate in ARCHIVE/dataSnapshot is " + fileCreateDate);
+ if(fileCreateDate.compareTo(newAgeDelete) < 0) {
+ delete(listFileArchive);
+ }
+ }
+ }
+ }
+ }
+ catch (Exception e) {
+ ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
+ logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
+ throw e;
+ }
+ }
+}
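Two editorial notes on the class above. First, both cleanup jobs take their cron expressions from datatoolscrons.properties via @PropertySource, so schedules are operator-tunable without a rebuild. Second, archive() writes ZIP-format data (ZipOutputStream/ZipEntry) into a file whose name ends in ".gz"; tools expecting gzip will need to unzip it instead. A minimal sketch of the property-driven scheduling pattern the class relies on ("democleanup.cron" is a hypothetical property name; the annotations are standard Spring):

import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;

// The cron expression is resolved from configuration at startup rather
// than hard-coded, mirroring how DataCleanupTasks is wired.
@Configuration
@EnableScheduling
class DemoCleanupConfig {
    @Scheduled(cron = "${democleanup.cron}")
    public void demoCleanup() {
        System.out.println("cleanup fired");
    }
}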
diff --git a/src/main/java/org/onap/aai/dataexport/DataExportTasks.java b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
new file mode 100644
index 0000000..359e2ba
--- /dev/null
+++ b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
@@ -0,0 +1,382 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dataexport;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.Properties;
+import java.util.TreeMap;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.onap.aai.dbgen.DynamicPayloadGenerator;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.apache.commons.io.comparator.LastModifiedFileComparator;
+import org.apache.commons.io.filefilter.DirectoryFileFilter;
+import org.apache.commons.io.filefilter.FileFileFilter;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.filefilter.RegexFileFilter;
+
+/**
+ * DataExportTasks obtains a graph snapshot and invokes DynamicPayloadGenerator
+ *
+ */
+@Component
+@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
+public class DataExportTasks {
+
+ private static final EELFLogger LOGGER;
+ private static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
+ private static final String GA_MS = "aai-graphadmin";
+
+ static {
+ System.setProperty("aai.service.name", DataExportTasks.class.getSimpleName());
+ Properties props = System.getProperties();
+ props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);
+ props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
+ LOGGER = EELFManager.getInstance().getLogger(DataExportTasks.class);
+ }
+
+ private LoaderFactory loaderFactory;
+ private EdgeIngestor edgeIngestor;
+ private SchemaVersions schemaVersions;
+
+ @Autowired
+ public DataExportTasks(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, SchemaVersions schemaVersions){
+ this.loaderFactory = loaderFactory;
+ this.edgeIngestor = edgeIngestor;
+ this.schemaVersions = schemaVersions;
+ }
+
+ /**
+ * Scheduled task to invoke exportTask
+ */
+ @Scheduled(cron = "${dataexporttask.cron}" )
+ public void export() {
+ try {
+ exportTask();
+ }
+ catch (Exception e) {
+ }
+ }
+ /**
+ * The exportTask method.
+ *
+ * @throws AAIException, Exception
+ */
+ public void exportTask() throws AAIException, Exception {
+
+ LoggingContext.init();
+ LoggingContext.requestId(UUID.randomUUID().toString());
+ LoggingContext.partnerName("AAI");
+ LoggingContext.targetEntity(GA_MS);
+ LoggingContext.component("exportTask");
+ LoggingContext.serviceName(GA_MS);
+ LoggingContext.targetServiceName("exportTask");
+ LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
+
+ if (AAIConfig.get("aai.dataexport.enable").equalsIgnoreCase("false")) {
+ LOGGER.info("Data Export is not enabled");
+ return;
+ }
+ // Check if the process was started via command line
+ if (isDataExportRunning()) {
+ LOGGER.info("There is a dataExport process already running");
+ return;
+ }
+
+ LOGGER.info("Started exportTask: " + dateFormat.format(new Date()));
+
+ String enableSchemaValidation = AAIConfig.get("aai.dataexport.enable.schema.validation", "false");
+ String outputLocation = AAIConstants.AAI_HOME_BUNDLECONFIG + AAIConfig.get("aai.dataexport.output.location");
+ String enableMultipleSnapshots = AAIConfig.get("aai.dataexport.enable.multiple.snapshots", "false");
+ String nodeConfigurationLocation = AAIConstants.AAI_HOME_BUNDLECONFIG + AAIConfig.get("aai.dataexport.node.config.location");
+ String inputFilterConfigurationLocation = AAIConstants.AAI_HOME_BUNDLECONFIG + AAIConfig.get("aai.dataexport.input.filter.config.location");
+ String enablePartialGraph = AAIConfig.get("aai.dataexport.enable.partial.graph", "true");
+
+ // Check that the output location exist
+ File targetDirFile = new File(outputLocation);
+ if ( !targetDirFile.exists() ) {
+ targetDirFile.mkdir();
+ }
+ else {
+ //Delete any existing payload files
+ deletePayload(targetDirFile);
+ }
+
+ File snapshot = null;
+ String snapshotFilePath = null;
+ if ( "false".equalsIgnoreCase(enableMultipleSnapshots)){
+ // find the second to latest data snapshot
+ snapshot = findSnapshot();
+ snapshotFilePath = snapshot.getAbsolutePath();
+ if ( "true".equalsIgnoreCase (enablePartialGraph) ) {
+ String[] command = new String[2];
+ command[0] = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "bin" + AAIConstants.AAI_FILESEP + "dynamicPayloadPartial.sh";
+ command[1] = snapshotFilePath;
+ runScript(command);
+ }
+ }
+ else {
+ snapshotFilePath = findMultipleSnapshots();
+ }
+
+ List<String> paramsList = new ArrayList<String>();
+ paramsList.add("-s");
+ paramsList.add(enableSchemaValidation);
+ paramsList.add("-o");
+ paramsList.add(outputLocation);
+ paramsList.add("-m");
+ paramsList.add(enableMultipleSnapshots);
+ paramsList.add("-n");
+ paramsList.add(nodeConfigurationLocation);
+ paramsList.add("-i");
+ paramsList.add(inputFilterConfigurationLocation);
+ paramsList.add("-p");
+ paramsList.add(enablePartialGraph);
+ paramsList.add("-d");
+ paramsList.add(snapshotFilePath);
+
+ LOGGER.debug("paramsList is : " + paramsList);
+
+ String[] paramsArray = paramsList.toArray(new String[0]);
+ try {
+ DynamicPayloadGenerator.run(loaderFactory, edgeIngestor, schemaVersions, paramsArray, false);
+ LOGGER.info("DynamicPaylodGenerator completed");
+ // tar/gzip payload files
+ String[] command = new String[1];
+ command[0] = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "bin" + AAIConstants.AAI_FILESEP + "dynamicPayloadArchive.sh";
+ runScript(command);
+ }
+ catch (Exception e) {
+ ErrorLogHelper.logError("AAI_8003", e.getMessage());
+ LOGGER.info("Exception running dataExport task " + e.getMessage());
+ throw e;
+ } finally {
+ LOGGER.info("Completed dataExport task" );
+ LoggingContext.clear();
+ }
+
+ }
+ /**
+ * The isDataExportRunning method, checks if the data export task was started separately via command line
+ * @return true if another process is running, false if not
+ */
+ private static boolean isDataExportRunning(){
+
+ Process process = null;
+
+ int count = 0;
+ try {
+ process = new ProcessBuilder().command("bash", "-c", "ps -ef | grep '[D]ynamicPayloadGenerator'").start();
+ InputStream is = process.getInputStream();
+ InputStreamReader isr = new InputStreamReader(is);
+ BufferedReader br = new BufferedReader(isr);
+
+ while (br.readLine() != null){
+ count++;
+ }
+
+ int exitVal = process.waitFor();
+ LOGGER.info("Check if dataExport is running returned: " + exitVal);
+ } catch (Exception e) {
+ ErrorLogHelper.logError("AAI_8002", "Exception while running the check to see if dataExport is running "+ e.getMessage());
+ LOGGER.info("Exception while running the check to see if dataExport is running "+ e.getMessage());
+ }
+
+ if(count > 0){
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ /**
+ * The findSnapshot method tries to find the second to last data snapshot. If it can't find it, it returns the last one.
+ * @return a single snapshot File
+ */
+ private static File findSnapshot() {
+ String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" +
+ AAIConstants.AAI_FILESEP + "dataSnapshots";
+ File snapshot = null;
+ File targetDirFile = new File(targetDir);
+
+ File[] allFilesArr = targetDirFile.listFiles((FileFilter) FileFileFilter.FILE);
+ if ( allFilesArr == null || allFilesArr.length == 0 ) {
+ ErrorLogHelper.logError("AAI_8001", "Unable to find data snapshots at " + targetDir);
+ LOGGER.info ("Unable to find data snapshots at " + targetDir);
+ return (snapshot);
+ }
+ if ( allFilesArr.length > 1 ) {
+ Arrays.sort(allFilesArr, LastModifiedFileComparator.LASTMODIFIED_REVERSE);
+ // need to use the second to last modified
+ snapshot = allFilesArr[1];
+ }
+ else {
+ snapshot = allFilesArr[0];
+ }
+ return (snapshot);
+ }
+
+ /**
+ * The method findMultipleSnapshots looks in the data snapshots directory for a set of snapshot files that match the pattern.
+ * @return the file name prefix corresponding to the second to last set of snapshots
+ */
+ private static String findMultipleSnapshots() {
+ String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" +
+ AAIConstants.AAI_FILESEP + "dataSnapshots";
+ String snapshotName = null;
+ File targetDirFile = new File(targetDir);
+ TreeMap<String,List<File>> fileMap = new TreeMap<String,List<File>>(String.CASE_INSENSITIVE_ORDER);
+
+ /*dataSnapshot.graphSON.201804022009.P0
+ dataSnapshot.graphSON.201804022009.P1
+ dataSnapshot.graphSON.201804022009.P2
+ dataSnapshot.graphSON.201804022009.P3
+ dataSnapshot.graphSON.201804022009.P4*/
+ String snapshotPattern = "^.*dataSnapshot\\.graphSON\\.(\\d+)\\.P.*$";
+ Pattern p = Pattern.compile (snapshotPattern);
+
+ FileFilter fileFilter = new RegexFileFilter("^.*dataSnapshot\\.graphSON\\.(\\d+)\\.P.*$");
+ File[] allFilesArr = targetDirFile.listFiles(fileFilter);
+
+ if ( allFilesArr == null || allFilesArr.length == 0 ) {
+ ErrorLogHelper.logError("AAI_8001", "Unable to find data snapshots at " + targetDir);
+ LOGGER.info ("Unable to find data snapshots at " + targetDir);
+ return (null);
+ }
+
+ if ( allFilesArr.length > 1 ) {
+ Arrays.sort(allFilesArr, LastModifiedFileComparator.LASTMODIFIED_REVERSE);
+ for ( File f : allFilesArr ) {
+ // find the second to last group of multiple snapshots
+ Matcher m = p.matcher(f.getPath());
+ if ( m.matches() ) {
+ String g1 = m.group(1);
+ LOGGER.debug ("Found group " + g1);
+ if ( !fileMap.containsKey(g1) ) {
+ ArrayList<File> l = new ArrayList<File>();
+ l.add(f);
+ fileMap.put(g1, l);
+ }
+ else {
+ List<File> l = fileMap.get(g1);
+ l.add(f);
+ fileMap.put(g1, l);
+ }
+ }
+
+ }
+ if ( fileMap.size() > 1 ) {
+ NavigableMap<String,List<File>> dmap = fileMap.descendingMap();
+
+ Map.Entry<String,List<File>> fentry = dmap.firstEntry();
+ LOGGER.debug ("First key in descending map " + fentry.getKey());
+
+ Map.Entry<String,List<File>> lentry = dmap.higherEntry(fentry.getKey());
+ LOGGER.debug ("Next key in descending map " + lentry.getKey());
+
+ List<File> l = lentry.getValue();
+ snapshotName = l.get(0).getAbsolutePath();
+ // Remove the .P* extension
+ int lastDot = snapshotName.lastIndexOf('.');
+ if ( lastDot > 0 ) {
+ snapshotName = snapshotName.substring(0,lastDot);
+ }
+ else {
+ LOGGER.info ("Invalid snapshot file name format " + snapshotName);
+ return null;
+ }
+ }
+ }
+ else {
+ return null;
+ }
+ return (snapshotName);
+ }
+ /**
+ * The deletePayload method deletes all the payload files that it finds at targetDirectory
+ * @param targetDirFile the directory that contains payload files
+ * @throws AAIException
+ */
+ private static void deletePayload(File targetDirFile) throws AAIException {
+
+ File[] allFilesArr = targetDirFile.listFiles((FileFilter)DirectoryFileFilter.DIRECTORY);
+ if ( allFilesArr == null || allFilesArr.length == 0 ) {
+ LOGGER.info ("No payload files found at " + targetDirFile.getPath());
+ return;
+ }
+ for ( File f : allFilesArr ) {
+ try {
+ FileUtils.deleteDirectory(f);
+ }
+ catch (IOException e) {
+
+ LOGGER.info ("Unable to delete directory " + f.getAbsolutePath() + " " + e.getMessage());
+ }
+
+ }
+
+ }
+ /**
+ * The runScript method runs a shell script/command with a variable number of arguments
+ * @param script The script/command arguments
+ */
+ private static void runScript(String ...script ) {
+ Process process = null;
+ try {
+ process = new ProcessBuilder().command(script).start();
+ int exitVal = process.waitFor();
+ LOGGER.info("dynamicPayloadArchive.sh returned: " + exitVal);
+ } catch (Exception e) {
+ ErrorLogHelper.logError("AAI_8002", "Exception while running dynamicPayloadArchive.sh "+ e.getMessage());
+ LOGGER.info("Exception while running dynamicPayloadArchive.sh" + e.getMessage());
+ }
+
+ }
+}
" + e.getMessage()); + } + + JCommander jCommander = new JCommander(cArgs, args); jCommander.setProgramName(DataGrooming.class.getSimpleName()); //Print Defaults - LOGGER.info("EdgesOnlyFlag is" + cArgs.edgesOnlyFlag); - LOGGER.info("DoAutoFix is" + cArgs.doAutoFix); - LOGGER.info("skipHostCheck is" + cArgs.skipHostCheck); - LOGGER.info("dontFixOrphansFlag is" + cArgs.dontFixOrphansFlag); - LOGGER.info("singleCommits is" + cArgs.singleCommits); - LOGGER.info("dupeCheckOff is" + cArgs.dupeCheckOff); - LOGGER.info("dupeFixOn is" + cArgs.dupeFixOn); - LOGGER.info("ghost2CheckOff is" + cArgs.ghost2CheckOff); - LOGGER.info("ghost2FixOn is" + cArgs.ghost2FixOn); - LOGGER.info("neverUseCache is" + cArgs.neverUseCache); - LOGGER.info("skipEdgeChecks is" + cArgs.skipEdgeCheckFlag); - LOGGER.info("skipIndexUpdateFix is" + cArgs.skipIndexUpdateFix); - LOGGER.info("maxFix is" + cArgs.maxRecordsToFix); + LOGGER.info("EdgesOnlyFlag is [" + cArgs.edgesOnlyFlag + "]"); + LOGGER.info("DoAutoFix is [" + cArgs.doAutoFix + "]"); + LOGGER.info("skipHostCheck is [" + cArgs.skipHostCheck + "]"); + LOGGER.info("dontFixOrphansFlag is [" + cArgs.dontFixOrphansFlag + "]"); + LOGGER.info("dupeCheckOff is [" + cArgs.dupeCheckOff + "]"); + LOGGER.info("dupeFixOn is [" + cArgs.dupeFixOn + "]"); + LOGGER.info("ghost2CheckOff is [" + cArgs.ghost2CheckOff + "]"); + LOGGER.info("ghost2FixOn is [" + cArgs.ghost2FixOn + "]"); + LOGGER.info("neverUseCache is [" + cArgs.neverUseCache + "]"); + LOGGER.info("singleNodeType is [" + cArgs.singleNodeType + "]"); + LOGGER.info("skipEdgeChecks is [" + cArgs.skipEdgeCheckFlag + "]"); + LOGGER.info("skipIndexUpdateFix is [" + cArgs.skipIndexUpdateFix + "]"); + LOGGER.info("maxFix is [" + cArgs.maxRecordsToFix + "]"); - /*if (args.length > 0) { - // They passed some arguments in that will affect processing - for (int i = 0; i < args.length; i++) { - String thisArg = args[i]; - if (thisArg.equals("-edgesOnly")) { - edgesOnlyFlag = true; - } else if (thisArg.equals("-autoFix")) { - doAutoFix = true; - } else if (thisArg.equals("-skipHostCheck")) { - skipHostCheck = true; - } else if (thisArg.equals("-dontFixOrphans")) { - dontFixOrphansFlag = true; - } else if (thisArg.equals("-singleCommits")) { - singleCommits = true; - } else if (thisArg.equals("-dupeCheckOff")) { - dupeCheckOff = true; - } else if (thisArg.equals("-dupeFixOn")) { - dupeFixOn = true; - } else if (thisArg.equals("-ghost2CheckOff")) { - ghost2CheckOff = true; - } else if (thisArg.equals("-neverUseCache")) { - neverUseCache = true; - } else if (thisArg.equals("-ghost2FixOn")) { - ghost2FixOn = true; - } else if (thisArg.equals("-skipEdgeChecks")) { - skipEdgeCheckFlag = true; - } else if (thisArg.equals("-skipIndexUpdateFix")) { - skipIndexUpdateFix = true; - } else if (thisArg.equals("-maxFix")) { - i++; - if (i >= args.length) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error(" No value passed with -maxFix option. 
"); - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } - String nextArg = args[i]; - try { - maxRecordsToFix = Integer.parseInt(nextArg); - } catch (Exception e) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("Bad value passed with -maxFix option: [" - + nextArg + "]"); - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } - } else if (thisArg.equals("-sleepMinutes")) { - i++; - if (i >= args.length) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("No value passed with -sleepMinutes option."); - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } - String nextArg = args[i]; - try { - sleepMinutes = Integer.parseInt(nextArg); - } catch (Exception e) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("Bad value passed with -sleepMinutes option: [" - + nextArg + "]"); - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } - } else if (thisArg.equals("-timeWindowMinutes")) { - i++; - if (i >= args.length) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("No value passed with -timeWindowMinutes option."); - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } - String nextArg = args[i]; - try { - timeWindowMinutes = Integer.parseInt(nextArg); - } catch (Exception e) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("Bad value passed with -timeWindowMinutes option: [" - + nextArg + "]"); - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } - - } else if (thisArg.equals("-f")) { - i++; - if (i >= args.length) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error(" No value passed with -f option. "); - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } - prevFileName = args[i]; - } else if (thisArg.equals("-singleNodeType")) { - i++; - if (i >= args.length) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error(" No value passed with -onlyThisNodeType option. "); - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } - singleNodeType = args[i]; - } else { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error(" Unrecognized argument passed to DataGrooming: [" - + thisArg + "]. "); - LOGGER.error(" Valid values are: -f -autoFix -maxFix -edgesOnly -skipEdgeChecks -dupeFixOn -donFixOrphans -timeWindowMinutes -sleepMinutes -neverUseCache"); - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } - } - } */ String windowTag = "FULL"; //TODO??? - if( timeWindowMinutes > 0 ){ + if( cArgs.timeWindowMinutes > 0 ){ windowTag = "PARTIAL"; } String groomOutFileName = "dataGrooming." + windowTag + "." + dteStr + ".out"; @@ -279,7 +186,7 @@ public class DataGrooming { } try { - if (!prevFileName.equals("")) { + if (!cArgs.prevFileName.equals("")) { // They are trying to fix some data based on a data in a // previous file. 
LOGGER.info(" Call doTheGrooming() with a previous fileName [" @@ -287,9 +194,9 @@ public class DataGrooming { Boolean finalShutdownFlag = true; Boolean cacheDbOkFlag = false; doTheGrooming(prevFileName, cArgs.edgesOnlyFlag, cArgs.dontFixOrphansFlag, - cArgs.maxRecordsToFix, groomOutFileName, ver, cArgs.singleCommits, + cArgs.maxRecordsToFix, groomOutFileName, ver, cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn, - cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag, + finalShutdownFlag, cacheDbOkFlag, cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes, cArgs.singleNodeType, cArgs.skipIndexUpdateFix ); @@ -306,8 +213,8 @@ public class DataGrooming { Boolean cacheDbOkFlag = true; int fixCandCount = doTheGrooming("", cArgs.edgesOnlyFlag, cArgs.dontFixOrphansFlag, cArgs.maxRecordsToFix, groomOutFileName, - ver, cArgs.singleCommits, cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn, - cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag, + ver, cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn, + finalShutdownFlag, cacheDbOkFlag, cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes, cArgs.singleNodeType, cArgs.skipIndexUpdateFix ); if (fixCandCount == 0) { @@ -334,9 +241,9 @@ public class DataGrooming { cacheDbOkFlag = false; doTheGrooming(groomOutFileName, cArgs.edgesOnlyFlag, cArgs.dontFixOrphansFlag, cArgs.maxRecordsToFix, - secondGroomOutFileName, ver, cArgs.singleCommits, + secondGroomOutFileName, ver, cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn, - cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag, + finalShutdownFlag, cacheDbOkFlag, cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes, cArgs.singleNodeType, cArgs.skipIndexUpdateFix ); } @@ -348,12 +255,12 @@ public class DataGrooming { Boolean cacheDbOkFlag = true; if( cArgs.neverUseCache ){ // They have forbidden us from using a cached db connection. 
- cArgs.cacheDbOkFlag = false; + cacheDbOkFlag = false; } doTheGrooming("", cArgs.edgesOnlyFlag, cArgs.dontFixOrphansFlag, - cArgs.maxRecordsToFix, groomOutFileName, ver, cArgs.singleCommits, + cArgs.maxRecordsToFix, groomOutFileName, ver, cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn, - cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag, + finalShutdownFlag, cacheDbOkFlag, cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes, cArgs.singleNodeType, cArgs.skipIndexUpdateFix ); } @@ -371,7 +278,7 @@ public class DataGrooming { * * @param args the arguments */ - public static void main(String[] args) { + public static void main(String[] args) throws AAIException { // Set the logging file properties to be used by EELFManager System.setProperty("aai.service.name", DataGrooming.class.getSimpleName()); @@ -390,11 +297,24 @@ public class DataGrooming { props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS); props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG); - AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext( - "org.onap.aai.config", - "org.onap.aai.setup" - ); + AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(); + PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration(); + initializer.initialize(ctx); + + try { + ctx.scan( + "org.onap.aai.config", + "org.onap.aai.setup" + ); + ctx.refresh(); + } catch (Exception e) { + AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e); + LoggingContext.statusCode(LoggingContext.StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry"); + throw aai; + } LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class); SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class); DataGrooming dataGrooming = new DataGrooming(loaderFactory, schemaVersions); @@ -410,7 +330,6 @@ public class DataGrooming { * @param maxRecordsToFix the max records to fix * @param groomOutFileName the groom out file name * @param version the version - * @param singleCommits the single commits * @param dupeCheckOff the dupe check off * @param dupeFixOn the dupe fix on * @param ghost2CheckOff the ghost 2 check off @@ -422,7 +341,6 @@ public class DataGrooming { private int doTheGrooming( String fileNameForFixing, Boolean edgesOnlyFlag, Boolean dontFixOrphansFlag, int maxRecordsToFix, String groomOutFileName, String version, - Boolean singleCommits, Boolean dupeCheckOff, Boolean dupeFixOn, Boolean ghost2CheckOff, Boolean ghost2FixOn, Boolean finalShutdownFlag, Boolean cacheDbOkFlag, @@ -532,9 +450,9 @@ public class DataGrooming { ghostNodeHash = new HashMap<String, Vertex>(); dupeGroups = new ArrayList<>(); + LOGGER.debug(" Using default schemaVersion = [" + schemaVersions.getDefaultVersion().toString() + "]" ); Loader loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion()); - // NOTE --- At one point, we tried explicitly searching for // nodes that were missing their aai-node-type (which does // happen sometimes), but the search takes too long and cannot @@ -567,7 +485,7 @@ public class DataGrooming { int thisNtDeleteCount = 0; if( !singleNodeType.equals("") && !singleNodeType.equals(nType) ){ - // We are only going to process this one node type + // We are only going to process this one node type and this isn't it continue; } @@ -633,12 
+551,19 @@ public class DataGrooming { continue; } totalNodeCount++; - List <Vertex> secondGetList = new ArrayList <> (); + // Note - the "secondGetList" is used one node at a time - it is populated + // using either the node's defined unique key/keys (if it is not dependent on + // a "parent" node), or is populated using the key/keys "under" its parent node. + List <Vertex> secondGetList = new ArrayList <> (); + // ----------------------------------------------------------------------- // For each vertex of this nodeType, we want to: - // a) make sure that it can be retrieved using it's AAI defined key - // b) make sure that it is not a duplicate + // a) make sure it can be retrieved using its "aai-uri" + // b) make sure that it can be retrieved using its AAI defined key(s) + // c) make sure that it is not a duplicate // ----------------------------------------------------------------------- + + Boolean aaiUriOk = checkAaiUriOk(source1, thisVtx); // For this instance of this nodeType, get the key properties HashMap<String, Object> propHashWithKeys = new HashMap<>(); @@ -646,7 +571,6 @@ public class DataGrooming { while (keyPropI.hasNext()) { String propName = keyPropI.next(); String propVal = ""; - //delete an already deleted vertex Object obj = thisVtx.<Object>property(propName).orElse(null); if (obj != null) { propVal = obj.toString(); } @@ -688,7 +612,7 @@ public class DataGrooming { processedVertices.add(thisVtx.id().toString()); Object ob = thisVtx.<Object>property("aai-node-type").orElse(null); if( ob == null && !skipIndexUpdateFix ){ - updateIndexedProps(thisVtx, thisVid, nType, propTypeHash, indexedProps); + updateIndexedPropsForMissingNT(thisVtx, thisVid, nType, propTypeHash, indexedProps); updateOnlyFlag = true; dummyUpdCount++; // Since we are updating this delete candidate, not deleting it, we @@ -767,16 +691,33 @@ public class DataGrooming { } }// end of -- else this is a dependent node -- piece - if( depNodeOk && (secondGetList == null || secondGetList.size() == 0) ){ - // We could not get the node back using it's own key info. + Boolean aaiKeysOk = true; + if( (secondGetList == null || secondGetList.size() == 0) + && depNodeOk){ + aaiKeysOk = false; + } + + if( (!aaiKeysOk || !aaiUriOk) + && !deleteCandidateList.contains(thisVid) + && !skipIndexUpdateFix ){ + // Either the aaiKeys or aaiUri was bad. This may + // be a problem with the indexes so we'll try to reset + // them since this node is not on the delete list from + // a previous run. + tryToReSetIndexedProps(thisVtx, thisVid, indexedProps); + } + + if( !aaiKeysOk || !aaiUriOk ){ + // We could not get the node back using its own key info or aai-uri.
// So, it's a PHANTOM + if (deleteCandidateList.contains(thisVid)) { boolean okFlag = true; boolean updateOnlyFlag = false; try { Object ob = thisVtx.<Object>property("aai-node-type").orElse(null); if( ob == null && !skipIndexUpdateFix ){ - updateIndexedProps(thisVtx, thisVid, nType, propTypeHash, indexedProps); + updateIndexedPropsForMissingNT(thisVtx, thisVid, nType, propTypeHash, indexedProps); dummyUpdCount++; updateOnlyFlag = true; // Since we are updating this delete candidate, not deleting it, we @@ -813,7 +754,7 @@ public class DataGrooming { List<String> tmpDupeGroups = checkAndProcessDupes( TRANSID, FROMAPPID, g, source1, version, nType, secondGetList, dupeFixOn, - deleteCandidateList, singleCommits, dupeGroups, loader); + deleteCandidateList, dupeGroups, loader); Iterator<String> dIter = tmpDupeGroups.iterator(); while (dIter.hasNext()) { // Add in any newly found dupes to our running list @@ -862,7 +803,7 @@ public class DataGrooming { List<String> tmpDupeGroups = checkAndProcessDupes( TRANSID, FROMAPPID, g, source1, version, nType, dupeList, dupeFixOn, - deleteCandidateList, singleCommits, dupeGroups, loader); + deleteCandidateList, dupeGroups, loader); Iterator<String> dIter = tmpDupeGroups.iterator(); while (dIter.hasNext()) { // Add in any newly found dupes to our running list @@ -874,13 +815,7 @@ public class DataGrooming { }// end of extra dupe check for non-dependent nodes - if ( (thisNtDeleteCount > 0) && singleCommits ) { - // NOTE - the singleCommits option is not used in normal processing - g.tx().commit(); - g = AAIGraph.getInstance().getGraph().newTransaction(); - - } - thisNtDeleteCount = 0; + thisNtDeleteCount = 0; // Reset for the next pass LOGGER.info( " Processed " + thisNtCount + " records for [" + nType + "], " + totalNodeCount + " total (in window) overall. " ); }// While-loop for each node type @@ -889,17 +824,16 @@ public class DataGrooming { if( !skipEdgeCheckFlag ){ - // -------------------------------------------------------------------------------------- - // Now, we're going to look for one-armed-edges. Ie. an edge that - // should have - // been deleted (because a vertex on one side was deleted) but - // somehow was not deleted. - // So the one end of it points to a vertexId -- but that vertex is - // empty. - // -------------------------------------------------------------------------------------- + // --------------------------------------------------------------- + // Now, we're going to look for one-armed-edges. Ie. an + // edge that should have been deleted (because a vertex on + // one side was deleted) but somehow was not deleted. + // So the one end of it points to a vertexId -- but that + // vertex is empty. + // -------------------------------------------------------------- // To do some strange checking - we need a second graph object - LOGGER.debug(" ---- DEBUG --- about to open a SECOND graph (takes a little while)--------\n"); + LOGGER.debug(" ---- NOTE --- about to open a SECOND graph (takes a little while)--------\n"); // Note - graph2 just reads - but we want it to use a fresh connection to // the database, so we are NOT using the CACHED DB CONFIG here. @@ -1031,6 +965,7 @@ public class DataGrooming { // If we can NOT get this ghost with the SECOND graph-object, // it is still a ghost since even though we can get data about it using the FIRST graph // object. 
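To make the phantom test above concrete: a vertex counts as a phantom when a lookup by its own identifying properties (aai-uri or its defined keys) does not round-trip back to the same vertex. Below is a minimal, self-contained sketch of that round-trip idea - illustrative only, using an in-memory TinkerGraph instead of the real JanusGraph setup, with the helper name lookupRoundTripOk invented for this example.

import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph;

public class PhantomCheckSketch {

    // True when querying by one of the vertex's own properties brings back
    // that same vertex - i.e. the index round-trip works.
    static boolean lookupRoundTripOk(GraphTraversalSource g, Vertex v, String keyProp) {
        Object val = v.property(keyProp).orElse(null);
        if (val == null) {
            return false; // the property is missing entirely
        }
        return g.V().has(keyProp, val).toList().stream()
                .anyMatch(found -> found.id().equals(v.id()));
    }

    public static void main(String[] args) {
        TinkerGraph graph = TinkerGraph.open();
        GraphTraversalSource g = graph.traversal();
        Vertex ok = g.addV("pserver").property("aai-uri", "/cloud-infrastructure/pservers/pserver/h1").next();
        Vertex bad = g.addV("pserver").next(); // no aai-uri at all
        System.out.println(lookupRoundTripOk(g, ok, "aai-uri"));  // true
        System.out.println(lookupRoundTripOk(g, bad, "aai-uri")); // false - phantom suspect
    }
}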
+ try { ghost2 = g.traversal().V(vIdLong).next(); } @@ -1051,6 +986,7 @@ public class DataGrooming { LOGGER.warn(">>> WARNING trying to get edge's In-vertex props ", err); } } + if (keysMissing || vIn == null || vNtI.equals("") || cantGetUsingVid) { // this is a bad edge because it points to a vertex @@ -1067,14 +1003,7 @@ public class DataGrooming { else { vIn.remove(); } - if (singleCommits) { - // NOTE - the singleCommits option is not used in normal processing - g.tx().commit(); - g = AAIGraph.getInstance().getGraph().newTransaction(); - } - else { - executeFinalCommit = true; - } + executeFinalCommit = true; deleteCount++; } catch (Exception e1) { okFlag = false; @@ -1092,14 +1021,7 @@ public class DataGrooming { // vertex try { e.remove(); - if (singleCommits) { - // NOTE - the singleCommits option is not used in normal processing - g.tx().commit(); - g = AAIGraph.getInstance().getGraph().newTransaction(); - } - else { - executeFinalCommit = true; - } + executeFinalCommit = true; deleteCount++; } catch (Exception ex) { // NOTE - often, the exception is just @@ -1192,14 +1114,7 @@ public class DataGrooming { else if (vOut != null) { vOut.remove(); } - if (singleCommits) { - // NOTE - the singleCommits option is not used in normal processing - g.tx().commit(); - g = AAIGraph.getInstance().getGraph().newTransaction(); - } - else { - executeFinalCommit = true; - } + executeFinalCommit = true; deleteCount++; } catch (Exception e1) { okFlag = false; @@ -1217,14 +1132,7 @@ public class DataGrooming { // vertex try { e.remove(); - if (singleCommits) { - // NOTE - the singleCommits option is not used in normal processing - g.tx().commit(); - g = AAIGraph.getInstance().getGraph().newTransaction(); - } - else { - executeFinalCommit = true; - } + executeFinalCommit = true; deleteCount++; } catch (Exception ex) { // NOTE - often, the exception is just @@ -1260,7 +1168,7 @@ public class DataGrooming { deleteCount = deleteCount + dupeGrpsDeleted; - if (!singleCommits && (deleteCount > 0 || dummyUpdCount > 0) ){ + if (deleteCount > 0 || dummyUpdCount > 0){ executeFinalCommit = true; } @@ -1647,12 +1555,50 @@ public class DataGrooming { }// end of doTheGrooming() - public void updateIndexedProps(Vertex thisVtx, String thisVidStr, String nType, + public void tryToReSetIndexedProps(Vertex thisVtx, String thisVidStr, ArrayList <String> indexedProps) { + // Note - This is for when a node looks to be a phantom (ie. an index/pointer problem) + // We will only deal with properties that are indexed and have a value - and for those, + // we will re-set them to the same value they already have, so that hopefully if their + // index was broken, it may get re-set. + + LOGGER.info(" We will try to re-set the indexed properties for this node without changing any property values. 
VID = " + thisVidStr ); + // These reserved-prop-names are all indexed for all nodes + + ArrayList <String> propList = new ArrayList <String> (); + propList.addAll(indexedProps); + // Add in the global props that we'd also like to reset + propList.add("aai-node-type"); + propList.add("aai-uri"); + propList.add("aai-uuid"); + Iterator<String> propNameItr = propList.iterator(); + while( propNameItr.hasNext() ){ + String propName = propNameItr.next(); + try { + Object valObj = thisVtx.<Object>property(propName).orElse(null); + if( valObj != null ){ + LOGGER.info(" We will try resetting prop [" + propName + + "], to val = [" + valObj.toString() + "] for VID = " + thisVidStr); + thisVtx.property(propName, valObj); + } + } catch (Exception ex ){ + // log that we did not re-set this property + LOGGER.debug("DEBUG - Exception while trying to re-set the indexed properties for this node: VID = " + + thisVidStr + ". exception msg = [" + ex.getMessage() + "]" ); + } + } + } + + + public void updateIndexedPropsForMissingNT(Vertex thisVtx, String thisVidStr, String nType, HashMap <String,String>propTypeHash, ArrayList <String> indexedProps) { - // This is a "missing-aai-node-type" scenario. + // This is for the very specific "missing-aai-node-type" scenario. + // That is: a node that does not have the "aai-node-type" property, but still has + // an aai-node-type Index pointing to it and is an orphan node. Nodes like this + // are (probably) the result of a delete request that got hosed. // Other indexes may also be messed up, so we will update all of them on // this pass. A future pass will just treat this node like a regular orphan // and delete it (if appropriate). + LOGGER.info(" We will be updating the indexed properties for this node to dummy values. VID = " + thisVidStr ); String dummyPropValStr = thisVidStr + "dummy"; // These reserved-prop-names are all indexed for all nodes @@ -1752,9 +1698,20 @@ public class DataGrooming { Object ob = v.<Object>property(propName).orElse(null); if (ob == null || ob.toString().equals("")) { // It is missing a key property + String thisVertId = v.id().toString(); + LOGGER.debug(" -- Vid = " + thisVertId + + ",nType = [" + nType + "], is missing keyPropName = [" + propName + "]"); return true; } } + Object ob = v.<Object>property("aai-uri").orElse(null); + if (ob == null || ob.toString().equals("")) { + // It is missing a key property + String thisVertId = v.id().toString(); + LOGGER.debug(" -- Vid = " + thisVertId + + ",nType = [" + nType + "], is missing its [aai-uri] property"); + return true; + } } catch (AAIException e) { // Something was wrong -- but since we weren't able to check // the keys, we will not declare that it is missing keys. @@ -1829,11 +1786,11 @@ public class DataGrooming { ArrayList<Vertex> dupeVertexList, String ver, Loader loader) throws AAIException { - // This method assumes that it is being passed a List of vertex objects - // which - // violate our uniqueness constraints. - - Vertex nullVtx = null; + // This method assumes that it is being passed a List of + // vertex objects which violate our uniqueness constraints. + // Note - returning a null vertex means we could not + // safely pick one to keep (Ie. safely know which to delete.) 
+ Vertex nullVtx = null; if (dupeVertexList == null) { return nullVtx; @@ -1846,6 +1803,31 @@ public class DataGrooming { return (dupeVertexList.get(0)); } + // If they don't all have the same aai-uri, then we will not + // choose between them - we'll need someone to manually + // check to pick which one makes sense to keep. + Object uriOb = dupeVertexList.get(0).<Object>property("aai-uri").orElse(null); + if( uriOb == null || uriOb.toString().equals("") ){ + // this is a bad node - hopefully will be picked up by phantom checker + return nullVtx; + } + String thisUri = uriOb.toString(); + for (int i = 1; i < listSize; i++) { + uriOb = dupeVertexList.get(i).<Object>property("aai-uri").orElse(null); + if( uriOb == null || uriOb.toString().equals("") ){ + // this is a bad node - hopefully will be picked up by phantom checker + return nullVtx; + } + String nextUri = uriOb.toString(); + if( !thisUri.equals(nextUri)){ + // there are different URIs on these - so we can't pick + // a dupe to keep. Someone will need to look at it. + return nullVtx; + } + } + + // Compare them two at a time to see if we can tell which out of + // the batch to keep. Vertex vtxPreferred = null; Vertex currentFaveVtx = dupeVertexList.get(0); for (int i = 1; i < listSize; i++) { @@ -1860,7 +1842,14 @@ public class DataGrooming { } } - return (currentFaveVtx); + if( currentFaveVtx != null && checkAaiUriOk(g, currentFaveVtx) ){ + return (currentFaveVtx); + } + else { + // We had a preferred vertex, but its aai-uri was bad, so + // we will not recommend one to keep. + return nullVtx; + } } // end of getPreferredDupe() @@ -2041,7 +2030,7 @@ public class DataGrooming { // pointer to it, then save that one. Otherwise, take the // older one. if( !onlyNodeThatIndexPointsToVidStr.equals("") ){ - // only one is reachable via the index - choose that one. + // only one is reachable via the index - choose that one if its aai-uri is also good if( onlyNodeThatIndexPointsToVidStr.equals(vidA.toString()) ){ preferredVtx = vtxA; } @@ -2049,11 +2038,13 @@ public class DataGrooming { preferredVtx = vtxB; } } - else if (vidA < vidB) { + else if ( checkAaiUriOk(g, vtxA) ) { preferredVtx = vtxA; - } else { + } + else if ( checkAaiUriOk(g, vtxB) ) { preferredVtx = vtxB; } + // else we're picking neither because neither one had a working aai-uri index property } } else if (vtxIdsConn2A.size() > vtxIdsConn2B.size()) { // 3 - VertexA is connected to more things than vtxB.
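The tryToReSetIndexedProps change earlier in this file boils down to one move: write each indexed property back with the value it already holds, so that a broken index entry gets a chance to re-register on commit. A stripped-down sketch of just that move (illustrative only; whether the rewrite actually heals an index depends on the storage backend):

import java.util.List;

import org.apache.tinkerpop.gremlin.structure.Vertex;

public class IndexResetSketch {

    // Re-writes each indexed property with the value it already holds.
    // No data changes; the write just re-registers the index entry.
    static void resetIndexedProps(Vertex v, List<String> indexedProps) {
        for (String propName : indexedProps) {
            Object val = v.property(propName).orElse(null);
            if (val != null) {
                v.property(propName, val); // same value written back in
            }
        }
    }

    // Typical call, mirroring the reserved props the grooming code adds:
    // resetIndexedProps(vtx, Arrays.asList("aai-node-type", "aai-uri", "aai-uuid"));
}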
@@ -2112,14 +2103,13 @@ public class DataGrooming { * @param passedVertList the passed vert list * @param dupeFixOn the dupe fix on * @param deleteCandidateList the delete candidate list - * @param singleCommits the single commits * @param alreadyFoundDupeGroups the already found dupe groups * @return the array list */ private List<String> checkAndProcessDupes(String transId, String fromAppId, Graph g, GraphTraversalSource source, String version, String nType, List<Vertex> passedVertList, Boolean dupeFixOn, - Set<String> deleteCandidateList, Boolean singleCommits, + Set<String> deleteCandidateList, ArrayList<String> alreadyFoundDupeGroups, Loader loader ) { ArrayList<String> returnList = new ArrayList<>(); @@ -2203,7 +2193,7 @@ public class DataGrooming { if (dupeFixOn) { didRemove = deleteNonKeepersIfAppropriate(g, dupesStr, prefV.id().toString(), - deleteCandidateList, singleCommits); + deleteCandidateList); } if (didRemove) { dupeGrpsDeleted++; @@ -2255,7 +2245,7 @@ public class DataGrooming { didRemove = deleteNonKeepersIfAppropriate( g, dupesStr, prefV.id() .toString(), - deleteCandidateList, singleCommits); + deleteCandidateList ); } if (didRemove) { dupeGrpsDeleted++; @@ -2359,12 +2349,11 @@ public class DataGrooming { * @param dupeInfoString the dupe info string * @param vidToKeep the vid to keep * @param deleteCandidateList the delete candidate list - * @param singleCommits the single commits * @return the boolean */ private Boolean deleteNonKeepersIfAppropriate(Graph g, String dupeInfoString, String vidToKeep, - Set<String> deleteCandidateList, Boolean singleCommits) { + Set<String> deleteCandidateList ) { Boolean deletedSomething = false; // This assumes that the dupeInfoString is in the format of @@ -2421,11 +2410,6 @@ public class DataGrooming { .traversal().V(longVertId).next(); vtx.remove(); - if (singleCommits) { - // NOTE - the singleCommits option is not used in normal processing - g.tx().commit(); - g = AAIGraph.getInstance().getGraph().newTransaction(); - } } catch (Exception e) { okFlag = false; LoggingContext.statusCode(StatusCode.ERROR); @@ -2455,6 +2439,70 @@ public class DataGrooming { }// end of deleteNonKeepersIfAppropriate() + + /** + * Makes sure aai-uri exists and can be used to get this node back + * + * @param graph the graph traversal source + * @param origVtx the vertex to check + * @return true if aai-uri is populated and the aai-uri-index points to this vtx + * @throws AAIException the AAI exception + */ + public Boolean checkAaiUriOk( GraphTraversalSource graph, Vertex origVtx ) + throws AAIException{ + String aaiUriStr = ""; + try { + Object ob = origVtx.<Object>property("aai-uri").orElse(null); + String origVid = origVtx.id().toString(); + LOGGER.debug("DEBUG --- do checkAaiUriOk() for origVid = " + origVid); + if (ob == null || ob.toString().equals("")) { + // It is missing its aai-uri + LOGGER.debug("DEBUG No [aai-uri] property found for vid = [" + + origVid + "] " ); + return false; + } + else { + aaiUriStr = ob.toString(); + Iterator <Vertex> verts = graph.V().has("aai-uri",aaiUriStr); + int count = 0; + while( verts.hasNext() ){ + count++; + Vertex foundV = verts.next(); + String foundVid = foundV.id().toString(); + if( !origVid.equals(foundVid) ){ + LOGGER.debug("DEBUG aai-uri key property [" + + aaiUriStr + "] for vid = [" + + origVid + "] brought back different vertex with vid = [" + + foundVid + "]."
); + return false; + } + } + if( count == 0 ){ + LOGGER.debug("DEBUG aai-uri key property [" + + aaiUriStr + "] for vid = [" + + origVid + "] could not be used to query for that vertex. "); + return false; + } + else if( count > 1 ){ + LOGGER.debug("DEBUG aai-uri key property [" + + aaiUriStr + "] for vid = [" + + origVid + "] brought back multiple (" + + count + ") vertices instead of just one. "); + return false; + } + } + } + catch( Exception ex ){ + LoggingContext.statusCode(StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + LOGGER.error(" ERROR trying to get node with aai-uri: [" + aaiUriStr + "]" + LogFormatTools.getStackTop(ex)); + } + return true; + + }// End of checkAaiUriOk() + /** * Gets the node just using key params. * @@ -2876,47 +2924,44 @@ class CommandLineArgs { @Parameter(names = "--help", help = true) public boolean help; - @Parameter(names = "-edgesOnly", description = "Check grooming on edges only", arity = 1) + @Parameter(names = "-edgesOnly", description = "Check grooming on edges only") public Boolean edgesOnlyFlag = false; - @Parameter(names = "-autoFix", description = "doautofix", arity = 1) + @Parameter(names = "-autoFix", description = "doautofix") public Boolean doAutoFix = false; - @Parameter(names = "-skipHostCheck", description = "skipHostCheck", arity = 1) + @Parameter(names = "-skipHostCheck", description = "skipHostCheck") public Boolean skipHostCheck = false; - @Parameter(names = "-dontFixOrphans", description = "dontFixOrphans", arity = 1) + @Parameter(names = "-dontFixOrphans", description = "dontFixOrphans") public Boolean dontFixOrphansFlag = false; - @Parameter(names = "-singleCommits", description = "singleCommits", arity = 1) - public Boolean singleCommits = false; - - @Parameter(names = "-dupeCheckOff", description = "dupeCheckOff", arity = 1) + @Parameter(names = "-dupeCheckOff", description = "dupeCheckOff") public Boolean dupeCheckOff = false; - @Parameter(names = "-dupeFixOn", description = "dupeFixOn", arity = 1) + @Parameter(names = "-dupeFixOn", description = "dupeFixOn") public Boolean dupeFixOn = false; - @Parameter(names = "-ghost2CheckOff", description = "ghost2CheckOff", arity = 1) + @Parameter(names = "-ghost2CheckOff", description = "ghost2CheckOff") public Boolean ghost2CheckOff = false; - @Parameter(names = "-ghost2FixOn", description = "ghost2FixOn", arity = 1) + @Parameter(names = "-ghost2FixOn", description = "ghost2FixOn") public Boolean ghost2FixOn = false; - @Parameter(names = "-neverUseCache", description = "neverUseCache", arity = 1) + @Parameter(names = "-neverUseCache", description = "neverUseCache") public Boolean neverUseCache = false; - @Parameter(names = "-skipEdgeChecks", description = "skipEdgeChecks", arity = 1) + @Parameter(names = "-skipEdgeChecks", description = "skipEdgeChecks") public Boolean skipEdgeCheckFlag = false; - @Parameter(names = "-skipIndexUpdateFix", description = "skipIndexUpdateFix", arity = 1) + @Parameter(names = "-skipIndexUpdateFix", description = "skipIndexUpdateFix") public Boolean skipIndexUpdateFix = false; @Parameter(names = "-maxFix", description = "maxFix") - public int maxRecordsToFix = AAIConstants.AAI_GROOMING_DEFAULT_MAX_FIX; + public int maxRecordsToFix = GraphAdminConstants.AAI_GROOMING_DEFAULT_MAX_FIX; @Parameter(names = "-sleepMinutes", description = "sleepMinutes") - public int sleepMinutes = AAIConstants.AAI_GROOMING_DEFAULT_SLEEP_MINUTES; + public int sleepMinutes = GraphAdminConstants.AAI_GROOMING_DEFAULT_SLEEP_MINUTES; // A value of 0 means 
that we will not have a time-window -- we will look // at all nodes of the passed-in nodeType. @@ -2926,11 +2971,9 @@ class CommandLineArgs { @Parameter(names = "-f", description = "file") public String prevFileName = ""; - @Parameter(names = "-singleNodeType", description = "sleepMinutes") + @Parameter(names = "-singleNodeType", description = "singleNodeType") public String singleNodeType = ""; - - Boolean finalShutdownFlag = true; - Boolean cacheDbOkFlag = true; + } public HashMap<String, Vertex> getGhostNodeHash() { diff --git a/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java b/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java index 85a127f..ff3a6d7 100644 --- a/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java +++ b/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java @@ -85,7 +85,6 @@ public class DataGroomingTasks { append("enableedgesonly" , AAIConfig.get("aai.datagrooming.enableedgesonly"), dataGroomingFlagMap); append("enableskipedgechecks" , AAIConfig.get("aai.datagrooming.enableskipedgechecks"), dataGroomingFlagMap); append("enablemaxfix" , AAIConfig.get("aai.datagrooming.enablemaxfix"), dataGroomingFlagMap); - append("enablesinglecommits" , AAIConfig.get("aai.datagrooming.enablesinglecommits"), dataGroomingFlagMap); append("enabledupecheckoff" , AAIConfig.get("aai.datagrooming.enabledupecheckoff"), dataGroomingFlagMap); append("enableghost2checkoff" , AAIConfig.get("aai.datagrooming.enableghost2checkoff"), dataGroomingFlagMap); append("enableghost2fixon" , AAIConfig.get("aai.datagrooming.enableghost2fixon"), dataGroomingFlagMap); @@ -94,6 +93,7 @@ public class DataGroomingTasks { append("timewindowminutesvalue" , AAIConfig.get("aai.datagrooming.timewindowminutesvalue"), dataGroomingFlagMap); append("sleepminutesvalue" , AAIConfig.get("aai.datagrooming.sleepminutesvalue"), dataGroomingFlagMap); append("maxfixvalue" , AAIConfig.get("aai.datagrooming.maxfixvalue"), dataGroomingFlagMap); + // Note: singleNodeType parameter is not used when running from the cron if(LOGGER.isDebugEnabled()){ LOGGER.debug("DataGrooming Flag Values : "); @@ -113,7 +113,7 @@ public class DataGroomingTasks { } if("true".equals(dataGroomingFlagMap.get("enabletimewindowminutes"))){ paramsArray.add("-timeWindowMinutes"); - paramsArray.add(dataGroomingFlagMap.get("enabletimewindowminutesvalue")); + paramsArray.add(dataGroomingFlagMap.get("timewindowminutesvalue")); } if("true".equals(dataGroomingFlagMap.get("enableskiphostcheck"))){ paramsArray.add("-skipHostCheck"); @@ -135,9 +135,6 @@ public class DataGroomingTasks { paramsArray.add("-maxFix"); paramsArray.add(dataGroomingFlagMap.get("maxfixvalue")); } - if("true".equals(dataGroomingFlagMap.get("enablesinglecommits"))){ - paramsArray.add("-singleCommits"); - } if("true".equals(dataGroomingFlagMap.get("enabledupecheckoff"))){ paramsArray.add("-dupeCheckOff"); } diff --git a/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java b/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java index 12815ee..e7ae5ec 100644 --- a/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java +++ b/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java @@ -17,28 +17,6 @@ * limitations under the License. 
* ============LICENSE_END========================================================= */ - -/** - * ============LICENSE_START======================================================= - * org.onap.aai - * ================================================================================ - * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * - * ECOMP is a trademark and service mark of AT&T Intellectual Property. - */ package org.onap.aai.datasnapshot; import java.io.ByteArrayOutputStream; @@ -48,32 +26,34 @@ import java.io.IOException; import java.io.InputStream; import java.io.SequenceInputStream; import java.util.*; - import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; - import java.util.concurrent.TimeUnit; -import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.commons.configuration.PropertiesConfiguration; - import org.apache.tinkerpop.gremlin.structure.io.IoCore; -import org.apache.tinkerpop.gremlin.structure.io.graphson.LegacyGraphSONReader; import org.onap.aai.dbmap.AAIGraph; import org.onap.aai.dbmap.AAIGraphConfig; import org.onap.aai.exceptions.AAIException; import org.onap.aai.logging.ErrorLogHelper; import org.onap.aai.util.AAIConfig; import org.onap.aai.util.AAIConstants; +import org.onap.aai.util.GraphAdminConstants; import org.onap.aai.util.AAISystemExitUtil; import org.onap.aai.util.FormatDate; +import org.onap.aai.util.GraphAdminDBUtils; import com.att.eelf.configuration.Configuration; import com.att.eelf.configuration.EELFLogger; import com.att.eelf.configuration.EELFManager; +import com.beust.jcommander.JCommander; +import com.beust.jcommander.Parameter; +import com.beust.jcommander.ParameterException; + import org.janusgraph.core.JanusGraph; import org.janusgraph.core.JanusGraphFactory; import org.janusgraph.core.util.JanusGraphCleanup; @@ -87,12 +67,15 @@ public class DataSnapshot { private static final Set<String> SNAPSHOT_RELOAD_COMMANDS = new HashSet<>(); + private static final String MIGRATION_PROCESS_NAME = "migration"; + static { - SNAPSHOT_RELOAD_COMMANDS.add("RELOAD_LEGACY_DATA"); SNAPSHOT_RELOAD_COMMANDS.add("RELOAD_DATA"); SNAPSHOT_RELOAD_COMMANDS.add("RELOAD_DATA_MULTI"); } + private CommandLineArgs cArgs; + /** * The main method. 
@@ -104,113 +87,195 @@ public class DataSnapshot { boolean success = true; + Boolean dbClearFlag = false; + JanusGraph graph = null; + String command = "JUST_TAKE_SNAPSHOT"; // This is the default + String oldSnapshotFileName = ""; + + DataSnapshot dataSnapshot = new DataSnapshot(); + success = dataSnapshot.executeCommand(args, success, dbClearFlag, graph, command, + oldSnapshotFileName); + + if(success){ + AAISystemExitUtil.systemExitCloseAAIGraph(0); + } else { + AAISystemExitUtil.systemExitCloseAAIGraph(1); + } + + }// End of main() + + + public boolean executeCommand(String[] args, boolean success, + Boolean dbClearFlag, JanusGraph graph, String command, + String oldSnapshotFileName) { + // Set the logging file properties to be used by EELFManager System.setProperty("aai.service.name", DataSnapshot.class.getSimpleName()); Properties props = System.getProperties(); props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS); props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG); LOGGER = EELFManager.getInstance().getLogger(DataSnapshot.class); - Boolean dbClearFlag = false; - JanusGraph graph = null; - String command = "JUST_TAKE_SNAPSHOT"; // This is the default - String oldSnapshotFileName = ""; + cArgs = new CommandLineArgs(); - Long vertAddDelayMs = 1L; // Default value - Long edgeAddDelayMs = 1L; // Default value + String itemName = "aai.datasnapshot.threads.for.create"; - Long failureDelayMs = 50L; // Default value - Long retryDelayMs = 1500L; // Default value - int maxErrorsPerThread = 25; // Default value - Long vertToEdgeProcDelay = 9000L; // Default value - Long staggerThreadDelay = 5000L; // Default value - - int threadCount = 0; - Boolean debugFlag = false; - int debugAddDelayTime = 1; // Default to 1 millisecond - - boolean isExistingTitan = false; + try { + String val = AAIConfig.get(itemName); + if( val != null && !val.equals("") ){ + cArgs.threadCount = Integer.parseInt(val); + } + }catch ( Exception e ){ + LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage()); + } + int threadCount4Create = cArgs.threadCount; + + cArgs.snapshotType = "graphson"; + + Long vertAddDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_VERTEX_ADD_DELAY_MS; + itemName = "aai.datasnapshot.vertex.add.delay.ms"; + try { + String val = AAIConfig.get(itemName); + if( val != null && !val.equals("") ){ + cArgs.vertAddDelayMs = Long.parseLong(val); + } + }catch ( Exception e ){ + LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage()); + } + + Long edgeAddDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_EDGE_ADD_DELAY_MS; + itemName = "aai.datasnapshot.edge.add.delay.ms"; + try { + String val = AAIConfig.get(itemName); + if( val != null && !val.equals("") ){ + cArgs.edgeAddDelayMs = Long.parseLong(val); + } + }catch ( Exception e ){ + LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage()); + } + + Long failureDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_FAILURE_DELAY_MS; + itemName = "aai.datasnapshot.failure.delay.ms"; + try { + String val = AAIConfig.get(itemName); + if( val != null && !val.equals("") ){ + cArgs.failureDelayMs = Long.parseLong(val); + } + }catch ( Exception e ){ + LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. 
" + e.getMessage()); + } + + Long retryDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_RETRY_DELAY_MS; + itemName = "aai.datasnapshot.retry.delay.ms"; + try { + String val = AAIConfig.get(itemName); + if( val != null && !val.equals("") ){ + cArgs.retryDelayMs = Long.parseLong(val); + } + }catch ( Exception e ){ + LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage()); + } + + int maxErrorsPerThread = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_MAX_ERRORS_PER_THREAD; + itemName = "aai.datasnapshot.max.errors.per.thread"; + try { + String val = AAIConfig.get(itemName); + if( val != null && !val.equals("") ){ + cArgs.maxErrorsPerThread = Integer.parseInt(val); + } + }catch ( Exception e ){ + LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage()); + } + + Long vertToEdgeProcDelay = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_VERTEX_TO_EDGE_PROC_DELAY_MS; + itemName = "aai.datasnapshot.vertex.to.edge.proc.delay.ms"; + try { + String val = AAIConfig.get(itemName); + if( val != null && !val.equals("") ){ + cArgs.vertToEdgeProcDelay = Long.parseLong(val); + } + }catch ( Exception e ){ + LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage()); + } + + itemName = "aai.datasnapshot.stagger.thread.delay.ms"; + try { + String val = AAIConfig.get(itemName); + if( val != null && !val.equals("") ){ + cArgs.staggerThreadDelay = Long.parseLong(val); + } + }catch ( Exception e ){ + LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage()); + } + + long debugAddDelayTime = 1; // Default to 1 millisecond + Boolean debug4Create = false; // By default we do not use debugging for snapshot creation + JCommander jCommander; + try { + jCommander = new JCommander(cArgs, args); + jCommander.setProgramName(DataSnapshot.class.getSimpleName()); + } catch (ParameterException e1) { + LOGGER.error("Error - invalid value passed to list of args - "+args); + AAISystemExitUtil.systemExitCloseAAIGraph(1); + } + + if (args.length >= 1) { - command = args[0]; + command = cArgs.command; } - - if( SNAPSHOT_RELOAD_COMMANDS.contains(command)){ - if (args.length == 2) { + + String source = cArgs.caller; + + String snapshotType = "graphson"; + if( SNAPSHOT_RELOAD_COMMANDS.contains(cArgs.command)){ + if (args.length >= 2) { // If re-loading, they need to also pass the snapshot file name to use. // We expected the file to be found in our snapshot directory. 
- oldSnapshotFileName = args[1]; + oldSnapshotFileName = cArgs.oldFileName; + snapshotType = cArgs.snapshotType; } } else if( command.equals("THREADED_SNAPSHOT") ){ - if (args.length == 2) { + if (args.length >= 2) { // If doing a "threaded" snapshot, they need to specify how many threads to use try { - threadCount = Integer.parseInt(args[1]); + threadCount4Create = cArgs.threadCount; } catch ( NumberFormatException nfe ){ - ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]"); - LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]"); + ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + cArgs.threadCount + "]"); + LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + cArgs.threadCount + "]"); AAISystemExitUtil.systemExitCloseAAIGraph(1); } - if( threadCount < 1 || threadCount > 100 ){ - ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]"); - LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]"); + if( threadCount4Create < 1 || threadCount4Create > 100 ){ + ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + cArgs.threadCount + "]"); + LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + cArgs.threadCount + "]"); AAISystemExitUtil.systemExitCloseAAIGraph(1); } - LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount ); - } - else if (args.length == 3) { + LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount4Create ); + // If doing a "threaded" snapshot, they need to specify how many threads to use // They can also use debug mode if they pass the word "DEBUG" to do the nodes one at a time to see where it breaks. - try { - threadCount = Integer.parseInt(args[1]); - } - catch ( NumberFormatException nfe ){ - ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]"); - LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]"); - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } - if( threadCount < 1 || threadCount > 100 ){ - ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]"); - LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]"); - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } - if( args[2].equals("DEBUG") ){ - debugFlag = true; - } - LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount + - ", and DEBUG mode set ON. 
"); - } - else if (args.length == 4) { - // If doing a "threaded" snapshot, they need to specify how many threads to use (param 1) - // They can also use debug mode if they pass the word "DEBUG" to do the nodes one (param 2) - // They can also pass a delayTimer - how many milliseconds to put between each node's ADD (param 3) - try { - threadCount = Integer.parseInt(args[1]); + if( cArgs.debugFlag.equals("DEBUG") ){ + debug4Create = true; } - catch ( NumberFormatException nfe ){ - ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]"); - LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]"); - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } - if( threadCount < 1 || threadCount > 100 ){ - ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]"); - LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]"); - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } - if( args[2].equals("DEBUG") ){ - debugFlag = true; - } - try { - debugAddDelayTime = Integer.parseInt(args[3]); - } - catch ( NumberFormatException nfe ){ - ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) debugAddDelayTime passed to DataSnapshot [" + args[3] + "]"); - LOGGER.debug("Bad (non-integer) debugAddDelayTime passed to DataSnapshot [" + args[3] + "]"); - AAISystemExitUtil.systemExitCloseAAIGraph(1); + LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount4Create + + ", and DEBUG-flag set to: " + debug4Create ); + + if (debug4Create) { + // If doing a "threaded" snapshot, they need to specify how many threads to use (param 1) + // They can also use debug mode if they pass the word "DEBUG" to do the nodes one (param 2) + // They can also pass a delayTimer - how many milliseconds to put between each node's ADD (param 3) + try { + debugAddDelayTime = cArgs.debugAddDelayTime; + } catch (NumberFormatException nfe) { + ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) debugAddDelayTime passed to DataSnapshot [" + + cArgs.debugAddDelayTime + "]"); + LOGGER.debug("Bad (non-integer) debugAddDelayTime passed to DataSnapshot ["+ cArgs.debugAddDelayTime + "]"); + AAISystemExitUtil.systemExitCloseAAIGraph(1); + } + LOGGER.debug(" Will do Threaded Snapshot with threadCount = "+ threadCount4Create + ", DEBUG-flag set to: " + + debug4Create + ", and addDelayTimer = " + debugAddDelayTime + " mSec. "); } - LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount + - ", DEBUG mode ON and addDelayTimer = " + debugAddDelayTime + " mSec. "); } else { ErrorLogHelper.logError("AAI_6128", "Wrong param count (should be 2,3 or 4) when using THREADED_SNAPSHOT."); @@ -219,37 +284,32 @@ public class DataSnapshot { } } else if( command.equals("MULTITHREAD_RELOAD") ){ - // Note - this will use as many threads as the snapshot file is + // Note - this will use as many threads as the snapshot file is // broken up into. (up to a limit) - if (args.length == 2) { + if (args.length >= 2) { // Since they are re-loading, they need to pass the snapshot file name to use. // We expected the file to be found in our snapshot directory. Note - if // it is a multi-part snapshot, then this should be the root of the name. // We will be using the default delay timers. - oldSnapshotFileName = args[1]; - } - else if (args.length == 7) { - // Since they are re-loading, they need to pass the snapshot file name to use. 
- // We expected the file to be found in our snapshot directory. Note - if - // it is a multi-part snapshot, then this should be the root of the name. - oldSnapshotFileName = args[1]; + oldSnapshotFileName = cArgs.oldFileName; + // They should be passing the timers in in this order: // vertDelay, edgeDelay, failureDelay, retryDelay - vertAddDelayMs = Long.parseLong(args[2]); - edgeAddDelayMs = Long.parseLong(args[3]); - failureDelayMs = Long.parseLong(args[4]); - retryDelayMs = Long.parseLong(args[5]); + vertAddDelayMs = cArgs.vertAddDelayMs; + edgeAddDelayMs = cArgs.edgeAddDelayMs; + failureDelayMs = cArgs.failureDelayMs; + retryDelayMs = cArgs.retryDelayMs; try { - maxErrorsPerThread = Integer.parseInt(args[6]); + maxErrorsPerThread = cArgs.maxErrorsPerThread; } catch ( NumberFormatException nfe ){ - ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]"); - LOGGER.debug("Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]"); + ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + cArgs.maxErrorsPerThread + "]"); + LOGGER.debug("Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + cArgs.maxErrorsPerThread + "]"); AAISystemExitUtil.systemExitCloseAAIGraph(1); } if( maxErrorsPerThread < 1 ){ - ErrorLogHelper.logError("AAI_6128", "Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]"); - LOGGER.debug("Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]"); + ErrorLogHelper.logError("AAI_6128", "Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + cArgs.maxErrorsPerThread + "]"); + LOGGER.debug("Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + cArgs.maxErrorsPerThread + "]"); AAISystemExitUtil.systemExitCloseAAIGraph(1); } } @@ -261,16 +321,37 @@ public class DataSnapshot { } else if (command.equals("CLEAR_ENTIRE_DATABASE")) { if (args.length >= 2) { - oldSnapshotFileName = args[1]; - } - if (args.length == 3) { - String titanFlag = args[2]; - if ("titan".equalsIgnoreCase(titanFlag)) { - isExistingTitan = true; - } + oldSnapshotFileName = cArgs.oldFileName; } } + + //Print Defaults + LOGGER.info("DataSnapshot command is [" + cArgs.command + "]"); + LOGGER.info("File name to reload snapshot [" + cArgs.oldFileName + "]"); + LOGGER.info("snapshotType is [" + cArgs.snapshotType + "]"); + LOGGER.info("Thread count is [" + cArgs.threadCount + "]"); + LOGGER.info("Debug Flag is [" + cArgs.debugFlag + "]"); + LOGGER.info("DebugAddDelayTimer is [" + cArgs.debugAddDelayTime + "]"); + LOGGER.info("VertAddDelayMs is [" + cArgs.vertAddDelayMs + "]"); + LOGGER.info("FailureDelayMs is [" + cArgs.failureDelayMs + "]"); + LOGGER.info("RetryDelayMs is [" + cArgs.retryDelayMs + "]"); + LOGGER.info("MaxErrorsPerThread is [" + cArgs.maxErrorsPerThread + "]"); + LOGGER.info("VertToEdgeProcDelay is [" + cArgs.vertToEdgeProcDelay + "]"); + LOGGER.info("StaggerThreadDelay is [" + cArgs.staggerThreadDelay + "]"); + LOGGER.info("Caller process is ["+ cArgs.caller + "]"); + + //Print non-default values + if (!AAIConfig.isEmpty(cArgs.fileName)){ + LOGGER.info("Snapshot file name (if not default) to use is [" + cArgs.fileName + "]"); + } + if (!AAIConfig.isEmpty(cArgs.snapshotDir)){ + LOGGER.info("Snapshot file Directory path (if not default) to use is [" + cArgs.snapshotDir + "]"); + } + if (!AAIConfig.isEmpty(cArgs.oldFileDir)){ + LOGGER.info("Directory path (if not default) to load the old 
snapshot file from is [" + cArgs.oldFileDir + "]"); + } + ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { @@ -278,43 +359,60 @@ public class DataSnapshot { ErrorLogHelper.loadProperties(); LOGGER.debug("Command = " + command + ", oldSnapshotFileName = [" + oldSnapshotFileName + "]."); String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots"; - // Make sure the dataSnapshots directory is there new File(targetDir).mkdirs(); LOGGER.debug(" ---- NOTE --- about to open graph (takes a little while) "); - if (command.equals("JUST_TAKE_SNAPSHOT")) { - // ------------------------------------------ - // They just want to take a snapshot. - // ------------------------------------------ + if ( (command.equals("THREADED_SNAPSHOT") || command.equals("JUST_TAKE_SNAPSHOT")) + && threadCount4Create == 1 ){ + // ------------------------------------------------------------------------------- + // They want to take a snapshot on a single thread and have it go in a single file + // NOTE - they can't use the DEBUG option in this case. + // ------------------------------------------------------------------------------- + LOGGER.debug("\n>>> Command = " + command ); verifyGraph(AAIGraph.getInstance().getGraph()); FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT"); String dteStr = fd.getDateTime(); - String newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr; graph = AAIGraph.getInstance().getGraph(); - + GraphAdminDBUtils.logConfigs(graph.configuration()); + String newSnapshotOutFname = null; + long timeA = System.nanoTime(); + newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr; graph.io(IoCore.graphson()).writeGraph(newSnapshotOutFname); - LOGGER.debug("Snapshot written to " + newSnapshotOutFname); + long timeB = System.nanoTime(); + long diffTime = timeB - timeA; + long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); + long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); + LOGGER.debug(" -- Single-Thread dataSnapshot took: " + + minCount + " minutes, " + secCount + " seconds " ); } - else if (command.equals("THREADED_SNAPSHOT")) { - // --------------------------------------------------------------------- - // They want the creation of the snapshot to be spread out via threads - // --------------------------------------------------------------------- - + else if ( (command.equals("THREADED_SNAPSHOT") || command.equals("JUST_TAKE_SNAPSHOT")) + && threadCount4Create > 1 ){ + // ------------------------------------------------------------ + // They want the creation of the snapshot to be spread out via + // threads and go to multiple files + // ------------------------------------------------------------ + LOGGER.debug("\n>>> Command = " + command ); + String newSnapshotOutFname; + if (!AAIConfig.isEmpty(cArgs.fileName)){ + newSnapshotOutFname = cArgs.fileName; + } else { FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT"); String dteStr = fd.getDateTime(); - String newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr; + newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr; + } verifyGraph(AAIGraph.getInstance().getGraph()); graph = AAIGraph.getInstance().getGraph(); LOGGER.debug(" Successfully got the Graph instance. 
"); + GraphAdminDBUtils.logConfigs(graph.configuration()); long timeA = System.nanoTime(); - LOGGER.debug(" Need to divide vertexIds across this many threads: " + threadCount ); + LOGGER.debug(" Need to divide vertexIds across this many threads: " + threadCount4Create ); HashMap <String,ArrayList> vertListHash = new HashMap <String,ArrayList> (); - for( int t = 0; t < threadCount; t++ ){ + for( int t = 0; t < threadCount4Create; t++ ){ ArrayList <Vertex> vList = new ArrayList <Vertex> (); String tk = "" + t; vertListHash.put( tk, vList); @@ -322,8 +420,8 @@ public class DataSnapshot { LOGGER.debug("Count how many nodes are in the db. "); long totalVertCount = graph.traversal().V().count().next(); LOGGER.debug(" Total Count of Nodes in DB = " + totalVertCount + "."); - long nodesPerFile = totalVertCount / threadCount; - LOGGER.debug(" Thread count = " + threadCount + ", each file will get (roughly): " + nodesPerFile + " nodes."); + long nodesPerFile = totalVertCount / threadCount4Create; + LOGGER.debug(" Thread count = " + threadCount4Create + ", each file will get (roughly): " + nodesPerFile + " nodes."); long timeA2 = System.nanoTime(); long diffTime = timeA2 - timeA; long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); @@ -340,7 +438,7 @@ public class DataSnapshot { // Divide up all the vertices so we can process them on different threads vtxIndex++; thisThrIndex++; - if( (thisThrIndex > nodesPerFile) && (currentTNum < threadCount -1) ){ + if( (thisThrIndex > nodesPerFile) && (currentTNum < threadCount4Create -1) ){ // We will need to start adding to the Hash for the next thread currentTNum++; currentTKey = "" + currentTNum; @@ -355,20 +453,20 @@ public class DataSnapshot { secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); LOGGER.debug(" -- To Loop over all vertices, and put them into sub-Arrays it took: " + minCount + " minutes, " + secCount + " seconds " ); - + // Need to print out each set of vertices using it's own thread ArrayList <Thread> threadArr = new ArrayList <Thread> (); - for( int thNum = 0; thNum < threadCount; thNum++ ){ + for( int thNum = 0; thNum < threadCount4Create; thNum++ ){ String thNumStr = "" + thNum; String subFName = newSnapshotOutFname + ".P" + thNumStr; Thread thr = new Thread(new PrintVertexDetails(graph, subFName, vertListHash.get(thNumStr), - debugFlag, debugAddDelayTime) ); + debug4Create, debugAddDelayTime, snapshotType) ); thr.start(); threadArr.add(thr); } // Make sure all the threads finish before moving on. - for( int thNum = 0; thNum < threadCount; thNum++ ){ + for( int thNum = 0; thNum < threadCount4Create; thNum++ ){ if( null != threadArr.get(thNum) ){ (threadArr.get(thNum)).join(); } @@ -380,170 +478,173 @@ public class DataSnapshot { secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); LOGGER.debug(" -- To write all the data out to snapshot files, it took: " + minCount + " minutes, " + secCount + " seconds " ); - + - } else if( command.equals("MULTITHREAD_RELOAD") ){ + } else if( command.equals("MULTITHREAD_RELOAD") ){ // --------------------------------------------------------------------- // They want the RELOAD of the snapshot to be spread out via threads // NOTE - it will only use as many threads as the number of files the // snapshot is written to. Ie. if you have a single-file snapshot, // then this will be single-threaded. 
- // + // + LOGGER.debug("\n>>> Command = " + command ); + + if (cArgs.oldFileDir != null && !cArgs.oldFileDir.isEmpty()){ + targetDir = cArgs.oldFileDir; + } ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false); int fCount = snapFilesArr.size(); - Iterator <File> fItr = snapFilesArr.iterator(); - JanusGraph graph1 = AAIGraph.getInstance().getGraph(); long timeStart = System.nanoTime(); - + HashMap <String,String> old2NewVertIdMap = new HashMap<String,String> (); - + // We're going to try loading in the vertices - without edges or properties // using Separate threads - + ExecutorService executor = Executors.newFixedThreadPool(fCount); List<Future<HashMap<String,String>>> list = new ArrayList<Future<HashMap<String,String>>>(); - + for( int i=0; i < fCount; i++ ){ File f = snapFilesArr.get(i); String fname = f.getName(); String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname; - Thread.sleep(staggerThreadDelay); // Stagger the threads a bit + Thread.sleep(cArgs.staggerThreadDelay); // Stagger the threads a bit LOGGER.debug(" -- Read file: [" + fullSnapName + "]"); LOGGER.debug(" -- Call the PartialVertexLoader to just load vertices ----"); - LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs - + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs + LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs + + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs + ", maxErrorsPerThread = " + maxErrorsPerThread ); - Callable <HashMap<String,String>> vLoader = new PartialVertexLoader(graph1, fullSnapName, + Callable <HashMap<String,String>> vLoader = new PartialVertexLoader(graph1, fullSnapName, vertAddDelayMs, failureDelayMs, retryDelayMs, maxErrorsPerThread, LOGGER); Future <HashMap<String,String>> future = (Future<HashMap<String, String>>) executor.submit(vLoader); - + // add Future to the list, we can get return value using Future list.add(future); LOGGER.debug(" -- Starting PartialDbLoad VERT_ONLY thread # "+ i ); } - - threadCount = 0; + + int threadCount4Reload = 0; int threadFailCount = 0; for(Future<HashMap<String,String>> fut : list){ - threadCount++; + threadCount4Reload++; try { old2NewVertIdMap.putAll(fut.get()); - LOGGER.debug(" -- back from PartialVertexLoader. returned thread # " + threadCount + + LOGGER.debug(" -- back from PartialVertexLoader. returned thread # " + threadCount4Reload + ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() ); - } - catch (InterruptedException e) { threadFailCount++; e.printStackTrace(); - } + } + catch (InterruptedException e) { + threadFailCount++; + e.printStackTrace(); + } catch (ExecutionException e) { threadFailCount++; e.printStackTrace(); } - } - + } + executor.shutdown(); - + if( threadFailCount > 0 ) { String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully.
"; LOGGER.debug(emsg); throw new Exception( emsg ); } - + long timeX = System.nanoTime(); long diffTime = timeX - timeStart; long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); LOGGER.debug(" -- To reload just the vertex ids from the snapshot files, it took: " + minCount + " minutes, " + secCount + " seconds " ); - + // Give the DB a little time to chew on all those vertices Thread.sleep(vertToEdgeProcDelay); - + // ---------------------------------------------------------------------------------------- LOGGER.debug("\n\n\n -- Now do the edges/props ----------------------"); // ---------------------------------------------------------------------------------------- - - + + // We're going to try loading in the edges and missing properties // Note - we're passing the whole oldVid2newVid mapping to the PartialPropAndEdgeLoader // so that the String-updates to the GraphSON will happen in the threads instead of // here in the un-threaded calling method. - executor = Executors.newFixedThreadPool(fCount); + executor = Executors.newFixedThreadPool(fCount); ArrayList<Future<ArrayList<String>>> listEdg = new ArrayList<Future<ArrayList<String>>>(); for( int i=0; i < fCount; i++ ){ File f = snapFilesArr.get(i); String fname = f.getName(); String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname; - Thread.sleep(staggerThreadDelay); // Stagger the threads a bit + Thread.sleep(cArgs.staggerThreadDelay); // Stagger the threads a bit LOGGER.debug(" -- Read file: [" + fullSnapName + "]"); LOGGER.debug(" -- Call the PartialPropAndEdgeLoader for Properties and EDGEs ----"); - LOGGER.debug(" -- edgeAddDelayMs = " + vertAddDelayMs - + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs + LOGGER.debug(" -- edgeAddDelayMs = " + vertAddDelayMs + + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs + ", maxErrorsPerThread = " + maxErrorsPerThread ); - - Callable eLoader = new PartialPropAndEdgeLoader(graph1, fullSnapName, - edgeAddDelayMs, failureDelayMs, retryDelayMs, + + Callable eLoader = new PartialPropAndEdgeLoader(graph1, fullSnapName, + edgeAddDelayMs, failureDelayMs, retryDelayMs, old2NewVertIdMap, maxErrorsPerThread, LOGGER); Future <ArrayList<String>> future = (Future<ArrayList<String>>) executor.submit(eLoader); - + //add Future to the list, we can get return value using Future listEdg.add(future); LOGGER.debug(" -- Starting PartialPropAndEdge thread # "+ i ); } - - threadCount = 0; + + threadCount4Reload = 0; for(Future<ArrayList<String>> fut : listEdg){ - threadCount++; + threadCount4Reload++; try{ fut.get(); // DEBUG -- should be doing something with the return value if it's not empty - ie. errors - LOGGER.debug(" -- back from PartialPropAndEdgeLoader. thread # " + threadCount ); - } - catch (InterruptedException e) { + LOGGER.debug(" -- back from PartialPropAndEdgeLoader. thread # " + threadCount4Reload ); + } + catch (InterruptedException e) { threadFailCount++; e.printStackTrace(); - } + } catch (ExecutionException e) { threadFailCount++; e.printStackTrace(); } - } - + } + executor.shutdown(); - + if( threadFailCount > 0 ) { String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully. 
"; LOGGER.debug(emsg); throw new Exception( emsg ); } - + // This is needed so we can see the data committed by the called threads graph1.tx().commit(); - + long timeEnd = System.nanoTime(); diffTime = timeEnd - timeX; minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); LOGGER.debug(" -- To reload the edges and properties from snapshot files, it took: " + minCount + " minutes, " + secCount + " seconds " ); - + long totalDiffTime = timeEnd - timeStart; long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime); long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount); LOGGER.debug(" -- TOTAL multi-threaded reload time: " + totalMinCount + " minutes, " + totalSecCount + " seconds " ); - + } else if (command.equals("CLEAR_ENTIRE_DATABASE")) { // ------------------------------------------------------------------ // They are calling this to clear the db before re-loading it // later // ------------------------------------------------------------------ - + LOGGER.debug("\n>>> Command = " + command ); // First - make sure the backup file(s) they will be using can be // found and has(have) data. // getFilesToProcess makes sure the file(s) exist and have some data. getFilesToProcess(targetDir, oldSnapshotFileName, true); - + LOGGER.debug("\n>>> WARNING <<<< "); LOGGER.debug(">>> All data and schema in this database will be removed at this point. <<<"); LOGGER.debug(">>> Processing will begin in 5 seconds. <<<"); @@ -562,118 +663,64 @@ public class DataSnapshot { String serviceName = System.getProperty("aai.service.name", "NA"); LOGGER.debug("Getting new configs for clearig"); PropertiesConfiguration propertiesConfiguration = new AAIGraphConfig.Builder(rtConfig).forService(serviceName).withGraphType(REALTIME_DB).buildConfiguration(); - if(isExistingTitan){ - LOGGER.debug("Existing DB is Titan"); - propertiesConfiguration.setProperty("graph.titan-version","1.0.0"); - } LOGGER.debug("Open New Janus Graph"); JanusGraph janusGraph = JanusGraphFactory.open(propertiesConfiguration); verifyGraph(janusGraph); - - if(isExistingTitan){ - JanusGraphFactory.drop(janusGraph); - } else { - janusGraph.close(); - JanusGraphCleanup.clear(janusGraph); - } + GraphAdminDBUtils.logConfigs(janusGraph.configuration()); + janusGraph.close(); + JanusGraphCleanup.clear(janusGraph); LOGGER.debug(" Done clearing data. "); LOGGER.debug(">>> IMPORTANT - NOTE >>> you need to run the SchemaGenerator (use GenTester) before "); LOGGER.debug(" reloading data or the data will be put in without indexes. "); dbClearFlag = true; LOGGER.debug("All done clearing DB"); - } else if (command.equals("RELOAD_LEGACY_DATA")) { - // ------------------------------------------------------------------- - // They want to restore the database from an old snapshot file - // ------------------------------------------------------------------- + } else if (command.equals("RELOAD_DATA")) { + // --------------------------------------------------------------------------- + // They want to restore the database from either a single file, or a group + // of snapshot files. Either way, this command will restore via single + // threaded processing. 
+ // --------------------------------------------------------------------------- + LOGGER.debug("\n>>> Command = " + command ); verifyGraph(AAIGraph.getInstance().getGraph()); graph = AAIGraph.getInstance().getGraph(); + GraphAdminDBUtils.logConfigs(graph.configuration()); if (oldSnapshotFileName.equals("")) { - String emsg = "No oldSnapshotFileName passed to DataSnapshot when RELOAD_LEGACY_DATA used."; - LOGGER.debug(emsg); - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } - String oldSnapshotFullFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName; - File f = new File(oldSnapshotFullFname); - if (!f.exists()) { - String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be found."; - LOGGER.debug(emsg); - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } else if (!f.canRead()) { - String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be read."; - LOGGER.debug(emsg); - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } else if (f.length() == 0) { - String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " had no data."; + String emsg = "No oldSnapshotFileName passed to DataSnapshot when RELOAD_DATA used."; LOGGER.debug(emsg); AAISystemExitUtil.systemExitCloseAAIGraph(1); } - - LOGGER.debug("We will load data IN from the file = " + oldSnapshotFullFname); - LOGGER.debug(" Begin reloading JanusGraph 0.5 data. "); - - LegacyGraphSONReader lgr = LegacyGraphSONReader.build().create(); - InputStream is = new FileInputStream(oldSnapshotFullFname); - lgr.readGraph(is, graph); - LOGGER.debug("Completed the inputGraph command, now try to commit()... "); - graph.tx().commit(); - LOGGER.debug("Completed reloading JanusGraph 0.5 data."); + long timeA = System.nanoTime(); - long vCount = graph.traversal().V().count().next(); - LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db."); - } else if (command.equals("RELOAD_DATA")) { - // ------------------------------------------------------------------- - // They want to restore the database from an old snapshot file - // ------------------------------------------------------------------- - verifyGraph(AAIGraph.getInstance().getGraph()); - graph = AAIGraph.getInstance().getGraph(); - if (oldSnapshotFileName.equals("")) { - String emsg = "No oldSnapshotFileName passed to DataSnapshot when RELOAD_DATA used."; - LOGGER.debug(emsg); - AAISystemExitUtil.systemExitCloseAAIGraph(1); + ArrayList <File> snapFilesArr = new ArrayList <File> (); + + // First, see if this is a single file (ie. 
no ".P#" suffix) + String onePieceSnapshotFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName; + File sf = new File(onePieceSnapshotFname); + if( sf.exists() ){ + snapFilesArr.add(sf); } - String oldSnapshotFullFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName; - File f = new File(oldSnapshotFullFname); - if (!f.exists()) { - String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be found."; - LOGGER.debug(emsg); - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } else if (!f.canRead()) { - String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be read."; - LOGGER.debug(emsg); - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } else if (f.length() == 0) { - String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " had no data."; + else { + // If it's a multi-part snapshot, then collect all the files for it + String thisSnapPrefix = oldSnapshotFileName + ".P"; + File fDir = new File(targetDir); // Snapshot directory + File[] allFilesArr = fDir.listFiles(); + for (File snapFile : allFilesArr) { + String snapFName = snapFile.getName(); + if( snapFName.startsWith(thisSnapPrefix)){ + snapFilesArr.add(snapFile); + } + } + } + + if( snapFilesArr.isEmpty() ){ + String emsg = "oldSnapshotFile " + onePieceSnapshotFname + "(with or without .P0) could not be found."; LOGGER.debug(emsg); AAISystemExitUtil.systemExitCloseAAIGraph(1); } - - LOGGER.debug("We will load data IN from the file = " + oldSnapshotFullFname); - LOGGER.debug(" Begin reloading data. "); - graph.io(IoCore.graphson()).readGraph(oldSnapshotFullFname); - LOGGER.debug("Completed the inputGraph command, now try to commit()... "); - graph.tx().commit(); - LOGGER.debug("Completed reloading data."); - - long vCount = graph.traversal().V().count().next(); - - LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db."); - - } else if (command.equals("RELOAD_DATA_MULTI")) { - // ------------------------------------------------------------------- - // They want to restore the database from a group of snapshot files - // Note - this uses multiple snapshot files, but runs single-threaded. - // ------------------------------------------------------------------- - verifyGraph(AAIGraph.getInstance().getGraph()); - graph = AAIGraph.getInstance().getGraph(); - - ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false); - - long timeA = System.nanoTime(); int fCount = snapFilesArr.size(); - Iterator <File> fItr = snapFilesArr.iterator(); Vector<InputStream> inputStreamsV = new Vector<>(); for( int i = 0; i < fCount; i++ ){ File f = snapFilesArr.get(i); @@ -691,15 +738,20 @@ public class DataSnapshot { InputStream fis = new FileInputStream(fullFName); inputStreamsV.add(fis); } + // Now add inputStreams.elements() to the Vector, // inputStreams.elements() will return Enumerations InputStream sis = new SequenceInputStream(inputStreamsV.elements()); LOGGER.debug("Begin loading data from " + fCount + " files -----"); - graph.io(IoCore.graphson()).reader().create().readGraph(sis, graph); + if("gryo".equalsIgnoreCase(snapshotType)){ + graph.io(IoCore.gryo()).reader().create().readGraph(sis, graph); + } else { + graph.io(IoCore.graphson()).reader().create().readGraph(sis, graph); + } LOGGER.debug("Completed the inputGraph command, now try to commit()... 
"); graph.tx().commit(); - LOGGER.debug(" >> Completed reloading data."); - + LOGGER.debug("Completed reloading data."); + long vCount = graph.traversal().V().count().next(); LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db."); @@ -710,7 +762,8 @@ public class DataSnapshot { LOGGER.debug(" -- To Reload this snapshot, it took: " + minCount + " minutes, " + secCount + " seconds " ); - + LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db."); + } else { String emsg = "Bad command passed to DataSnapshot: [" + command + "]"; LOGGER.debug(emsg); @@ -728,7 +781,7 @@ public class DataSnapshot { ex.printStackTrace(); success = false; } finally { - if (!dbClearFlag && graph != null) { + if (!dbClearFlag && graph != null && !MIGRATION_PROCESS_NAME.equalsIgnoreCase(source)) { // Any changes that worked correctly should have already done // thier commits. if(!"true".equals(System.getProperty("org.onap.aai.graphadmin.started"))) { @@ -744,18 +797,13 @@ public class DataSnapshot { } } - if(success){ - AAISystemExitUtil.systemExitCloseAAIGraph(0); - } else { - AAISystemExitUtil.systemExitCloseAAIGraph(1); - } - - }// End of main() - + return success; + } + private static ArrayList <File> getFilesToProcess(String targetDir, String oldSnapshotFileName, boolean doingClearDb) throws Exception { - + if( oldSnapshotFileName == null || oldSnapshotFileName.equals("") ){ String emsg = "No oldSnapshotFileName passed to DataSnapshot for Reload. "; if( doingClearDb ) { @@ -764,9 +812,9 @@ public class DataSnapshot { LOGGER.debug(emsg); throw new Exception( emsg ); } - + ArrayList <File> snapFilesArrList = new ArrayList <File> (); - + // First, we'll assume that this is a multi-file snapshot and // look for names based on that. String thisSnapPrefix = oldSnapshotFileName + ".P"; @@ -787,9 +835,9 @@ public class DataSnapshot { snapFilesArrList.add(snapFile); } } - + if( snapFilesArrList.isEmpty() ){ - // Multi-file snapshot check did not find files, so this may + // Multi-file snapshot check did not find files, so this may // be a single-file snapshot. String oldSnapshotFullFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName; File f = new File(oldSnapshotFullFname); @@ -808,7 +856,7 @@ public class DataSnapshot { } snapFilesArrList.add(f); } - + if( snapFilesArrList.isEmpty() ){ // Still haven't found anything.. that was not a good file name. 
String fullFName = targetDir + AAIConstants.AAI_FILESEP + thisSnapPrefix; @@ -816,11 +864,11 @@ public class DataSnapshot { LOGGER.debug(emsg); throw new Exception(emsg); } - + return snapFilesArrList; } - - + + public static void verifyGraph(JanusGraph graph) { if (graph == null) { @@ -831,5 +879,64 @@ public class DataSnapshot { } + class CommandLineArgs { + + + + @Parameter(names = "--help", help = true) + public boolean help; + + @Parameter(names = "-c", description = "command for taking data snapshot") + public String command = "JUST_TAKE_SNAPSHOT"; + @Parameter(names = "-f", description = "previous snapshot file to reload") + public String oldFileName = ""; + + @Parameter(names = "-snapshotType", description = "snapshot type of gryo or graphson") + public String snapshotType = "graphson"; + + @Parameter(names = "-threadCount", description = "thread count for create") + public int threadCount = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_THREADS_FOR_CREATE; + + @Parameter(names = "-debugFlag", description = "DEBUG flag") + public String debugFlag = ""; + + @Parameter(names = "-debugAddDelayTime", description = "delay in ms between each Add for debug mode") + public long debugAddDelayTime = 1L; + + @Parameter(names = "-vertAddDelayMs", description = "delay in ms while adding each vertex") + public long vertAddDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_VERTEX_ADD_DELAY_MS.longValue(); + + @Parameter(names = "-edgeAddDelayMs", description = "delay in ms while adding each edge") + public long edgeAddDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_EDGE_ADD_DELAY_MS.longValue(); + + @Parameter(names = "-failureDelayMs", description = "delay in ms when failure to load vertex or edge in snapshot") + public long failureDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_FAILURE_DELAY_MS.longValue(); + + @Parameter(names = "-retryDelayMs", description = "time in ms after which load snapshot is retried") + public long retryDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_FAILURE_DELAY_MS.longValue(); + + @Parameter(names = "-maxErrorsPerThread", description = "max errors allowed per thread") + public int maxErrorsPerThread = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_MAX_ERRORS_PER_THREAD; + + @Parameter(names = "-vertToEdgeProcDelay", description = "vertex to edge processing delay in ms") + public long vertToEdgeProcDelay = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_VERTEX_TO_EDGE_PROC_DELAY_MS.longValue(); + + @Parameter(names = "-staggerThreadDelay", description = "thread delay stagger time in ms") + public long staggerThreadDelay = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_STAGGER_THREAD_DELAY_MS; + + @Parameter(names = "-fileName", description = "file name for generating snapshot ") + public String fileName = ""; + + @Parameter(names = "-snapshotDir", description = "file path for generating snapshot ") + public String snapshotDir = ""; + + @Parameter(names = "-oldFileDir", description = "directory containing the old snapshot file for reloading") + public String oldFileDir = ""; + + @Parameter(names = "-caller", description = "process invoking the dataSnapshot") + public String caller = ""; + + } + }
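The multi-threaded reload above fans out one PartialPropAndEdgeLoader per snapshot file, drains every Future, and only then shuts the pool down and commits on the main thread. A minimal, self-contained sketch of that fan-out/drain pattern (the Callable body and file names are placeholders, not the real loader):

```java
// Sketch of the fixed-pool fan-out/drain pattern used by the reload commands above.
// The Callable body and file names are illustrative stand-ins.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.*;

public class ThreadedReloadSketch {
    public static void main(String[] args) {
        List<String> snapFiles = Arrays.asList("snap.P0", "snap.P1", "snap.P2");
        ExecutorService executor = Executors.newFixedThreadPool(snapFiles.size());
        List<Future<List<String>>> futures = new ArrayList<>();
        for (String fname : snapFiles) {
            // In DataSnapshot this is a PartialPropAndEdgeLoader; an empty result means no errors.
            Callable<List<String>> loader = () -> new ArrayList<>();
            futures.add(executor.submit(loader));
        }
        int threadFailCount = 0;
        for (Future<List<String>> fut : futures) {
            try {
                fut.get(); // block until that file's loader completes
            } catch (InterruptedException | ExecutionException e) {
                threadFailCount++;
            }
        }
        executor.shutdown();
        if (threadFailCount > 0) {
            throw new RuntimeException(threadFailCount + " loader thread(s) failed");
        }
    }
}
```

Collecting the Futures before calling shutdown() is what lets the code count per-thread failures instead of aborting on the first one.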
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java b/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java index 404e243..7092aa8 100644 --- a/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java +++ b/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java @@ -67,7 +67,7 @@ public class DataSnapshotTasks { LOGGER.info("Started cron job dataSnapshot @ " + dateFormat.format(new Date())); try { if (AAIConfig.get("aai.cron.enable.dataSnapshot").equals("true")) { - String [] dataSnapshotParms = AAIConfig.get("aai.datasnapshot.params", "JUST_TAKE_SNAPSHOT").split("\\s+"); + String [] dataSnapshotParms = {"-c",AAIConfig.get("aai.datasnapshot.params", "JUST_TAKE_SNAPSHOT")}; LOGGER.info("DataSnapshot Params {}", Arrays.toString(dataSnapshotParms)); DataSnapshot.main(dataSnapshotParms); } diff --git a/src/main/java/org/onap/aai/datasnapshot/PartialPropAndEdgeLoader.java b/src/main/java/org/onap/aai/datasnapshot/PartialPropAndEdgeLoader.java index af858ae..0f03ee0 100644 --- a/src/main/java/org/onap/aai/datasnapshot/PartialPropAndEdgeLoader.java +++ b/src/main/java/org/onap/aai/datasnapshot/PartialPropAndEdgeLoader.java @@ -223,9 +223,8 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{ LOGGER.debug(" -- Could not convert line to JsonObject [ " + graphSonLine + "]" ); LOGGER.debug(" -- ErrorMsg = [" +e.getMessage() + "]"); - return(" DEBUG -a- JSON translation exception when processing this line ---"); - //xxxxxDEBUGxxxxx I think we put some info on the return String and then return? - } + return(" JSON translation or getVid exception when processing this line [" + graphSonLine + "]"); + } // ----------------------------------------------------------------------------------------- // Note - this assumes that any vertices referred to by an edge will already be in the DB. @@ -242,11 +241,9 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{ LOGGER.debug(" -- " + passInfo + " translate VertexId before adding edges failed for this: vtxId = " + originalVid + ". ErrorMsg = [" +e.getMessage() + "]"); - return(" DEBUG -b- there VID-translation error when processing this line ---"); - //xxxxxDEBUGxxxxx I think we put some info on the return String and then return? + return(" VID-translation error when processing this line ---"); } - - + try { dbVtx = getVertexFromDbForVid(newVidStr); } @@ -254,8 +251,7 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{ LOGGER.debug(" -- " + passInfo + " READ Vertex from DB before adding edges failed for this: vtxId = " + originalVid + ", newVidId = " + newVidL + ". ErrorMsg = [" +e.getMessage() + "]"); - return(" -- there was an error processing this line --- Line = [" + graphSonLine + "]"); - //xxxxxxDEBUGxxxx I think we put some info on the return String and then return? + return(" ERROR getting Vertex based on VID = " + newVidStr + "]"); } @@ -268,8 +264,7 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{ catch ( Exception e ){ LOGGER.debug(" -- " + passInfo + " COMMIT FAILED adding EDGES for this vertex: vtxId = " + originalVid + ". ErrorMsg = [" +e.getMessage() + "]"); - //xxxxxxxxxx I think we put some info on the return String and then return? 
- return(" DEBUG -d- there was an error doing the commit while processing edges for this line ---"); + return(" ERROR with committing edges for vertexId = " + originalVid ); } } @@ -283,15 +278,13 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{ catch ( Exception e ){ LOGGER.debug(" -- " + passInfo + " COMMIT FAILED adding Properties for this vertex: vtxId = " + originalVid + ". ErrorMsg = [" +e.getMessage() + "]"); - //xxxxxxxxxx I think we put some info on the return String and then return? - return(" DEBUG -e- there was an error doing the commit while processing Properties for this line ---"); + return(" ERROR with committing properties for vertexId = " + originalVid ); } } else { - LOGGER.debug("DEBUG " + passInfo + " Error processing Properties for this vertex: vtxId = " + originalVid ); - - //xxxxxxxxxx I think we put some info on the return String and then return? - return(" DEBUG -f- there was an error while processing Properties for this line ---"); + LOGGER.debug("DEBUG " + passInfo + " Error processing Properties for this vertex: vtxId = " + + originalVid + ", [" + pResStr + "]"); + return(" ERROR processing properties for vertexId = " + originalVid + ", [" + pResStr + "]"); } } @@ -306,18 +299,15 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{ JSONArray propsDetArr = propsOb.getJSONArray(pKey); for( int i=0; i< propsDetArr.length(); i++ ){ JSONObject prop = propsDetArr.getJSONObject(i); - String val = prop.getString("value"); - dbVtx.property(pKey, val); //DEBUGjojo -- val is always String here.. which is not right -------------------DEBUG + Object val = prop.get("value"); + dbVtx.property(pKey, val); // DEBUG - not sure if this is would handle String[] properties? } } - } catch ( Exception e ){ LOGGER.debug(" -- " + passInfo + " failure getting/setting properties for: vtxId = " + originalVid + ". ErrorMsg = [" + e.getMessage() + "]"); - //xxxDEBUGxxxxxxx I think we put some info on the return String and then return? - return(" DEBUG -g- there was an error adding properties while processing this line ---"); - + return(" error processing properties for vtxId = " + originalVid); } return ""; @@ -371,7 +361,6 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{ Iterator <String> edItr = edOb.keys(); while( edItr.hasNext() ){ String eLabel = edItr.next(); - String inVid = ""; // Note - this should really be a Long? JSONArray edArr = edOb.getJSONArray(eLabel); for( int i=0; i< edArr.length(); i++ ){ JSONObject eObj = edArr.getJSONObject(i); @@ -395,22 +384,19 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{ Iterator <String> ePropsItr = ePropsOb.keys(); while( ePropsItr.hasNext() ){ String pKey = ePropsItr.next(); - tmpE.property(pKey, ePropsOb.getString(pKey)); + tmpE.property(pKey, ePropsOb.get(pKey)); } } } } - } catch ( Exception e ){ String msg = " -- " + passInfo + " failure adding edge for: original vtxId = " + originalVid + ". ErrorMsg = [" +e.getMessage() + "]"; LOGGER.debug( " -- " + msg ); - //xxxxxxDEBUGxxxx I think we might need some better info on the return String to return? 
LOGGER.debug(" -- now going to return/bail out of processEdgesForVtx" ); return(" >> " + msg ); - - } + } return ""; } diff --git a/src/main/java/org/onap/aai/datasnapshot/PartialVertexLoader.java b/src/main/java/org/onap/aai/datasnapshot/PartialVertexLoader.java index 387f45e..3afd295 100644 --- a/src/main/java/org/onap/aai/datasnapshot/PartialVertexLoader.java +++ b/src/main/java/org/onap/aai/datasnapshot/PartialVertexLoader.java @@ -66,8 +66,6 @@ public class PartialVertexLoader implements Callable<HashMap<String,String>>{ int retryFailureCount = 0; HashMap <String,String> failedAttemptHash = new HashMap <String,String> (); HashMap <String,String> old2NewVtxIdHash = new HashMap <String,String> (); - GraphSONReader gsr = GraphSONReader.build().create(); - // Read this file into a JSON object JsonParser parser = new JsonParser(); @@ -180,13 +178,17 @@ public class PartialVertexLoader implements Callable<HashMap<String,String>>{ } try { jg.tx().commit(); - // If this worked, we can take it off of the failed list - failedAttemptHash.remove(failedVidStr); + LOGGER.debug(" -- addVertex Successful RETRY for vtxId = " + + failedVidStr + ", label = [" + failedLabel + "]"); } catch ( Exception e ){ retryFailureCount++; - LOGGER.debug(" -- COMMIT FAILED for RETRY for vtxId = " + failedVidStr - + ", label = [" + failedLabel + "]. ErrorMsg = [" + e.getMessage() + "]" ); + // Note - this is a "POSSIBLE" error because the reason the commit fails may be that + // the node is a dupe or has some other legit reason that it should not be in the DB. + LOGGER.debug(" --POSSIBLE ERROR-- COMMIT FAILED for RETRY for vtxId = " + failedVidStr + + ", label = [" + failedLabel + "]. ErrorMsg = [" + e.getMessage() + + "]. This vertex will not be tried again. "); + e.printStackTrace(); if( retryFailureCount > maxAllowedErrors ) { LOGGER.debug(">>> Abandoning PartialVertexLoader() because " + diff --git a/src/main/java/org/onap/aai/datasnapshot/PrintVertexDetails.java b/src/main/java/org/onap/aai/datasnapshot/PrintVertexDetails.java index 791ae15..493678b 100644 --- a/src/main/java/org/onap/aai/datasnapshot/PrintVertexDetails.java +++ b/src/main/java/org/onap/aai/datasnapshot/PrintVertexDetails.java @@ -25,25 +25,28 @@ import java.util.Iterator; import org.apache.tinkerpop.gremlin.structure.Direction; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.io.IoCore; +import org.apache.tinkerpop.gremlin.structure.io.GraphWriter; import org.janusgraph.core.JanusGraph; public class PrintVertexDetails implements Runnable{ - - //private static EELFLogger LOGGER; private JanusGraph jg; private String fname; private ArrayList<Vertex> vtxList; private Boolean debugOn; - private int debugDelayMs; - - public PrintVertexDetails (JanusGraph graph, String fn, ArrayList<Vertex> vL, Boolean debugFlag, int debugDelay){ + private long debugDelayMs; + private String snapshotType; + + static final byte[] newLineBytes = "\n".getBytes(); + + public PrintVertexDetails (JanusGraph graph, String fn, ArrayList<Vertex> vL, Boolean debugFlag, long debugAddDelayTime, String snapshotType){ jg = graph; fname = fn; vtxList = vL; debugOn = debugFlag; - debugDelayMs = debugDelay; + debugDelayMs = debugAddDelayTime; + this.snapshotType = snapshotType; } public void run(){ @@ -55,6 +58,12 @@ public class PrintVertexDetails implements Runnable{ Long debugDelayMsL = new Long(debugDelayMs); FileOutputStream subFileStr = new FileOutputStream(fname); Iterator <Vertex> vSubItr = vtxList.iterator(); + GraphWriter 
graphWriter = null; + if("gryo".equalsIgnoreCase(snapshotType)){ + graphWriter = jg.io(IoCore.gryo()).writer().create(); + } else { + graphWriter = jg.io(IoCore.graphson()).writer().create(); + } while( vSubItr.hasNext() ){ Long vertexIdL = 0L; String aaiNodeType = ""; @@ -68,7 +77,8 @@ public class PrintVertexDetails implements Runnable{ aaiUuid = (String) tmpV.property("aai-uuid").orElse(null); Thread.sleep(debugDelayMsL); // Make sure it doesn't bump into itself - jg.io(IoCore.graphson()).writer().create().writeVertex(subFileStr, tmpV, Direction.BOTH); + graphWriter.writeVertex(subFileStr, tmpV, Direction.BOTH); + subFileStr.write(newLineBytes); okCount++; } catch(Exception e) { @@ -94,7 +104,11 @@ public class PrintVertexDetails implements Runnable{ int count = vtxList.size(); Iterator <Vertex> vSubItr = vtxList.iterator(); FileOutputStream subFileStr = new FileOutputStream(fname); - jg.io(IoCore.graphson()).writer().create().writeVertices(subFileStr, vSubItr, Direction.BOTH); + if ("gryo".equalsIgnoreCase(snapshotType)) { + jg.io(IoCore.gryo()).writer().create().writeVertices(subFileStr, vSubItr, Direction.BOTH); + } else { + jg.io(IoCore.graphson()).writer().create().writeVertices(subFileStr, vSubItr, Direction.BOTH); + } subFileStr.close(); System.out.println(" -- Printed " + count + " vertexes out to " + fname); } diff --git a/src/main/java/org/onap/aai/db/schema/ScriptDriver.java b/src/main/java/org/onap/aai/db/schema/ScriptDriver.java index ebef01d..c45ad28 100644 --- a/src/main/java/org/onap/aai/db/schema/ScriptDriver.java +++ b/src/main/java/org/onap/aai/db/schema/ScriptDriver.java @@ -1,123 +1,139 @@ -/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.aai.db.schema;
-
-import java.io.IOException;
-import java.util.UUID;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.codehaus.jackson.JsonGenerationException;
-import org.onap.aai.dbmap.AAIGraphConfig;
-import org.onap.aai.edges.EdgeIngestor;
-import org.onap.aai.exceptions.AAIException;
-import org.onap.aai.setup.SchemaVersions;
-import org.onap.aai.logging.LoggingContext;
-import org.onap.aai.logging.LoggingContext.StatusCode;
-import org.onap.aai.util.AAIConfig;
-import com.beust.jcommander.JCommander;
-import com.beust.jcommander.Parameter;
-import org.janusgraph.core.JanusGraphFactory;
-import org.janusgraph.core.JanusGraph;
-import org.springframework.context.annotation.AnnotationConfigApplicationContext;
-
-public class ScriptDriver {
-
- /**
- * The main method.
- *
- * @param args the arguments
- * @throws AAIException the AAI exception
- * @throws JsonGenerationException the json generation exception
- * @throws IOException Signals that an I/O exception has occurred.
- */
- public static void main (String[] args) throws AAIException, IOException, ConfigurationException {
- CommandLineArgs cArgs = new CommandLineArgs();
-
- LoggingContext.init();
- LoggingContext.component("DBSchemaScriptDriver");
- LoggingContext.partnerName("NA");
- LoggingContext.targetEntity("AAI");
- LoggingContext.requestId(UUID.randomUUID().toString());
- LoggingContext.serviceName("AAI");
- LoggingContext.targetServiceName("main");
- LoggingContext.statusCode(StatusCode.COMPLETE);
- LoggingContext.responseCode(LoggingContext.SUCCESS);
-
- new JCommander(cArgs, args);
-
- if (cArgs.help) {
- System.out.println("-c [path to graph configuration] -type [what you want to audit - oxm or graph]");
- }
-
- AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
- "org.onap.aai.config",
- "org.onap.aai.setup"
- );
-
- AuditorFactory auditorFactory = ctx.getBean(AuditorFactory.class);
- SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
- EdgeIngestor edgeIngestor = ctx.getBean(EdgeIngestor.class);
-
- String config = cArgs.config;
- AAIConfig.init();
-
- PropertiesConfiguration graphConfiguration = new AAIGraphConfig
- .Builder(config)
- .forService(ScriptDriver.class.getSimpleName())
- .withGraphType("NA")
- .buildConfiguration();
-
- try (JanusGraph graph = JanusGraphFactory.open(graphConfiguration)) {
- if (!("oxm".equals(cArgs.type) || "graph".equals(cArgs.type))) {
- System.out.println("type: " + cArgs.type + " not recognized.");
- System.exit(1);
- }
-
- AuditDoc doc = null;
- if ("oxm".equals(cArgs.type)) {
- doc = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion(), edgeIngestor).getAuditDoc();
- } else if ("graph".equals(cArgs.type)) {
- doc = auditorFactory.getGraphAuditor(graph).getAuditDoc();
- }
-
- ObjectMapper mapper = new ObjectMapper();
-
- String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(doc);
- System.out.println(json);
- }
- }
-
-}
-
-class CommandLineArgs {
-
- @Parameter(names = "--help", description = "Help")
- public boolean help = false;
-
- @Parameter(names = "-c", description = "Configuration", required=true)
- public String config;
-
- @Parameter(names = "-type", description = "Type", required=true)
- public String type = "graph";
-
-
+/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.db.schema; + +import com.beust.jcommander.JCommander; +import com.beust.jcommander.Parameter; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.codehaus.jackson.JsonGenerationException; +import org.janusgraph.core.JanusGraph; +import org.janusgraph.core.JanusGraphFactory; +import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.dbmap.AAIGraphConfig; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.exceptions.AAIException; +import org.onap.aai.logging.ErrorLogHelper; +import org.onap.aai.logging.ErrorObjectFormatException; +import org.onap.aai.logging.LoggingContext; +import org.onap.aai.logging.LoggingContext.StatusCode; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.util.AAIConfig; +import org.onap.aai.util.ExceptionTranslator; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; + +import java.io.IOException; +import java.util.UUID; + +public class ScriptDriver { + + /** + * The main method. + * + * @param args the arguments + * @throws AAIException the AAI exception + * @throws JsonGenerationException the json generation exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + public static void main (String[] args) throws AAIException, IOException, ConfigurationException, ErrorObjectFormatException { + CommandLineArgs cArgs = new CommandLineArgs(); + + LoggingContext.init(); + LoggingContext.component("DBSchemaScriptDriver"); + LoggingContext.partnerName("NA"); + LoggingContext.targetEntity("AAI"); + LoggingContext.requestId(UUID.randomUUID().toString()); + LoggingContext.serviceName("AAI"); + LoggingContext.targetServiceName("main"); + LoggingContext.statusCode(StatusCode.COMPLETE); + LoggingContext.responseCode(LoggingContext.SUCCESS); + ErrorLogHelper.loadProperties(); + new JCommander(cArgs, args); + + if (cArgs.help) { + System.out.println("-c [path to graph configuration] -type [what you want to audit - oxm or graph]"); + } + + AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(); + PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration(); + initializer.initialize(ctx); + try { + ctx.scan( + "org.onap.aai.config", + "org.onap.aai.setup" + ); + ctx.refresh(); + + } catch (Exception e) { + AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e); + LoggingContext.statusCode(LoggingContext.StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry"); + throw aai; + } + AuditorFactory auditorFactory = ctx.getBean(AuditorFactory.class); + SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class); + EdgeIngestor edgeIngestor = ctx.getBean(EdgeIngestor.class); + + String config = cArgs.config; + AAIConfig.init(); + + PropertiesConfiguration graphConfiguration = new AAIGraphConfig + .Builder(config) + .forService(ScriptDriver.class.getSimpleName()) + .withGraphType("NA") + .buildConfiguration(); + + try (JanusGraph graph = JanusGraphFactory.open(graphConfiguration)) { + if (!("oxm".equals(cArgs.type) || "graph".equals(cArgs.type))) { + System.out.println("type: " + cArgs.type + " not recognized."); + System.exit(1); + } + + AuditDoc doc = null; + if ("oxm".equals(cArgs.type)) { + doc = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion(), edgeIngestor).getAuditDoc(); + } else if ("graph".equals(cArgs.type)) { + doc = auditorFactory.getGraphAuditor(graph).getAuditDoc(); + } + + ObjectMapper mapper = new ObjectMapper(); + + String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(doc); + System.out.println(json); + } + } + +} + +class CommandLineArgs { + + @Parameter(names = "--help", description = "Help") + public boolean help = false; + + @Parameter(names = "-c", description = "Configuration", required=true) + public String config; + + @Parameter(names = "-type", description = "Type", required=true) + public String type = "graph"; + + }
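The rewritten ScriptDriver, like DupeTool below, now splits context creation from the component scan so the PropertyPasswordConfiguration initializer can run first, and any startup failure is translated into an AAIException before beans are fetched. The shared bootstrap, condensed from the code above:

```java
// Context bootstrap shared by the CLI tools in this change (condensed; error handling trimmed).
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
initializer.initialize(ctx); // resolve password/encrypted properties before bean creation
try {
    ctx.scan("org.onap.aai.config", "org.onap.aai.setup");
    ctx.refresh();
} catch (Exception e) {
    AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
    ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
    throw aai;
}
// Only after a successful refresh are the schema beans safe to pull:
AuditorFactory auditorFactory = ctx.getBean(AuditorFactory.class);
```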
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/dbgen/DupeTool.java b/src/main/java/org/onap/aai/dbgen/DupeTool.java index 7b7ef99..fd5ae00 100644 --- a/src/main/java/org/onap/aai/dbgen/DupeTool.java +++ b/src/main/java/org/onap/aai/dbgen/DupeTool.java @@ -19,22 +19,19 @@ */ package org.onap.aai.dbgen; -import java.io.FileInputStream; -import java.io.InputStream; -import java.util.*; -import java.util.Map.Entry; - +import com.att.eelf.configuration.Configuration; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; -import org.apache.tinkerpop.gremlin.structure.Direction; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.apache.tinkerpop.gremlin.structure.Graph; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.VertexProperty; -import org.onap.aai.db.props.AAIProperties; -import org.onap.aai.dbmap.AAIGraphConfig; +import org.apache.tinkerpop.gremlin.structure.*; +import org.janusgraph.core.JanusGraph; +import org.janusgraph.core.JanusGraphFactory; +import org.onap.aai.config.PropertyPasswordConfiguration; import org.onap.aai.dbmap.AAIGraph; +import org.onap.aai.dbmap.AAIGraphConfig; +import org.onap.aai.edges.enums.AAIDirection; import org.onap.aai.edges.enums.EdgeProperty; import org.onap.aai.exceptions.AAIException; import org.onap.aai.introspection.Introspector; @@ -45,19 +42,19 @@ import org.onap.aai.logging.ErrorLogHelper; import org.onap.aai.logging.LogFormatTools; import org.onap.aai.logging.LoggingContext; import org.onap.aai.logging.LoggingContext.StatusCode; -import org.onap.aai.edges.enums.AAIDirection; import org.onap.aai.setup.SchemaVersions; import org.onap.aai.util.AAIConfig; import org.onap.aai.util.AAIConstants; +import org.onap.aai.util.ExceptionTranslator; +import org.onap.aai.util.GraphAdminConstants; import org.slf4j.MDC; - -import com.att.eelf.configuration.Configuration; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import org.janusgraph.core.JanusGraphFactory; -import org.janusgraph.core.JanusGraph; import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import java.io.FileInputStream; +import java.io.InputStream; +import java.util.*; +import java.util.Map.Entry; + public class DupeTool { private static final EELFLogger logger = EELFManager.getInstance().getLogger(DupeTool.class.getSimpleName()); @@ -76,6 +73,7 @@ public class DupeTool { } private LoaderFactory loaderFactory; + private int dupeGroupCount = 0; public DupeTool(LoaderFactory loaderFactory, SchemaVersions schemaVersions){ this(loaderFactory, schemaVersions, true); @@ -89,7 +87,7 @@ public class DupeTool { public void execute(String[] args){ - String defVersion = "v12"; + String defVersion = "v15"; try { defVersion = AAIConfig.get(AAIConstants.AAI_DEFAULT_API_VERSION_PROP); } catch (AAIException ae) { @@ -101,7 +99,7 @@ public class DupeTool { exit(0); } - + dupeGroupCount = 0; Loader loader = null; try { loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion()); @@ -120,8 +118,8 @@ public class DupeTool { try { AAIConfig.init(); - int maxRecordsToFix = AAIConstants.AAI_DUPETOOL_DEFAULT_MAX_FIX; - int sleepMinutes = 
AAIConstants.AAI_DUPETOOL_DEFAULT_SLEEP_MINUTES; + int maxRecordsToFix = GraphAdminConstants.AAI_DUPETOOL_DEFAULT_MAX_FIX; + int sleepMinutes = GraphAdminConstants.AAI_DUPETOOL_DEFAULT_SLEEP_MINUTES; int timeWindowMinutes = 0; // A value of 0 means that we will not have a time-window -- we will look // at all nodes of the passed-in nodeType. long windowStartTime = 0; // Translation of the window into a starting timestamp @@ -137,7 +135,7 @@ public class DupeTool { } } catch (Exception e) { // Don't worry, we'll just use the defaults that we got from AAIConstants - logger.warn("WARNING - could not pick up aai.dupeTool values from aaiconfig.properties file. Will use defaults. "); + logger.warn("WARNING - could not pick up aai.dupeTool values from aaiconfig.properties file. Will use defaults. " + e.getMessage()); } String nodeTypeVal = ""; @@ -364,7 +362,7 @@ public class DupeTool { showNodeDetailsForADupeSet(gt1, firstPassDupeSets.get(x), logger); } } - + dupeGroupCount = firstPassDupeSets.size(); boolean didSomeDeletesFlag = false; ArrayList<String> dupeSetsToFix = new ArrayList<String>(); if (autoFix && firstPassDupeSets.size() == 0) { @@ -405,6 +403,7 @@ public class DupeTool { + " sets of duplicates that we think can be deleted. "; logger.info(msg); System.out.println(msg); + if (dupeSetsToFix.size() > 0) { msg = " Here is what the sets look like: "; logger.info(msg); @@ -492,7 +491,7 @@ public class DupeTool { * * @param args the arguments */ - public static void main(String[] args) { + public static void main(String[] args) throws AAIException { System.setProperty("aai.service.name", DupeTool.class.getSimpleName()); // Set the logging file properties to be used by EELFManager @@ -511,11 +510,23 @@ public class DupeTool { LoggingContext.statusCode(StatusCode.COMPLETE); LoggingContext.responseCode(LoggingContext.SUCCESS); - AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext( - "org.onap.aai.config", - "org.onap.aai.setup" - ); - + AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(); + PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration(); + initializer.initialize(ctx); + try { + ctx.scan( + "org.onap.aai.config", + "org.onap.aai.setup" + ); + ctx.refresh(); + } catch (Exception e) { + AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e); + logger.error("Problems running DupeTool "+aai.getMessage()); + LoggingContext.statusCode(LoggingContext.StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry"); + throw aai; + } LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class); SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class); DupeTool dupeTool = new DupeTool(loaderFactory, schemaVersions); @@ -1080,10 +1091,12 @@ public class DupeTool { Boolean specialTenantRule, Loader loader, EELFLogger logger) throws AAIException { - // This method assumes that it is being passed a List of vertex objects - // which violate our uniqueness constraints. - + // This method assumes that it is being passed a List of + // vertex objects which violate our uniqueness constraints. + // Note - returning a null vertex means we could not + // safely pick one to keep (Ie. safely know which to delete.) 
Vertex nullVtx = null; + GraphTraversalSource gts = g.traversal(); if (dupeVertexList == null) { return nullVtx; @@ -1095,12 +1108,35 @@ public class DupeTool { if (listSize == 1) { return (dupeVertexList.get(0)); } + + // If they don't all have the same aai-uri, then we will not + // choose between them - we'll need someone to manually + // check to pick which one makes sense to keep. + Object uriOb = dupeVertexList.get(0).<Object>property("aai-uri").orElse(null); + if( uriOb == null || uriOb.toString().equals("") ){ + // this is a bad node - hopefully will be picked up by phantom checker + return nullVtx; + } + String thisUri = uriOb.toString(); + for (int i = 1; i < listSize; i++) { + uriOb = dupeVertexList.get(i).<Object>property("aai-uri").orElse(null); + if( uriOb == null || uriOb.toString().equals("") ){ + // this is a bad node - hopefully will be picked up by phantom checker + return nullVtx; + } + String nextUri = uriOb.toString(); + if( !thisUri.equals(nextUri)){ + // there are different URI's on these - so we can't pick + // a dupe to keep. Someone will need to look at it. + return nullVtx; + } + } Vertex vtxPreferred = null; Vertex currentFaveVtx = dupeVertexList.get(0); for (int i = 1; i < listSize; i++) { Vertex vtxB = dupeVertexList.get(i); - vtxPreferred = pickOneOfTwoDupes(transId, fromAppId, g, + vtxPreferred = pickOneOfTwoDupes(transId, fromAppId, gts, currentFaveVtx, vtxB, ver, specialTenantRule, loader, logger); if (vtxPreferred == null) { // We couldn't choose one @@ -1110,7 +1146,14 @@ public class DupeTool { } } - return (currentFaveVtx); + if( currentFaveVtx != null && checkAaiUriOk(gts, currentFaveVtx, logger) ){ + return (currentFaveVtx); + } + else { + // We had a preferred vertex, but its aai-uri was bad, so + // we will not recommend one to keep. 
+ return nullVtx; + } } // end of getPreferredDupe() @@ -1120,7 +1163,7 @@ public class DupeTool { * * @param transId the trans id * @param fromAppId the from app id - * @param g the g + * @param g the graphTraversalSource * @param vtxA the vtx A * @param vtxB the vtx B * @param ver the ver @@ -1130,7 +1173,7 @@ public class DupeTool { * @throws AAIException the AAI exception */ public Vertex pickOneOfTwoDupes(String transId, - String fromAppId, Graph g, Vertex vtxA, + String fromAppId, GraphTraversalSource gts, Vertex vtxA, Vertex vtxB, String ver, Boolean specialTenantRule, Loader loader, EELFLogger logger) throws AAIException { Vertex nullVtx = null; @@ -1289,11 +1332,13 @@ public class DupeTool { } if (allTheSame) { - if (vidA < vidB) { - preferredVtx = vtxA; - } else { - preferredVtx = vtxB; - } + if ( checkAaiUriOk(gts, vtxA, logger) ) { + preferredVtx = vtxA; + } + else if ( checkAaiUriOk(gts, vtxB, logger) ) { + preferredVtx = vtxB; + } + // else we're picking neither because neither one had a working aai-uri index property } else if (specialTenantRule) { // They asked us to apply a special rule if it applies if (vtxIdsConn2A.size() == 2 && vtxANodeType.equals("tenant")) { @@ -1575,6 +1620,71 @@ public class DupeTool { }// End of getNodeKeyVals() + + + /** + * makes sure aai-uri exists and can be used to get this node back + * + * @param transId the trans id + * @param fromAppId the from app id + * @param graph the graph + * @param vtx + * @param EELFLogger + * @return true if aai-uri is populated and the aai-uri-index points to this vtx + * @throws AAIException the AAI exception + */ + private Boolean checkAaiUriOk( GraphTraversalSource graph, Vertex origVtx, EELFLogger eLogger ) + throws AAIException{ + String aaiUriStr = ""; + try { + Object ob = origVtx.<Object>property("aai-uri").orElse(null); + String origVid = origVtx.id().toString(); + if (ob == null || ob.toString().equals("")) { + // It is missing its aai-uri + eLogger.debug("DEBUG No [aai-uri] property found for vid = [" + + origVid + "] " ); + return false; + } + else { + aaiUriStr = ob.toString(); + Iterator <Vertex> verts = graph.V().has("aai-uri",aaiUriStr); + int count = 0; + while( verts.hasNext() ){ + count++; + Vertex foundV = verts.next(); + String foundVid = foundV.id().toString(); + if( !origVid.equals(foundVid) ){ + eLogger.debug("DEBUG aai-uri key property [" + + aaiUriStr + "] for vid = [" + + origVid + "] brought back different vertex with vid = [" + + foundVid + "]." ); + return false; + } + } + if( count == 0 ){ + eLogger.debug("DEBUG aai-uri key property [" + + aaiUriStr + "] for vid = [" + + origVid + "] could not be used to query for that vertex. "); + return false; + } + else if( count > 1 ){ + eLogger.debug("DEBUG aai-uri key property [" + + aaiUriStr + "] for vid = [" + + origVid + "] brought back multiple (" + + count + ") vertices instead of just one. 
"); + return false; + } + } + } + catch( Exception ex ){ + LoggingContext.statusCode(StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + eLogger.error(" ERROR trying to get node with aai-uri: [" + aaiUriStr + "]" + LogFormatTools.getStackTop(ex)); + } + return true; + + }// End of checkAaiUriOk() + /** * Get values of the key properties for a node as a single string @@ -1850,5 +1960,14 @@ public class DupeTool { logger.warn("WARNING from final graph.shutdown()", ex); } } + + public int getDupeGroupCount() { + return dupeGroupCount; + } + + public void setDupeGroupCount(int dgCount) { + this.dupeGroupCount = dgCount; + } + } diff --git a/src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java b/src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java new file mode 100644 index 0000000..ecd95a7 --- /dev/null +++ b/src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java @@ -0,0 +1,906 @@ +/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+import org.apache.tinkerpop.gremlin.process.traversal.P;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree;
+import org.apache.tinkerpop.gremlin.structure.Element;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.node.ObjectNode;
+import org.codehaus.jackson.type.TypeReference;
+import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.dbmap.InMemoryGraph;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.EdgeRule;
+import org.onap.aai.edges.EdgeRuleQuery;
+import org.onap.aai.edges.enums.AAIDirection;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.edges.exceptions.AmbiguousRuleChoiceException;
+import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LogFormatTools;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.parsers.uri.URIToObject;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.InMemoryDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.tinkerpop.TreeBackedVertex;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.AAISystemExitUtil;
+import org.onap.aai.util.ExceptionTranslator;
+import org.slf4j.MDC;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+import java.io.*;
+import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.InvalidPathException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.Map.Entry;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/*
+ * The Class DynamicPayloadGenerator.
+ */
+public class DynamicPayloadGenerator {
+
+ /*
+ * Create a dynamic in-memory graph instance which should not affect the
+ * AAIGraph.
+ */
+ private InMemoryGraph inMemGraph = null;
+
+ private InMemoryDBEngine dbEngine;
+ private InputStream sequenceInputStreams;
+ /*
+ * Loader, QueryStyle, ConnectionType for the Serializer
+ */
+ private Loader loader;
+ private String urlBase;
+ private BufferedWriter bw = null;
+ private boolean exitFlag = true;
+ private CommandLineArgs cArgs;
+
+ private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DynamicPayloadGenerator.class);
+
+ private static final QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+ private static final DBConnectionType type = DBConnectionType.CACHED;
+ private static final ModelType introspectorFactoryType = ModelType.MOXY;
+ private final LoaderFactory loaderFactory;
+ private final EdgeIngestor edgeRules;
+ private final SchemaVersions schemaVersions;
+ private final SchemaVersion version;
+
+ public DynamicPayloadGenerator(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, SchemaVersions schemaVersions){
+ this.loaderFactory = loaderFactory;
+ this.edgeRules = edgeIngestor;
+ this.schemaVersions = schemaVersions;
+ this.version = schemaVersions.getDefaultVersion();
+ }
+
+ /**
+ * The run method.
+ *
+ * @param args
+ * the arguments
+ * @param isSystemExit true if running from a shell script (to call system exit), false if running from a scheduled task
+ */
+
+ public static void run (LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, SchemaVersions schemaVersions, String[] args, boolean isSystemExit) {
+ //
+ MDC.put("logFilenameAppender", DynamicPayloadGenerator.class.getSimpleName());
+ DynamicPayloadGenerator payloadgen = new DynamicPayloadGenerator(loaderFactory, edgeIngestor, schemaVersions);
+ payloadgen.exitFlag = isSystemExit;
+ try {
+ payloadgen.init(args);
+
+ payloadgen.generatePayloads();
+ } catch (AAIException e) {
+ LOGGER.error("Exception " + LogFormatTools.getStackTop(e));
+ } catch (IOException e) {
+ LOGGER.error("Exception " + LogFormatTools.getStackTop(e));
+ }
+ if ( isSystemExit ) {
+ AAISystemExitUtil.systemExitCloseAAIGraph(1);
+ }
+ else {
+ AAISystemExitUtil.systemExitCloseAAIGraph(0);
+ }
+
+ }
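run() leaves the exit policy to its caller: per the javadoc above, main() passes true when invoked from a shell script, while a scheduled task passes false. A hypothetical scheduled-task call (the bean references and taskArgs array are assumed to be supplied by the surrounding Spring context):

```java
// Hypothetical scheduled-task invocation; loaderFactory, edgeIngestor,
// schemaVersions and taskArgs are assumed to be provided by the caller.
DynamicPayloadGenerator.run(loaderFactory, edgeIngestor, schemaVersions,
        taskArgs, false); // false: scheduled-task mode, per the javadoc above
```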
+ public static void main(String[] args) throws AAIException {
+ AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+ PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+ initializer.initialize(ctx);
+ try {
+ ctx.scan(
+ "org.onap.aai.config",
+ "org.onap.aai.setup"
+ );
+ ctx.refresh();
+ } catch (Exception e) {
+ AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+ LOGGER.error("Problems running tool "+aai.getMessage());
+ LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+ LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+ ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+ throw aai;
+
+ }
+ LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
+ EdgeIngestor edgeIngestor = ctx.getBean(EdgeIngestor.class);
+ SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
+ run (loaderFactory, edgeIngestor, schemaVersions, args, true);
+ }
+
+
+ public void taskExit() {
+ if ( this.exitFlag ) {
+ AAISystemExitUtil.systemExitCloseAAIGraph(1);
+ }
+ else {
+ AAISystemExitUtil.systemExitCloseAAIGraph(0);
+ }
+ }
+ public void init(String[] args) throws AAIException {
+ cArgs = new CommandLineArgs();
+ JCommander jCommander = new JCommander(cArgs, args);
+ jCommander.setProgramName(DynamicPayloadGenerator.class.getSimpleName());
+ LOGGER.info("Snapshot file " + cArgs.dataSnapshot);
+
+
+ // TODO- How to add dynamic.properties
+
+ LOGGER.info("output file " + cArgs.output);
+ LOGGER.info("format file " + cArgs.format);
+ LOGGER.info("schema enabled " + cArgs.schemaEnabled);
+ LOGGER.info("Multiple snapshots " + cArgs.isMultipleSnapshot);
+ LOGGER.info("Is Partial Graph " + cArgs.isPartialGraph);
+
+ if (cArgs.config.isEmpty())
+ cArgs.config = AAIConstants.AAI_HOME_ETC_APP_PROPERTIES + "dynamic.properties";
+
+ LOGGER.info("config file " + cArgs.config);
+ if (cArgs.nodePropertyFile.isEmpty())
+ cArgs.nodePropertyFile = AAIConstants.AAI_HOME_ETC_SCRIPT + "/tenant_isolation/nodes.json";
+ LOGGER.info("nodePropertyFile file " + cArgs.nodePropertyFile);
+
+ if (cArgs.inputFilterPropertyFile.isEmpty())
+ cArgs.inputFilterPropertyFile = AAIConstants.AAI_HOME_ETC_SCRIPT + "/tenant_isolation/inputFilters.json";
+ LOGGER.info("inputFilterPropertyFile file " + cArgs.inputFilterPropertyFile);
+
+ if (cArgs.isPartialGraph)
+ cArgs.dataSnapshot = cArgs.dataSnapshot+".partial";
+
+ if (!cArgs.isMultipleSnapshot) {
+ validateFile(cArgs.dataSnapshot);
+ } else {
+ // for multiple snapshots dataSnapshot + ".P" is the prefix of the
+ // files
+ sequenceInputStreams = validateMultipleSnapshots(cArgs.dataSnapshot);
+ }
+
+ LOGGER.info("Datasnapshot file " + cArgs.dataSnapshot);
+ AAIConfig.init();
+
+ urlBase = AAIConfig.get("aai.server.url.base", "");
+
+ }
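init() relies on JCommander to bind the command-line flags onto the annotated CommandLineArgs fields before the path defaults are applied. A self-contained binding sketch; the flags mirror DataSnapshot's argument class earlier in this change, purely for illustration:

```java
// Minimal JCommander binding sketch; the flags copy DataSnapshot's
// CommandLineArgs above, purely for illustration.
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;

public class ArgsSketch {
    @Parameter(names = "-c", description = "command for taking data snapshot")
    public String command = "JUST_TAKE_SNAPSHOT";

    @Parameter(names = "-threadCount", description = "thread count for create")
    public int threadCount = 15; // stand-in for the GraphAdminConstants default

    public static void main(String[] argv) {
        ArgsSketch cArgs = new ArgsSketch();
        new JCommander(cArgs, argv); // binds -c / -threadCount onto the fields
        System.out.println(cArgs.command + " / " + cArgs.threadCount);
    }
}
```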
+
+ public void generatePayloads() throws AAIException, IOException {
+
+ List<Map<String, List<String>>> nodeFilters = readFile(cArgs.nodePropertyFile);
+ /*
+ * Read the inputFilters, which specify, for each node-type, the regexes that need to be
+ * applied and the filtered-node-type to start the traversal from.
+ * For example: complex --> apply the regexes on cloud-region, then traverse to complex;
+ * i.e. complex --> filtered-node-type: cloud-region, filters: regexes applied on cloud-region
+ */
+ /*
+ * Example:
+ * { "cloud-region" :
+ * {"filtered-node-type":"cloud-region",
+ * "filters": [ { "property": "cloud-owner", "regex": "att-aic" },
+ * { "property": "cloud-region-id", "regex": "M*" },
+ * { "property":"cloud-region-version", "regex": "aic2.5|aic3.0" }
+ * ] },
+ * "complex" : {
+ * "filtered-node-type":"cloud-region",
+ * "filters": [ { "property": "cloud-owner", "regex": "att-aic" },
+ * { "property": "cloud-region-id", "regex": "M*" },
+ * { "property":"cloud-region-version", "regex": "aic2.5|aic3.0" }
+ * ] },
+ *
+ * } }
+ */
+ Map<String, Map<String, String>> inputFilters = readInputFilterPropertyFile(cArgs.inputFilterPropertyFile);
+ Map<String, String> filteredNodeTypes = findFilteredNodeTypes(cArgs.inputFilterPropertyFile);
+ // Read the input filter criteria
+ LOGGER.info("Load the Graph");
+
+ this.loadGraph();
+ LOGGER.info("Generate payload");
+ this.generatePayload(nodeFilters, inputFilters, filteredNodeTypes);
+ LOGGER.info("Close graph");
+ this.closeGraph();
+
+ }
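For the inputFilters sample in the comment above, the readers further down would yield structures shaped roughly like this (illustrative Java; the real maps are assembled from Jackson nodes):

```java
// Expected shape of the parsed filter structures for the sample above (illustrative).
import java.util.HashMap;
import java.util.Map;

public class FilterShapeSketch {
    public static void main(String[] args) {
        Map<String, Map<String, String>> inputFilters = new HashMap<>();
        Map<String, String> cloudRegionRegexes = new HashMap<>();
        cloudRegionRegexes.put("cloud-owner", "att-aic");
        cloudRegionRegexes.put("cloud-region-id", "M*");
        cloudRegionRegexes.put("cloud-region-version", "aic2.5|aic3.0");
        inputFilters.put("cloud-region", cloudRegionRegexes);
        inputFilters.put("complex", cloudRegionRegexes); // complex reuses the cloud-region regexes

        Map<String, String> filteredNodeTypes = new HashMap<>();
        filteredNodeTypes.put("cloud-region", "cloud-region");
        filteredNodeTypes.put("complex", "cloud-region"); // start from cloud-region, traverse to complex
        System.out.println(inputFilters + " / " + filteredNodeTypes);
    }
}
```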
+
+ private List<Map<String, List<String>>> readFile(String inputFile) throws IOException {
+
+ // validate that we can read the inputFile
+ validateFile(inputFile);
+
+ InputStream is = new FileInputStream(inputFile);
+ Scanner scanner = new Scanner(is);
+ String jsonFile = scanner.useDelimiter("\\Z").next();
+ scanner.close();
+
+ List<Map<String, List<String>>> allNodes = new ArrayList<>();
+ Map<String, List<String>> filterCousins = new HashMap<>();
+ Map<String, List<String>> filterParents = new HashMap<>();
+
+ ObjectMapper mapper = new ObjectMapper();
+
+ JsonNode rootNode = mapper.readTree(jsonFile);
+
+ Iterator<Entry<String, JsonNode>> nodeFields = rootNode.getFields();
+
+ while (nodeFields.hasNext()) {
+ Entry<String, JsonNode> entry = nodeFields.next();
+ String nodeType = entry.getKey();
+ JsonNode nodeProperty = entry.getValue();
+
+ JsonNode cousinFilter = nodeProperty.path("cousins");
+ JsonNode parentFilter = nodeProperty.path("parents");
+ List<String> cousins = mapper.readValue(cousinFilter.traverse(),
+ new TypeReference<ArrayList<String>>() {
+ });
+
+ List<String> parents = mapper.readValue(parentFilter.traverse(),
+ new TypeReference<ArrayList<String>>() {
+ });
+ for (String cousin : cousins) {
+ LOGGER.info("Cousins-Filtered " + cousin);
+ }
+ for (String parent : parents) {
+ LOGGER.info("Parents-Filtered " + parent);
+ }
+ filterCousins.put(nodeType, cousins);
+ filterParents.put(nodeType, parents);
+
+ }
+
+ allNodes.add(filterCousins);
+ allNodes.add(filterParents);
+ return allNodes;
+
+ }
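+
+ /*
+ * Illustrative nodes.json shape consumed by readFile() above. Only the
+ * "cousins" and "parents" arrays are read per node type; the node types
+ * and values below are assumptions for illustration:
+ * {
+ * "cloud-region" : { "cousins" : [ "complex" ], "parents" : [ "tenant" ] },
+ * "pserver" : { "cousins" : [ ], "parents" : [ ] }
+ * }
+ */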
+
+ /* Example:
+{
+ "cloud-region" : {
+ "filtered-node-type" :"cloud-region",
+ "filters": [
+ {
+ "property": "cloud-owner",
+ "regex": "att-aic"
+ },
+ {
+ "property": "cloud-region-id",
+ "regex": "M*"
+ },
+ {
+ "property": "cloud-region-version",
+ "regex": "aic2.5|aic3.0"
+ }
+ ]
+ },
+ "complex" : {
+ "filters":[
+ ]
+
+ }
+}
+*/
+ private Map<String, Map<String, String>> readInputFilterPropertyFile(String inputFile) throws IOException {
+
+ validateFile(inputFile);
+
+ InputStream is = new FileInputStream(inputFile);
+ Scanner scanner = new Scanner(is);
+ String jsonFile = scanner.useDelimiter("\\Z").next();
+ scanner.close();
+
+ Map<String, Map<String, String>> propToRegex = new HashMap<>();
+
+ ObjectMapper mapper = new ObjectMapper();
+
+ JsonNode rootNode = mapper.readTree(jsonFile);
+
+ Iterator<Entry<String, JsonNode>> nodeFields = rootNode.getFields();
+
+ while (nodeFields.hasNext()) {
+ Entry<String, JsonNode> entry = nodeFields.next();
+ String nodeType = entry.getKey();
+ JsonNode nodeProperty = entry.getValue();
+
+ JsonNode filter = nodeProperty.path("filters");
+ List<JsonNode> filterMap = mapper.readValue(filter.traverse(),
+ new TypeReference<ArrayList<JsonNode>>() {
+ });
+ Map<String, String> filterMaps = new HashMap<>();
+ for (JsonNode n : filterMap) {
+ filterMaps.put(n.get("property").asText(), n.get("regex").asText());
+ }
+
+ propToRegex.put(nodeType, filterMaps);
+ }
+ return propToRegex;
+ }
+
+ private Map<String, String> findFilteredNodeTypes(String inputFile) throws IOException {
+
+ validateFile(inputFile);
+
+ InputStream is = new FileInputStream(inputFile);
+ Scanner scanner = new Scanner(is);
+ String jsonFile = scanner.useDelimiter("\\Z").next();
+ scanner.close();
+
+ Map<String, String> filteredNodeTypes = new HashMap<>();
+
+ ObjectMapper mapper = new ObjectMapper();
+
+ JsonNode rootNode = mapper.readTree(jsonFile);
+
+ Iterator<Entry<String, JsonNode>> nodeFields = rootNode.getFields();
+
+ while (nodeFields.hasNext()) {
+ Entry<String, JsonNode> entry = nodeFields.next();
+ String nodeType = entry.getKey();
+ JsonNode nodeProperty = entry.getValue();
+
+ JsonNode filter = nodeProperty.path("filtered-node-type");
+
+ filteredNodeTypes.put(nodeType, filter.asText());
+ }
+ return filteredNodeTypes;
+ }
+
+ public void loadGraph() throws IOException {
+
+ loadGraphIntoMemory();
+ buildDbEngine();
+
+ }
+
+ private void loadGraphIntoMemory() throws IOException {
+ if (!(cArgs.isMultipleSnapshot)) {
+ inMemGraph = new InMemoryGraph.Builder().build(cArgs.dataSnapshot, cArgs.config, cArgs.schemaEnabled,
+ cArgs.isPartialGraph);
+ } else {
+ inMemGraph = new InMemoryGraph.Builder().build(sequenceInputStreams, cArgs.config, cArgs.schemaEnabled,
+ cArgs.isPartialGraph);
+ }
+ }
+
+ private void buildDbEngine() {
+ // TODO: parameterize the version
+ loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
+
+ dbEngine = new InMemoryDBEngine(queryStyle, type, loader, inMemGraph.getGraph());
+ dbEngine.startTransaction();
+ }
+
+ private void generatePayload(List<Map<String, List<String>>> nodeFilters,
+ Map<String, Map<String, String>> inputFilters, Map<String, String> filteredNodeTypes)
+ throws AAIException, IOException {
+
+ Map<String, List<String>> filterCousinsMap = nodeFilters.get(0);
+ Map<String, List<String>> filterParentsMap = nodeFilters.get(1);
+ Set<String> nodeTypes = filterCousinsMap.keySet();
+
+ for (String nodeType : nodeTypes) {
+ if ("DMAAP-MR".equals(cArgs.format)) {
+ bw = createFile(nodeType + ".json");
+ }
+ List<String> filterCousins = filterCousinsMap.get(nodeType);
+ List<String> filterParents = filterParentsMap.get(nodeType);
+ Map<String, String> nodeInputFilterMap = inputFilters.get(nodeType);
+ String filteredNodeType = nodeType;
+ if(filteredNodeTypes.get(nodeType) != null && !filteredNodeTypes.get(nodeType).isEmpty())
+ filteredNodeType = filteredNodeTypes.get(nodeType);
+ readVertices(nodeType, filterCousins, filterParents, nodeInputFilterMap, filteredNodeType);
+ if(bw != null)
+ bw.close();
+ LOGGER.info("All Done-" + nodeType);
+ }
+
+ }
+
+ private BufferedWriter createFile(String outfileName) throws IOException {
+ // FileLocation
+ String fileName = outfileName;
+ File outFile = new File(fileName);
+ FileWriter fw = null;
+ LOGGER.info(" Will write to " + fileName);
+ try {
+ fw = new FileWriter(outFile.getAbsoluteFile());
+ } catch (IOException i) {
+ String emsg = "Unable to write to " + fileName + " Exception = " + i.getMessage();
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ throw i;
+ }
+ return new BufferedWriter(fw);
+ }
+
+ private void createDirectory(String dirName) throws IOException {
+ // FileLocation
+ Path pathDir = null;
+ try {
+ pathDir = Paths.get(dirName);
+ } catch (InvalidPathException i) {
+ String emsg = "Directory " + dirName + " could not be found.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+ try {
+ Files.createDirectories(pathDir);
+ } catch (Exception e) {
+ String emsg = "Directory " + dirName + " could not be created. Exception = " + e.getMessage();
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+ }
+
+ public void readVertices(String nodeType, List<String> filterCousins, List<String> filterParents,
+ Map<String, String> nodeInputFilters, String filteredNodeType) throws AAIException, IOException {
+
+ DBSerializer serializer = new DBSerializer(version, dbEngine, introspectorFactoryType, "sourceOfTruth");
+
+ /*
+ * Start with nodeType you need to filter and then traverse to the actual nodeType
+ */
+ GraphTraversal<Vertex, Vertex> gtraversal = inMemGraph.getGraph().traversal().V().has("aai-node-type",
+ filteredNodeType);
+
+
+ // input regex
+ if (nodeInputFilters != null && (!nodeInputFilters.isEmpty())) {
+ for (Map.Entry<String, String> entry : nodeInputFilters.entrySet()) {
+ String property = entry.getKey();
+ String regex = entry.getValue();
+ Pattern pa = Pattern.compile(regex);
+
+ gtraversal = gtraversal.has(property, P.test((t, p) -> {
+ Matcher m = ((Pattern) p).matcher((CharSequence) t);
+ return m.matches();
+ }, pa));
+ }
+ }
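+
+ // For example, with an inputFilters entry of { "cloud-owner": "att-aic" },
+ // the traversal above keeps only vertices whose cloud-owner property
+ // matches the regex "att-aic".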
+
+ /*
+ * Node types that are filtered through a different node type
+ * (e.g. tenant, availability-zone, complex, zone, pserver) come here
+ */
+ if (!filteredNodeType.equals(nodeType)) {
+
+ EdgeRuleQuery treeEdgeRuleQuery = new EdgeRuleQuery
+ .Builder(filteredNodeType, nodeType)
+ .edgeType(EdgeType.TREE)
+ .build();
+
+ EdgeRuleQuery cousinEdgeQuery = new EdgeRuleQuery
+ .Builder(filteredNodeType, nodeType)
+ .edgeType(EdgeType.COUSIN)
+ .build();
+
+ EdgeRule rule = null;
+ boolean hasTreeEdgeRule = true;
+
+ try {
+ rule = edgeRules.getRule(treeEdgeRuleQuery);
+ } catch (EdgeRuleNotFoundException | AmbiguousRuleChoiceException e) {
+ hasTreeEdgeRule = false;
+ }
+
+ if(!hasTreeEdgeRule) {
+ try {
+ rule = edgeRules.getRule(cousinEdgeQuery);
+ } catch (EdgeRuleNotFoundException | AmbiguousRuleChoiceException e) {
+ LOGGER.error("Unable to get a tree or cousin edge between {} and {}", filteredNodeType, nodeType);
+ return;
+ }
+ }
+
+ if (rule.getDirection().toString().equals(AAIDirection.OUT.toString())) {
+ gtraversal.out(rule.getLabel()).has("aai-node-type", nodeType);
+ } else {
+ gtraversal.in(rule.getLabel()).has("aai-node-type", nodeType);
+ }
+
+ }
+
+ String dirName = cArgs.output + AAIConstants.AAI_FILESEP + nodeType + AAIConstants.AAI_FILESEP;
+ createDirectory(dirName);
+ // TODO: Formatter
+
+ if ("DMAAP-MR".equals(cArgs.format)) {
+ while (gtraversal.hasNext()) {
+ if (bw == null)
+ bw = createFile(nodeType + ".json");
+ Vertex node = gtraversal.next();
+ Introspector nodeObj = serializer.getLatestVersionView(node);
+ createPayloadForDmaap(node, nodeObj);
+ }
+ } else {
+ if ("PAYLOAD".equals(cArgs.format)) {
+ int counter = 0;
+ while (gtraversal.hasNext()) {
+ Vertex node = gtraversal.next();
+ try {
+ counter++;
+ String filename = dirName + counter + "-" + nodeType + ".json";
+ bw = createFile(filename);
+ Introspector obj = loader.introspectorFromName(nodeType);
+ Set<Vertex> seen = new HashSet<>();
+ int depth = AAIProperties.MAXIMUM_DEPTH;
+ boolean nodeOnly = false;
+
+ Tree<Element> tree = dbEngine.getQueryEngine().findSubGraph(node, depth, nodeOnly);
+ TreeBackedVertex treeVertex = new TreeBackedVertex(node, tree);
+ serializer.dbToObjectWithFilters(obj, treeVertex, seen, depth, nodeOnly, filterCousins,
+ filterParents);
+ createPayloadForPut(obj);
+ if(bw != null)
+ bw.close();
+
+ URI uri = serializer.getURIForVertex(node);
+ String filenameWithUri = dirName + counter + "-" + nodeType + ".txt";
+ bw = createFile(filenameWithUri);
+ bw.write(uri.toString());
+ bw.newLine();
+ bw.close();
+ } catch (Exception e) {
+ String emsg = "Caught exception while processing [" + counter + "-" + nodeType + "] continuing";
+ System.out.println(emsg);
+ LOGGER.error(emsg + " " + LogFormatTools.getStackTop(e));
+ }
+ }
+ }
+ }
+
+ }
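+
+ /*
+ * For format PAYLOAD the loop above writes, per matching vertex, a pair of
+ * files under <output>/<node-type>/ (the node type shown is illustrative):
+ * 1-cloud-region.json - the filtered payload (see createPayloadForPut)
+ * 1-cloud-region.txt - the vertex URI on a single line
+ */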
+
+ public void createPayloadForPut(Introspector nodeObj) throws IOException {
+
+ String entityJson = nodeObj.marshal(false);
+ ObjectMapper mapper = new ObjectMapper();
+
+ ObjectNode rootNode = (ObjectNode) mapper.readTree(entityJson);
+ rootNode.remove("resource-version");
+
+ bw.newLine();
+ bw.write(rootNode.toString());
+ bw.newLine();
+ }
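+
+ /*
+ * Note: "resource-version" is removed above, presumably so the generated
+ * payload can later be PUT to a target A&AI instance without tripping the
+ * resource-version (optimistic locking) check.
+ */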
+
+ public void createPayloadForDmaap(Vertex node, Introspector nodeObj)
+ throws AAIException, UnsupportedEncodingException {
+
+ DBSerializer serializer = new DBSerializer(version, dbEngine, introspectorFactoryType, "sourceOfTruth");
+
+ URI uri = serializer.getURIForVertex(node);
+
+ String sourceOfTruth = "";
+ HashMap<String, Introspector> relatedVertices = new HashMap<>();
+ List<Vertex> vertexChain = dbEngine.getQueryEngine().findParents(node);
+
+ for (Vertex vertex : vertexChain) {
+ try {
+
+ Introspector vertexObj = serializer.getVertexProperties(vertex);
+
+ relatedVertices.put(vertexObj.getObjectId(), vertexObj);
+ } catch (AAIUnknownObjectException e) {
+ LOGGER.warn("Unable to get vertex properties, partial list of related vertices returned");
+ }
+
+ }
+
+ String transactionId = "TXID";
+ createNotificationEvent(transactionId, sourceOfTruth, uri, nodeObj, relatedVertices);
+
+ }
+
+ public void createNotificationEvent(String transactionId, String sourceOfTruth, URI uri, Introspector obj,
+ Map<String, Introspector> relatedObjects) throws AAIException, UnsupportedEncodingException {
+
+ String action = "CREATE";
+ final Introspector notificationEvent = loader.introspectorFromName("notification-event");
+
+ try {
+ Introspector eventHeader = loader.introspectorFromName("notification-event-header");
+ URIToObject parser = new URIToObject(loader, uri, (HashMap) relatedObjects);
+
+ String entityLink = urlBase + version + uri;
+
+ notificationEvent.setValue("cambria-partition", "AAI");
+
+ eventHeader.setValue("entity-link", entityLink);
+ eventHeader.setValue("action", action);
+ eventHeader.setValue("entity-type", obj.getDbName());
+ eventHeader.setValue("top-entity-type", parser.getTopEntityName());
+ eventHeader.setValue("source-name", sourceOfTruth);
+ eventHeader.setValue("version", version.toString());
+ eventHeader.setValue("id", transactionId);
+ eventHeader.setValue("event-type", "AAI-BASELINE");
+ if (eventHeader.getValue("domain") == null) {
+ eventHeader.setValue("domain", AAIConfig.get("aai.notificationEvent.default.domain", "UNK"));
+ }
+
+ if (eventHeader.getValue("sequence-number") == null) {
+ eventHeader.setValue("sequence-number",
+ AAIConfig.get("aai.notificationEvent.default.sequenceNumber", "UNK"));
+ }
+
+ if (eventHeader.getValue("severity") == null) {
+ eventHeader.setValue("severity", AAIConfig.get("aai.notificationEvent.default.severity", "UNK"));
+ }
+
+ if (eventHeader.getValue("id") == null) {
+ eventHeader.setValue("id", genDate2() + "-" + UUID.randomUUID().toString());
+
+ }
+
+ if (eventHeader.getValue("timestamp") == null) {
+ eventHeader.setValue("timestamp", genDate());
+ }
+
+ List<Object> parentList = parser.getParentList();
+ parentList.clear();
+
+ if (!parser.getTopEntity().equals(parser.getEntity())) {
+ Introspector child;
+ String json = obj.marshal(false);
+ child = parser.getLoader().unmarshal(parser.getEntity().getName(), json);
+ parentList.add(child.getUnderlyingObject());
+ }
+
+ final Introspector eventObject;
+
+ String json = "";
+ if (parser.getTopEntity().equals(parser.getEntity())) {
+ json = obj.marshal(false);
+ eventObject = loader.unmarshal(obj.getName(), json);
+ } else {
+ json = parser.getTopEntity().marshal(false);
+
+ eventObject = loader.unmarshal(parser.getTopEntity().getName(), json);
+ }
+ notificationEvent.setValue("event-header", eventHeader.getUnderlyingObject());
+ notificationEvent.setValue("entity", eventObject.getUnderlyingObject());
+
+ String entityJson = notificationEvent.marshal(false);
+
+ bw.newLine();
+ bw.write(entityJson);
+
+ } catch (AAIUnknownObjectException e) {
+ LOGGER.error("Fatal error - notification-event-header object not found!");
+ } catch (Exception e) {
+ LOGGER.error("Unmarshalling error occurred while generating Notification " + LogFormatTools.getStackTop(e));
+ }
+ }
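+
+ /*
+ * Sketch of the notification-event JSON written above (placeholder values
+ * are assumptions; only fields set in this method are shown):
+ * {
+ * "cambria-partition" : "AAI",
+ * "event-header" : {
+ * "id" : "<transactionId>", "event-type" : "AAI-BASELINE",
+ * "action" : "CREATE", "entity-type" : "<obj db name>",
+ * "top-entity-type" : "<top entity>", "entity-link" : "<urlBase><version><uri>", ...
+ * },
+ * "entity" : { ... marshalled top-level entity ... }
+ * }
+ */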
+
+ private void closeGraph() {
+ inMemGraph.getGraph().tx().rollback();
+ inMemGraph.getGraph().close();
+ }
+
+ public static String genDate() {
+ Date date = new Date();
+ DateFormat formatter = new SimpleDateFormat("yyyyMMdd-HH:mm:ss:SSS");
+ return formatter.format(date);
+ }
+
+ public static String genDate2() {
+ Date date = new Date();
+ DateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss");
+ return formatter.format(date);
+ }
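+
+ // Example for the same illustrative instant:
+ // genDate() -> "20190117-13:48:07:123"
+ // genDate2() -> "20190117134807"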
+
+ private void validateFile(String filename) {
+ File f = new File(filename);
+ if (!f.exists()) {
+ String emsg = "File " + filename + " could not be found.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ } else if (!f.canRead()) {
+ String emsg = "File " + filename + " could not be read.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ } else if (f.length() == 0) {
+ String emsg = "File " + filename + " had no data.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+ }
+
+ private InputStream validateMultipleSnapshots(String filenamePrefix) {
+ if (filenamePrefix == null || filenamePrefix.length() == 0) {
+ String emsg = "No snapshot path was provided.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+ String targetDir = ".";
+ int lastSeparator = filenamePrefix.lastIndexOf(File.separator);
+
+ LOGGER.info("File separator=[" + File.separator + "] lastSeparator=" + lastSeparator + " filenamePrefix="
+ + filenamePrefix);
+ if (lastSeparator >= 0) {
+ targetDir = filenamePrefix.substring(0, lastSeparator);
+ LOGGER.info("targetDir=" + targetDir);
+ }
+ if (targetDir.length() == 0) {
+ String emsg = "No snapshot directory was found in path:" + filenamePrefix;
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+ String prefix = filenamePrefix.substring(lastSeparator + 1);
+ if (prefix == null || prefix.length() == 0) {
+ String emsg = "No snapshot file prefix was provided.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+ long timeA = System.nanoTime();
+
+ List<File> snapFilesArr = new ArrayList<>();
+ String thisSnapPrefix = prefix + ".P";
+ File fDir = new File(targetDir); // Snapshot directory
+ File[] allFilesArr = fDir.listFiles();
+ if (allFilesArr == null) {
+ String emsg = "Snapshot directory " + targetDir + " could not be read.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+ for (File snapFile : allFilesArr) {
+ String snapFName = snapFile.getName();
+ if (snapFName.startsWith(thisSnapPrefix)) {
+ snapFilesArr.add(snapFile);
+ }
+ }
+
+ if (snapFilesArr.isEmpty()) {
+ String fullFName = targetDir + AAIConstants.AAI_FILESEP + thisSnapPrefix;
+ String emsg = "Snapshot files " + fullFName + "* could not be found.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+
+ int fCount = snapFilesArr.size();
+ Vector<InputStream> inputStreamsV = new Vector<>();
+ for (int i = 0; i < fCount; i++) {
+ File f = snapFilesArr.get(i);
+ String fname = f.getName();
+ if (!f.canRead()) {
+ String emsg = "Snapshot file " + fname + " could not be read.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ } else if (f.length() == 0) {
+ String emsg = "Snapshot file " + fname + " had no data.";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+ String fullFName = targetDir + AAIConstants.AAI_FILESEP + fname;
+ InputStream fis = null;
+ try {
+ fis = new FileInputStream(fullFName);
+ } catch (FileNotFoundException e) {
+ // should not happen at this point
+ String emsg = "Snapshot file " + fullFName + " could not be found";
+ LOGGER.error(emsg);
+ System.out.println(emsg);
+ taskExit();
+ }
+ inputStreamsV.add(fis);
+ }
+ // Chain the collected streams into a single SequenceInputStream
+ InputStream sis = new SequenceInputStream(inputStreamsV.elements());
+ return sis;
+ }
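+
+ /*
+ * Example (paths are illustrative): given -d /opt/snapshots/dataSnapshot with
+ * -m true, any files named dataSnapshot.P* (e.g. dataSnapshot.P0,
+ * dataSnapshot.P1) are stitched into one stream. Note that File.listFiles()
+ * imposes no ordering, so the concatenation order is filesystem-dependent.
+ */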
+
+ public InMemoryGraph getInMemGraph() {
+ return inMemGraph;
+ }
+
+ public void setInMemGraph(InMemoryGraph inMemGraph) {
+ this.inMemGraph = inMemGraph;
+ }
+}
+
+class CommandLineArgs {
+
+ @Parameter(names = "--help", help = true)
+ public boolean help;
+
+ @Parameter(names = "-d", description = "snapshot file to be loaded", required = true)
+ public String dataSnapshot;
+
+ @Parameter(names = "-s", description = "is schema to be enabled ", arity = 1)
+ public boolean schemaEnabled = true;
+
+ @Parameter(names = "-c", description = "location of configuration file")
+ public String config = "";
+
+ @Parameter(names = "-o", description = "output location")
+ public String output = "";
+
+ @Parameter(names = "-f", description = "format of output")
+ public String format = "PAYLOAD";
+
+ @Parameter(names = "-n", description = "Node input file")
+ public String nodePropertyFile = "";
+
+ @Parameter(names = "-m", description = "multipe snapshots or not", arity = 1)
+ public boolean isMultipleSnapshot = false;
+
+ @Parameter(names = "-i", description = "input filter configuration file")
+ public String inputFilterPropertyFile = "";
+
+ @Parameter(names = "-p", description = "Use the partial graph", arity = 1)
+ public boolean isPartialGraph = true;
+
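+ /*
+ * Illustrative invocation (paths are assumptions; the flags map to the
+ * @Parameter definitions above):
+ * -d /opt/aai/snapshots/dataSnapshot -o /opt/aai/payloads -f PAYLOAD
+ * -n .../tenant_isolation/nodes.json -i .../tenant_isolation/inputFilters.json
+ * -m false -p false
+ */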
+}
diff --git a/src/main/java/org/onap/aai/dbgen/GraphSONPartialIO.java b/src/main/java/org/onap/aai/dbgen/GraphSONPartialIO.java new file mode 100644 index 0000000..915db69 --- /dev/null +++ b/src/main/java/org/onap/aai/dbgen/GraphSONPartialIO.java @@ -0,0 +1,158 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.dbgen; + +import org.apache.tinkerpop.gremlin.structure.Graph; +import org.apache.tinkerpop.gremlin.structure.io.Io; +import org.apache.tinkerpop.gremlin.structure.io.IoRegistry; +import org.apache.tinkerpop.gremlin.structure.io.Mapper; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONMapper; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONReader; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONVersion; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONWriter; + +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Optional; +import java.util.function.Consumer; + +/** + * Constructs GraphSON IO implementations given a {@link Graph} and {@link IoRegistry}. Implementers of the {@link Graph} + * interfaces should see the {@link GraphSONMapper} for information on the expectations for the {@link IoRegistry}. + * + * @author Stephen Mallette (http://stephen.genoprime.com) + */ +public final class GraphSONPartialIO implements Io<GraphSONPartialReader.Builder, GraphSONWriter.Builder, GraphSONMapper.Builder> { + private final IoRegistry registry; + private final Graph graph; + private final Optional<Consumer<Mapper.Builder>> onMapper; + private final GraphSONVersion version; + + private GraphSONPartialIO(final Builder builder) { + this.registry = builder.registry; + this.graph = builder.graph; + this.onMapper = Optional.ofNullable(builder.onMapper); + this.version = builder.version; + } + + /** + * {@inheritDoc} + */ + @Override + public GraphSONPartialReader.Builder reader() { + return GraphSONPartialReader.build().mapper(mapper().create()); + } + + /** + * {@inheritDoc} + */ + @Override + public GraphSONWriter.Builder writer() { + return GraphSONWriter.build().mapper(mapper().create()); + } + + /** + * {@inheritDoc} + */ + @Override + public GraphSONMapper.Builder mapper() { + final GraphSONMapper.Builder builder = (null == this.registry) ? 
+ GraphSONMapper.build().version(version) : GraphSONMapper.build().version(version).addRegistry(this.registry); + onMapper.ifPresent(c -> c.accept(builder)); + return builder; + } + + /** + * {@inheritDoc} + */ + @Override + public void writeGraph(final String file) throws IOException { + try (final OutputStream out = new FileOutputStream(file)) { + writer().create().writeGraph(out, graph); + } + } + + /** + * {@inheritDoc} + */ + @Override + public void readGraph(final String file) throws IOException { + try (final InputStream in = new FileInputStream(file)) { + reader().create().readGraph(in, graph); + } + } + + /** + * Create a new builder using the default version of GraphSON. + */ + public static Io.Builder<GraphSONPartialIO> build() { + return build(GraphSONVersion.V1_0); + } + + /** + * Create a new builder using the specified version of GraphSON. + */ + public static Io.Builder<GraphSONPartialIO> build(final GraphSONVersion version) { + return new Builder(version); + } + + public final static class Builder implements Io.Builder<GraphSONPartialIO> { + + private IoRegistry registry = null; + private Graph graph; + private Consumer<Mapper.Builder> onMapper = null; + private final GraphSONVersion version; + + Builder(final GraphSONVersion version) { + this.version = version; + } + + /** + * @deprecated As of release 3.2.2, replaced by {@link #onMapper(Consumer)}. + */ + @Deprecated + @Override + public Io.Builder<GraphSONPartialIO> registry(final IoRegistry registry) { + this.registry = registry; + return this; + } + + @Override + public Io.Builder<? extends Io> onMapper(final Consumer<Mapper.Builder> onMapper) { + this.onMapper = onMapper; + return this; + } + + @Override + public Io.Builder<GraphSONPartialIO> graph(final Graph g) { + this.graph = g; + return this; + } + + @Override + public GraphSONPartialIO create() { + if (null == graph) throw new IllegalArgumentException("The graph argument was not specified"); + return new GraphSONPartialIO(this); + } + } +} diff --git a/src/main/java/org/onap/aai/dbgen/GraphSONPartialReader.java b/src/main/java/org/onap/aai/dbgen/GraphSONPartialReader.java new file mode 100644 index 0000000..ebe2180 --- /dev/null +++ b/src/main/java/org/onap/aai/dbgen/GraphSONPartialReader.java @@ -0,0 +1,354 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ +package org.onap.aai.dbgen; + +import org.apache.tinkerpop.gremlin.structure.Direction; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Graph; +import org.apache.tinkerpop.gremlin.structure.Property; +import org.apache.tinkerpop.gremlin.structure.T; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; +import org.apache.tinkerpop.gremlin.structure.io.GraphReader; +import org.apache.tinkerpop.gremlin.structure.io.GraphWriter; +import org.apache.tinkerpop.gremlin.structure.io.Mapper; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONMapper; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONReader; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONTokens; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONVersion; +import org.apache.tinkerpop.gremlin.structure.io.gryo.GryoWriter; +import org.apache.tinkerpop.gremlin.structure.util.Attachable; +import org.apache.tinkerpop.gremlin.structure.util.Host; +import org.apache.tinkerpop.gremlin.structure.util.star.StarGraph; +import org.apache.tinkerpop.gremlin.util.function.FunctionUtils; +import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils; +import org.apache.tinkerpop.shaded.jackson.core.type.TypeReference; +import org.apache.tinkerpop.shaded.jackson.databind.JsonNode; +import org.apache.tinkerpop.shaded.jackson.databind.ObjectMapper; +import org.apache.tinkerpop.shaded.jackson.databind.node.JsonNodeType; +import org.onap.aai.dbmap.InMemoryGraph; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Function; +import java.util.stream.Stream; + +/** + * This is a Wrapper around the GraphsonReader class + * The idea is to rewrite methods that are customized for A&AI + * GraphsonReader is a final class . hence the use of the Wrapper + * instead of inheriting-overwriting + * + * + */ +public final class GraphSONPartialReader implements GraphReader { + private final ObjectMapper mapper ; + private final long batchSize ; + private final GraphSONVersion version ; + private boolean unwrapAdjacencyList = false; + private final GraphSONReader reader; + + private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(InMemoryGraph.class); + + final TypeReference<Map<String, Object>> mapTypeReference = new TypeReference<Map<String, Object>>() { + }; + + private GraphSONPartialReader(final Builder builder) { + mapper = builder.mapper.createMapper(); + batchSize = builder.batchSize; + unwrapAdjacencyList = builder.unwrapAdjacencyList; + version = ((GraphSONMapper)builder.mapper).getVersion(); + reader = GraphSONReader.build().create(); + } + + /** + * Read data into a {@link Graph} from output generated by any of the {@link GraphSONWriter} {@code writeVertex} or + * {@code writeVertices} methods or by {@link GryoWriter#writeGraph(OutputStream, Graph)}. 
+ * + * @param inputStream a stream containing an entire graph of vertices and edges as defined by the accompanying + * {@link GraphSONWriter#writeGraph(OutputStream, Graph)}. + * @param graphToWriteTo the graph to write to when reading from the stream. + */ + @Override + public void readGraph(final InputStream inputStream, final Graph graphToWriteTo) throws IOException { + // dual pass - create all vertices and store to cache the ids. then create edges. as long as we don't + // have vertex labels in the output we can't do this single pass + LOGGER.info("Read the Partial Graph"); + final Map<StarGraph.StarVertex,Vertex> cache = new HashMap<>(); + final AtomicLong counter = new AtomicLong(0); + + final boolean supportsTx = graphToWriteTo.features().graph().supportsTransactions(); + final Graph.Features.EdgeFeatures edgeFeatures = graphToWriteTo.features().edge(); + + readVertexStrings(inputStream).<Vertex>map(FunctionUtils.wrapFunction(line -> readVertex(new ByteArrayInputStream(line.getBytes()), null, null, Direction.IN))).forEach(vertex -> { + try{ + final Attachable<Vertex> attachable = (Attachable<Vertex>) vertex; + cache.put((StarGraph.StarVertex) attachable.get(), attachable.attach(Attachable.Method.create(graphToWriteTo))); + if (supportsTx && counter.incrementAndGet() % batchSize == 0) + graphToWriteTo.tx().commit(); + } + catch(Exception ex){ + LOGGER.info("Error in reading vertex from graphson"+vertex.toString()); + } + }); + + cache.entrySet().forEach(kv -> kv.getKey().edges(Direction.IN).forEachRemaining(e -> { + try{ + // can't use a standard Attachable attach method here because we have to use the cache for those + // graphs that don't support userSuppliedIds on edges. note that outVertex/inVertex methods return + // StarAdjacentVertex whose equality should match StarVertex. + final Vertex cachedOutV = cache.get(e.outVertex()); + final Vertex cachedInV = cache.get(e.inVertex()); + + if(cachedOutV != null && cachedInV != null){ + + final Edge newEdge = edgeFeatures.willAllowId(e.id()) ? cachedOutV.addEdge(e.label(), cachedInV, T.id, e.id()) : cachedOutV.addEdge(e.label(), cachedInV); + e.properties().forEachRemaining(p -> newEdge.property(p.key(), p.value())); + } + else{ + LOGGER.debug("Ghost edges from "+ cachedOutV + " to "+ cachedInV); + + } + if (supportsTx && counter.incrementAndGet() % batchSize == 0) + graphToWriteTo.tx().commit(); + } + catch(Exception ex){ + LOGGER.info("Error in writing vertex into graph"+e.toString()); + } + })); + + if (supportsTx) graphToWriteTo.tx().commit(); + } + + /** + * Read {@link Vertex} objects from output generated by any of the {@link GraphSONWriter} {@code writeVertex} or + * {@code writeVertices} methods or by {@link GraphSONWriter#writeGraph(OutputStream, Graph)}. + * + * @param inputStream a stream containing at least one {@link Vertex} as defined by the accompanying + * {@link GraphWriter#writeVertices(OutputStream, Iterator, Direction)} or + * {@link GraphWriter#writeVertices(OutputStream, Iterator)} methods. + * @param vertexAttachMethod a function that creates re-attaches a {@link Vertex} to a {@link Host} object. + * @param edgeAttachMethod a function that creates re-attaches a {@link Edge} to a {@link Host} object. + * @param attachEdgesOfThisDirection only edges of this direction are passed to the {@code edgeMaker}. 
+ */ + @Override + public Iterator<Vertex> readVertices(final InputStream inputStream, + final Function<Attachable<Vertex>, Vertex> vertexAttachMethod, + final Function<Attachable<Edge>, Edge> edgeAttachMethod, + final Direction attachEdgesOfThisDirection) throws IOException { + // return readVertexStrings(inputStream).<Vertex>map(FunctionUtils.wrapFunction(line -> readVertex(new ByteArrayInputStream(line.getBytes()), vertexAttachMethod, edgeAttachMethod, attachEdgesOfThisDirection))).iterator(); + return reader.readVertices(inputStream, vertexAttachMethod, edgeAttachMethod, attachEdgesOfThisDirection); + + } + + /** + * Read a {@link Vertex} from output generated by any of the {@link GraphSONWriter} {@code writeVertex} or + * {@code writeVertices} methods or by {@link GraphSONWriter#writeGraph(OutputStream, Graph)}. + * + * @param inputStream a stream containing at least a single vertex as defined by the accompanying + * {@link GraphWriter#writeVertex(OutputStream, Vertex)}. + * @param vertexAttachMethod a function that creates re-attaches a {@link Vertex} to a {@link Host} object. + */ + @Override + public Vertex readVertex(final InputStream inputStream, final Function<Attachable<Vertex>, Vertex> vertexAttachMethod) throws IOException { + return reader.readVertex(inputStream, vertexAttachMethod); + } + + /** + * Read a {@link Vertex} from output generated by any of the {@link GraphSONWriter} {@code writeVertex} or + * {@code writeVertices} methods or by {@link GraphSONWriter#writeGraph(OutputStream, Graph)}. + * + * @param inputStream a stream containing at least one {@link Vertex} as defined by the accompanying + * {@link GraphWriter#writeVertices(OutputStream, Iterator, Direction)} method. + * @param vertexAttachMethod a function that creates re-attaches a {@link Vertex} to a {@link Host} object. + * @param edgeAttachMethod a function that creates re-attaches a {@link Edge} to a {@link Host} object. + * @param attachEdgesOfThisDirection only edges of this direction are passed to the {@code edgeMaker}. + */ + @Override + public Vertex readVertex(final InputStream inputStream, + final Function<Attachable<Vertex>, Vertex> vertexAttachMethod, + final Function<Attachable<Edge>, Edge> edgeAttachMethod, + final Direction attachEdgesOfThisDirection) throws IOException { + + return reader.readVertex(inputStream, vertexAttachMethod, edgeAttachMethod, attachEdgesOfThisDirection); + } + + /** + * Read an {@link Edge} from output generated by {@link GraphSONWriter#writeEdge(OutputStream, Edge)} or via + * an {@link Edge} passed to {@link GraphSONWriter#writeObject(OutputStream, Object)}. + * + * @param inputStream a stream containing at least one {@link Edge} as defined by the accompanying + * {@link GraphWriter#writeEdge(OutputStream, Edge)} method. + * @param edgeAttachMethod a function that creates re-attaches a {@link Edge} to a {@link Host} object. + */ + @Override + public Edge readEdge(final InputStream inputStream, final Function<Attachable<Edge>, Edge> edgeAttachMethod) throws IOException { + /*if (version == GraphSONVersion.V1_0) { + final Map<String, Object> edgeData = mapper.readValue(inputStream, mapTypeReference); + + final Map<String, Object> edgeProperties = edgeData.containsKey(GraphSONTokens.PROPERTIES) ? 
+ (Map<String, Object>) edgeData.get(GraphSONTokens.PROPERTIES) : Collections.EMPTY_MAP; + final DetachedEdge edge = new DetachedEdge(edgeData.get(GraphSONTokens.ID), + edgeData.get(GraphSONTokens.LABEL).toString(), + edgeProperties, + Pair.with(edgeData.get(GraphSONTokens.OUT), edgeData.get(GraphSONTokens.OUT_LABEL).toString()), + Pair.with(edgeData.get(GraphSONTokens.IN), edgeData.get(GraphSONTokens.IN_LABEL).toString())); + + return edgeAttachMethod.apply(edge); + } else { + return edgeAttachMethod.apply((DetachedEdge) mapper.readValue(inputStream, Edge.class)); + }*/ + return reader.readEdge(inputStream, edgeAttachMethod); + } + + /** + * Read a {@link VertexProperty} from output generated by + * {@link GraphSONWriter#writeVertexProperty(OutputStream, VertexProperty)} or via an {@link VertexProperty} passed + * to {@link GraphSONWriter#writeObject(OutputStream, Object)}. + * + * @param inputStream a stream containing at least one {@link VertexProperty} as written by the accompanying + * {@link GraphWriter#writeVertexProperty(OutputStream, VertexProperty)} method. + * @param vertexPropertyAttachMethod a function that creates re-attaches a {@link VertexProperty} to a + * {@link Host} object. + */ + @Override + public VertexProperty readVertexProperty(final InputStream inputStream, + final Function<Attachable<VertexProperty>, VertexProperty> vertexPropertyAttachMethod) throws IOException { + /*if (version == GraphSONVersion.V1_0) { + final Map<String, Object> vpData = mapper.readValue(inputStream, mapTypeReference); + final Map<String, Object> metaProperties = (Map<String, Object>) vpData.get(GraphSONTokens.PROPERTIES); + final DetachedVertexProperty vp = new DetachedVertexProperty(vpData.get(GraphSONTokens.ID), + vpData.get(GraphSONTokens.LABEL).toString(), + vpData.get(GraphSONTokens.VALUE), metaProperties); + return vertexPropertyAttachMethod.apply(vp); + } else { + return vertexPropertyAttachMethod.apply((DetachedVertexProperty) mapper.readValue(inputStream, VertexProperty.class)); + }*/ + return reader.readVertexProperty(inputStream, vertexPropertyAttachMethod); + } + + /** + * Read a {@link Property} from output generated by {@link GraphSONWriter#writeProperty(OutputStream, Property)} or + * via an {@link Property} passed to {@link GraphSONWriter#writeObject(OutputStream, Object)}. + * + * @param inputStream a stream containing at least one {@link Property} as written by the accompanying + * {@link GraphWriter#writeProperty(OutputStream, Property)} method. + * @param propertyAttachMethod a function that creates re-attaches a {@link Property} to a {@link Host} object. + */ + @Override + public Property readProperty(final InputStream inputStream, + final Function<Attachable<Property>, Property> propertyAttachMethod) throws IOException { + /*if (version == GraphSONVersion.V1_0) { + final Map<String, Object> propertyData = mapper.readValue(inputStream, mapTypeReference); + final DetachedProperty p = new DetachedProperty(propertyData.get(GraphSONTokens.KEY).toString(), propertyData.get(GraphSONTokens.VALUE)); + return propertyAttachMethod.apply(p); + } else { + return propertyAttachMethod.apply((DetachedProperty) mapper.readValue(inputStream, Property.class)); + }*/ + return reader.readProperty(inputStream, propertyAttachMethod); + } + + /** + * {@inheritDoc} + */ + @Override + public <C> C readObject(final InputStream inputStream, final Class<? 
extends C> clazz) throws IOException { + return mapper.readValue(inputStream, clazz); + } + + private Stream<String> readVertexStrings(final InputStream inputStream) throws IOException { + if (unwrapAdjacencyList) { + final JsonNode root = mapper.readTree(inputStream); + final JsonNode vertices = root.get(GraphSONTokens.VERTICES); + if (!vertices.getNodeType().equals(JsonNodeType.ARRAY)) throw new IOException(String.format("The '%s' key must be an array", GraphSONTokens.VERTICES)); + return IteratorUtils.stream(vertices.elements()).map(Object::toString); + } else { + final BufferedReader br = new BufferedReader(new InputStreamReader(inputStream)); + return br.lines(); + } + + } + + + public static Builder build() { + return new Builder(); + } + + public final static class Builder implements ReaderBuilder<GraphSONPartialReader> { + private long batchSize = 10000; + + private Mapper<ObjectMapper> mapper = GraphSONMapper.build().create(); + private boolean unwrapAdjacencyList = false; + + + private Builder() {} + + /** + * Number of mutations to perform before a commit is executed when using + * {@link GraphSONPartialReader#readGraph(InputStream, Graph)}. + */ + public Builder batchSize(final long batchSize) { + this.batchSize = batchSize; + return this; + } + + /** + * Override all of the {@link GraphSONMapper} builder + * options with this mapper. If this value is set to something other than null then that value will be + * used to construct the writer. + */ + public Builder mapper(final Mapper<ObjectMapper> mapper) { + this.mapper = mapper; + return this; + } + + /** + * If the adjacency list is wrapped in a JSON object, as is done when writing a graph with + * {@link GraphSONWriter.Builder#wrapAdjacencyList} set to {@code true}, this setting needs to be set to + * {@code true} to properly read it. By default, this value is {@code false} and the adjacency list is + * simply read as line delimited vertices. + * <p/> + * By setting this value to {@code true}, the generated JSON is no longer "splittable" by line and thus not + * suitable for OLAP processing. Furthermore, reading this format of the JSON with + * {@link GraphSONPartialReader#readGraph(InputStream, Graph)} or + * {@link GraphSONPartialReader#readVertices(InputStream, Function, Function, Direction)} requires that the + * entire JSON object be read into memory, so it is best saved for "small" graphs. 
+ */ + public Builder unwrapAdjacencyList(final boolean unwrapAdjacencyList) { + this.unwrapAdjacencyList = unwrapAdjacencyList; + return this; + } + + public GraphSONPartialReader create() { + return new GraphSONPartialReader(this); + } + } +} diff --git a/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java b/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java index c0f8ee9..9fc18eb 100644 --- a/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java +++ b/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java @@ -19,28 +19,31 @@ */ package org.onap.aai.dbgen.schemamod; -import java.util.Properties; - +import com.att.eelf.configuration.Configuration; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import org.onap.aai.config.PropertyPasswordConfiguration; import org.onap.aai.dbmap.DBConnectionType; +import org.onap.aai.exceptions.AAIException; import org.onap.aai.introspection.Loader; import org.onap.aai.introspection.LoaderFactory; import org.onap.aai.introspection.ModelType; -import org.onap.aai.setup.SchemaVersions; -import org.onap.aai.setup.SchemaVersion; import org.onap.aai.logging.ErrorLogHelper; -import org.onap.aai.serialization.engines.QueryStyle; +import org.onap.aai.logging.LoggingContext; import org.onap.aai.serialization.engines.JanusGraphDBEngine; +import org.onap.aai.serialization.engines.QueryStyle; import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersion; +import org.onap.aai.setup.SchemaVersions; import org.onap.aai.util.AAIConfig; import org.onap.aai.util.AAIConstants; +import org.onap.aai.util.ExceptionTranslator; import org.onap.aai.util.UniquePropertyCheck; import org.slf4j.MDC; - -import com.att.eelf.configuration.Configuration; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import java.util.Properties; + public class SchemaMod { private final LoaderFactory loaderFactory; @@ -159,13 +162,25 @@ public class SchemaMod { logger.info(msg); } - public static void main(String[] args) { - - AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext( - "org.onap.aai.config", - "org.onap.aai.setup" - ); + public static void main(String[] args) throws AAIException { + AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(); + PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration(); + initializer.initialize(ctx); + try { + ctx.scan( + "org.onap.aai.config", + "org.onap.aai.setup" + ); + ctx.refresh(); + } catch (Exception e) { + AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e); + System.out.println("Problems running SchemaMod "+aai.getMessage()); + LoggingContext.statusCode(LoggingContext.StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry"); + throw aai; + } LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class); SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class); SchemaMod schemaMod = new SchemaMod(loaderFactory, schemaVersions); diff --git a/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java b/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java index 616ff02..b3faec8 100644 --- a/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java +++ 
b/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java @@ -1,288 +1,339 @@ -/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.onap.aai.migration;
-
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import org.apache.tinkerpop.gremlin.structure.Edge;
-import org.apache.tinkerpop.gremlin.structure.Property;
-import org.apache.tinkerpop.gremlin.structure.Direction;
-import org.apache.tinkerpop.gremlin.structure.Vertex;
-import org.javatuples.Pair;
-import org.onap.aai.db.props.AAIProperties;
-import org.onap.aai.edges.EdgeIngestor;
-import org.onap.aai.introspection.LoaderFactory;
-import org.onap.aai.serialization.db.EdgeSerializer;
-import org.onap.aai.serialization.engines.TransactionalGraphEngine;
-import org.onap.aai.setup.SchemaVersions;
-
-/**
- * A migration template for "swinging" edges that terminate on an old-node to a new target node.
- * That is, given an oldNode and a newNode we will swing edges that terminate on the
- * oldNode and terminate them on the newNode (actually we drop the old edges and add new ones).
- *
- *
- * We allow the passing of some parameters to restrict what edges get swung over:
- * > otherEndNodeTypeRestriction: only swing edges that terminate on the oldNode if the
- * node at the other end of the edge is of this nodeType.
- * > edgeLabelRestriction: Only swing edges that have this edgeLabel
- * > edgeDirectionRestriction: Only swing edges that go this direction (from the oldNode)
- * this is a required parameter. valid values are: BOTH, IN, OUT
- *
- */
-@MigrationPriority(0)
-@MigrationDangerRating(1)
-public abstract class EdgeSwingMigrator extends Migrator {
-
- private boolean success = true;
- private String nodeTypeRestriction = null;
- private String edgeLabelRestriction = null;
- private String edgeDirRestriction = null;
- private List<Pair<Vertex, Vertex>> nodePairList;
-
-
- public EdgeSwingMigrator(TransactionalGraphEngine engine , LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
- super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
- }
-
-
- /**
- * Do not override this method as an inheritor of this class
- */
- @Override
- public void run() {
- executeModifyOperation();
- cleanupAsAppropriate(this.nodePairList);
- }
-
- /**
- * This is where inheritors should add their logic
- */
- protected void executeModifyOperation() {
-
- try {
- this.nodeTypeRestriction = this.getNodeTypeRestriction();
- this.edgeLabelRestriction = this.getEdgeLabelRestriction();
- this.edgeDirRestriction = this.getEdgeDirRestriction();
- nodePairList = this.getAffectedNodePairs();
- for (Pair<Vertex, Vertex> nodePair : nodePairList) {
- Vertex fromNode = nodePair.getValue0();
- Vertex toNode = nodePair.getValue1();
- this.swingEdges(fromNode, toNode,
- this.nodeTypeRestriction,this.edgeLabelRestriction,this.edgeDirRestriction);
- }
- } catch (Exception e) {
- logger.error("error encountered", e);
- success = false;
- }
- }
-
-
- protected void swingEdges(Vertex oldNode, Vertex newNode, String nodeTypeRestr, String edgeLabelRestr, String edgeDirRestr) {
- try {
- // If the old and new Vertices aren't populated, throw an exception
- if( oldNode == null ){
- logger.info ( "null oldNode passed to swingEdges() ");
- success = false;
- return;
- }
- else if( newNode == null ){
- logger.info ( "null newNode passed to swingEdges() ");
- success = false;
- return;
- }
- else if( edgeDirRestr == null ||
- (!edgeDirRestr.equals("BOTH")
- && !edgeDirRestr.equals("IN")
- && !edgeDirRestr.equals("OUT") )
- ){
- logger.info ( "invalid direction passed to swingEdges(). valid values are BOTH/IN/OUT ");
- success = false;
- return;
- }
- else if( edgeLabelRestr != null
- && (edgeLabelRestr.trim().equals("none") || edgeLabelRestr.trim().equals("")) ){
- edgeLabelRestr = null;
- }
- else if( nodeTypeRestr == null || nodeTypeRestr.trim().equals("") ){
- nodeTypeRestr = "none";
- }
-
- String oldNodeType = oldNode.value(AAIProperties.NODE_TYPE);
- String oldUri = oldNode.<String> property("aai-uri").isPresent() ? oldNode.<String> property("aai-uri").value() : "URI Not present";
-
- String newNodeType = newNode.value(AAIProperties.NODE_TYPE);
- String newUri = newNode.<String> property("aai-uri").isPresent() ? newNode.<String> property("aai-uri").value() : "URI Not present";
-
- // If the nodeTypes don't match, throw an error
- if( !oldNodeType.equals(newNodeType) ){
- logger.info ( "Can not swing edge from a [" + oldNodeType + "] node to a [" +
- newNodeType + "] node. ");
- success = false;
- return;
- }
-
- // Find and migrate any applicable OUT edges.
- if( edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("OUT") ){
- Iterator <Edge> edgeOutIter = null;
- if( edgeLabelRestr == null ) {
- edgeOutIter = oldNode.edges(Direction.OUT);
- }
- else {
- edgeOutIter = oldNode.edges(Direction.OUT, edgeLabelRestr);
- }
-
- while( edgeOutIter.hasNext() ){
- Edge oldOutE = edgeOutIter.next();
- String eLabel = oldOutE.label();
- Vertex otherSideNode4ThisEdge = oldOutE.inVertex();
- String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
- if( nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType) ){
- Iterator <Property<Object>> propsIter = oldOutE.properties();
- HashMap<String, String> propMap = new HashMap<String,String>();
- while( propsIter.hasNext() ){
- Property <Object> ep = propsIter.next();
- propMap.put(ep.key(), ep.value().toString());
- }
-
- String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent() ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present";
- logger.info ( "\nSwinging [" + eLabel + "] OUT edge. \n >> Unchanged side is ["
- + otherSideNodeType + "][" + otherSideUri + "] \n >> Edge used to go to [" + oldNodeType
- + "][" + oldUri + "],\n >> now swung to [" + newNodeType + "][" + newUri + "]. ");
- // remove the old edge
- oldOutE.remove();
-
- // add the new edge with properties that match the edge that was deleted. We don't want to
- // change any edge properties - just swinging one end of the edge to a new node.
- // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are
- // adding from the newNode side.
- Edge newOutE = newNode.addEdge(eLabel, otherSideNode4ThisEdge);
-
- Iterator it = propMap.entrySet().iterator();
- while (it.hasNext()) {
- Map.Entry pair = (Map.Entry)it.next();
- newOutE.property(pair.getKey().toString(), pair.getValue().toString() );
- }
-
- }
- }
- }
-
- // Find and migrate any applicable IN edges.
- if( edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("IN") ){
- Iterator <Edge> edgeInIter = null;
- if( edgeLabelRestr == null ) {
- edgeInIter = oldNode.edges(Direction.IN);
- }
- else {
- edgeInIter = oldNode.edges(Direction.IN, edgeLabelRestr);
- }
-
- while( edgeInIter.hasNext() ){
- Edge oldInE = edgeInIter.next();
- String eLabel = oldInE.label();
- Vertex otherSideNode4ThisEdge = oldInE.outVertex();
- String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
- if( nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType) ){
- Iterator <Property<Object>> propsIter = oldInE.properties();
- HashMap<String, String> propMap = new HashMap<String,String>();
- while( propsIter.hasNext() ){
- Property <Object> ep = propsIter.next();
- propMap.put(ep.key(), ep.value().toString());
- }
-
- String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent() ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present";
- logger.info ( "\nSwinging [" + eLabel + "] IN edge. \n >> Unchanged side is ["
- + otherSideNodeType + "][" + otherSideUri + "] \n >> Edge used to go to [" + oldNodeType
- + "][" + oldUri + "],\n >> now swung to [" + newNodeType + "][" + newUri + "]. ");
-
- // remove the old edge
- oldInE.remove();
-
- // add the new edge with properties that match the edge that was deleted. We don't want to
- // change any edge properties - just swinging one end of the edge to a new node.
- // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are
- // adding from the node on the other-end of the original edge so we'll get
- // an IN-edge to the newNode.
- Edge newInE = otherSideNode4ThisEdge.addEdge(eLabel, newNode);
-
- Iterator it = propMap.entrySet().iterator();
- while (it.hasNext()) {
- Map.Entry pair = (Map.Entry)it.next();
- newInE.property(pair.getKey().toString(), pair.getValue().toString() );
- }
- }
- }
- }
-
- } catch (Exception e) {
- logger.error("error encountered", e);
- success = false;
- }
- }
-
- @Override
- public Status getStatus() {
- if (success) {
- return Status.SUCCESS;
- } else {
- return Status.FAILURE;
- }
- }
-
-
- /**
- * Get the List of node pairs("from" and "to"), you would like EdgeSwingMigrator to migrate from json files
- * @return
- */
- public abstract List<Pair<Vertex, Vertex>> getAffectedNodePairs() ;
-
-
- /**
- * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use
- * @return
- */
- public abstract String getNodeTypeRestriction() ;
-
-
- /**
- * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use
- * @return
- */
- public abstract String getEdgeLabelRestriction() ;
-
- /**
- * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use
- * @return
- */
- public abstract String getEdgeDirRestriction() ;
-
-
-
- /**
- * Cleanup (remove) the nodes that edges were moved off of if appropriate
- * @return
- */
- public abstract void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL);
-
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Property;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+/**
+ * A migration template for "swinging" edges that terminate on an old node over to a new target node.
+ * That is, given an oldNode and a newNode, we take the edges that terminate on the
+ * oldNode and re-terminate them on the newNode (in practice we drop the old edges and add new ones).
+ *
+ * We allow the passing of some parameters to restrict which edges get swung over:
+ *  > otherEndNodeTypeRestriction: only swing edges that terminate on the oldNode if the
+ *    node at the other end of the edge is of this nodeType.
+ *  > edgeLabelRestriction: only swing edges that have this edgeLabel.
+ *  > edgeDirectionRestriction: only swing edges that go this direction (from the oldNode);
+ *    this is a required parameter. Valid values are: BOTH, IN, OUT.
+ */
+@MigrationPriority(0)
+@MigrationDangerRating(1)
+public abstract class EdgeSwingMigrator extends Migrator {
+
+    private boolean success = true;
+    private String nodeTypeRestriction = null;
+    private String edgeLabelRestriction = null;
+    private String edgeDirRestriction = null;
+    private List<Pair<Vertex, Vertex>> nodePairList;
+
+    public EdgeSwingMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+    }
+
+    /**
+     * Do not override this method as an inheritor of this class
+     */
+    @Override
+    public void run() {
+        executeModifyOperation();
+        cleanupAsAppropriate(this.nodePairList);
+    }
+
+    /**
+     * Applies the inheritor-supplied restrictions and swings the edges of each affected node pair.
+     */
+    protected void executeModifyOperation() {
+        try {
+            this.nodeTypeRestriction = this.getNodeTypeRestriction();
+            this.edgeLabelRestriction = this.getEdgeLabelRestriction();
+            this.edgeDirRestriction = this.getEdgeDirRestriction();
+            nodePairList = this.getAffectedNodePairs();
+            for (Pair<Vertex, Vertex> nodePair : nodePairList) {
+                Vertex fromNode = nodePair.getValue0();
+                Vertex toNode = nodePair.getValue1();
+                this.swingEdges(fromNode, toNode,
+                    this.nodeTypeRestriction, this.edgeLabelRestriction, this.edgeDirRestriction);
+            }
+        } catch (Exception e) {
+            logger.error("error encountered", e);
+            success = false;
+        }
+    }
+
+    protected void swingEdges(Vertex oldNode, Vertex newNode, String nodeTypeRestr, String edgeLabelRestr, String edgeDirRestr) {
+        try {
+            // If the old or new Vertex isn't populated, log an error and return
+            if (oldNode == null) {
+                logger.info("null oldNode passed to swingEdges()");
+                success = false;
+                return;
+            }
+            else if (newNode == null) {
+                logger.info("null newNode passed to swingEdges()");
+                success = false;
+                return;
+            }
+            else if (edgeDirRestr == null
+                    || (!edgeDirRestr.equals("BOTH")
+                        && !edgeDirRestr.equals("IN")
+                        && !edgeDirRestr.equals("OUT"))) {
+                logger.info("invalid direction passed to swingEdges(). valid values are BOTH/IN/OUT");
+                success = false;
+                return;
+            }
+            else if (edgeLabelRestr != null
+                    && (edgeLabelRestr.trim().equals("none") || edgeLabelRestr.trim().equals(""))) {
+                edgeLabelRestr = null;
+            }
+            else if (nodeTypeRestr == null || nodeTypeRestr.trim().equals("")) {
+                nodeTypeRestr = "none";
+            }
+
+            String oldNodeType = oldNode.value(AAIProperties.NODE_TYPE);
+            String oldUri = oldNode.<String>property("aai-uri").isPresent() ? oldNode.<String>property("aai-uri").value() : "URI Not present";
+
+            String newNodeType = newNode.value(AAIProperties.NODE_TYPE);
+            String newUri = newNode.<String>property("aai-uri").isPresent() ? newNode.<String>property("aai-uri").value() : "URI Not present";
+
+            // If the nodeTypes don't match, log an error and return
+            if (!oldNodeType.equals(newNodeType)) {
+                logger.info("Can not swing edge from a [" + oldNodeType + "] node to a ["
+                    + newNodeType + "] node.");
+                success = false;
+                return;
+            }
+
+            // Find and migrate any applicable OUT edges.
+            if (edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("OUT")) {
+                Iterator<Edge> edgeOutIter = null;
+                Iterator<Edge> newNodeEdgeOutIter = null;
+
+                if (edgeLabelRestr == null) {
+                    edgeOutIter = oldNode.edges(Direction.OUT);
+                    newNodeEdgeOutIter = newNode.edges(Direction.OUT);
+                }
+                else {
+                    edgeOutIter = oldNode.edges(Direction.OUT, edgeLabelRestr);
+                    newNodeEdgeOutIter = newNode.edges(Direction.OUT, edgeLabelRestr);
+                }
+
+                List<Vertex> newNodeOtherEndVertexList = new ArrayList<Vertex>();
+                while (newNodeEdgeOutIter.hasNext()) {
+                    Edge newNodeOutE = newNodeEdgeOutIter.next();
+                    Vertex otherSideNode4ThisEdgeOfNewNode = newNodeOutE.inVertex();
+                    newNodeOtherEndVertexList.add(otherSideNode4ThisEdgeOfNewNode);
+                }
+
+                while (edgeOutIter.hasNext()) {
+                    Edge oldOutE = edgeOutIter.next();
+                    String eLabel = oldOutE.label();
+                    Vertex otherSideNode4ThisEdge = oldOutE.inVertex();
+                    String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+                    if (nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType)) {
+                        Iterator<Property<Object>> propsIter = oldOutE.properties();
+                        HashMap<String, String> propMap = new HashMap<String, String>();
+                        while (propsIter.hasNext()) {
+                            Property<Object> ep = propsIter.next();
+                            propMap.put(ep.key(), ep.value().toString());
+                        }
+
+                        String otherSideUri = otherSideNode4ThisEdge.<String>property("aai-uri").isPresent() ? otherSideNode4ThisEdge.<String>property("aai-uri").value() : "URI Not present";
+                        logger.info("\nSwinging [" + eLabel + "] OUT edge. \n  >> Unchanged side is ["
+                            + otherSideNodeType + "][" + otherSideUri + "] \n  >> Edge used to go to [" + oldNodeType
+                            + "][" + oldUri + "],\n  >> now swung to [" + newNodeType + "][" + newUri + "]. ");
+                        // remove the old edge
+                        oldOutE.remove();
+
+                        // add the new edge with properties that match the edge that was deleted. We don't want to
+                        // change any edge properties - just swinging one end of the edge to a new node.
+                        // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are
+                        // adding from the newNode side.
+                        EdgeType edgeType = getEdgeType(propMap);
+                        if (edgeType != null && !newNodeOtherEndVertexList.contains(otherSideNode4ThisEdge)) {
+//                          Edge newOutE = newNode.addEdge(eLabel, otherSideNode4ThisEdge);
+                            Edge newOutE = createEdgeIfPossible(edgeType, newNode, otherSideNode4ThisEdge);
+                            if (newOutE != null) {
+                                Iterator it = propMap.entrySet().iterator();
+                                while (it.hasNext()) {
+                                    Map.Entry pair = (Map.Entry) it.next();
+                                    newOutE.property(pair.getKey().toString(), pair.getValue().toString());
+                                }
+                            }
+                            else {
+                                logger.info("\n Edge was not swung due to Multiplicity Rule Violation...");
+                            }
+                        }
+                    }
+                }
+            }
+
+            // Find and migrate any applicable IN edges.
+            if (edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("IN")) {
+                Iterator<Edge> edgeInIter = null;
+                Iterator<Edge> newNodeEdgeOutIter = null;
+                if (edgeLabelRestr == null) {
+                    edgeInIter = oldNode.edges(Direction.IN);
+                    newNodeEdgeOutIter = newNode.edges(Direction.IN);
+                }
+                else {
+                    edgeInIter = oldNode.edges(Direction.IN, edgeLabelRestr);
+                    newNodeEdgeOutIter = newNode.edges(Direction.IN, edgeLabelRestr);
+                }
+
+                List<Vertex> newNodeOtherEndVertexList = new ArrayList<Vertex>();
+                while (newNodeEdgeOutIter.hasNext()) {
+                    Edge newNodeOutE = newNodeEdgeOutIter.next();
+                    Vertex otherSideNode4ThisEdgeOfNewNode = newNodeOutE.outVertex();
+                    newNodeOtherEndVertexList.add(otherSideNode4ThisEdgeOfNewNode);
+                }
+
+                while (edgeInIter.hasNext()) {
+                    Edge oldInE = edgeInIter.next();
+                    String eLabel = oldInE.label();
+                    Vertex otherSideNode4ThisEdge = oldInE.outVertex();
+                    String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+                    if (nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType)) {
+                        Iterator<Property<Object>> propsIter = oldInE.properties();
+                        HashMap<String, String> propMap = new HashMap<String, String>();
+                        while (propsIter.hasNext()) {
+                            Property<Object> ep = propsIter.next();
+                            propMap.put(ep.key(), ep.value().toString());
+                        }
+
+                        String otherSideUri = otherSideNode4ThisEdge.<String>property("aai-uri").isPresent() ? otherSideNode4ThisEdge.<String>property("aai-uri").value() : "URI Not present";
+                        logger.info("\nSwinging [" + eLabel + "] IN edge. \n  >> Unchanged side is ["
+                            + otherSideNodeType + "][" + otherSideUri + "] \n  >> Edge used to go to [" + oldNodeType
+                            + "][" + oldUri + "],\n  >> now swung to [" + newNodeType + "][" + newUri + "]. ");
+
+                        // remove the old edge
+                        oldInE.remove();
+
+                        // add the new edge with properties that match the edge that was deleted. We don't want to
+                        // change any edge properties - just swinging one end of the edge to a new node.
+                        // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are
+                        // adding from the node on the other end of the original edge so we'll get
+                        // an IN edge to the newNode.
+                        EdgeType edgeType = getEdgeType(propMap);
+                        if (edgeType != null && !newNodeOtherEndVertexList.contains(otherSideNode4ThisEdge)) {
+//                          Edge newInE = otherSideNode4ThisEdge.addEdge(eLabel, newNode);
+                            Edge newInE = createEdgeIfPossible(edgeType, otherSideNode4ThisEdge, newNode);
+                            if (newInE != null) {
+                                Iterator it = propMap.entrySet().iterator();
+                                while (it.hasNext()) {
+                                    Map.Entry pair = (Map.Entry) it.next();
+                                    newInE.property(pair.getKey().toString(), pair.getValue().toString());
+                                }
+                            }
+                            else {
+                                logger.info("\t Edge was not swung due to Multiplicity Rule Violation...");
+                            }
+                        }
+                    }
+                }
+            }
+
+        } catch (Exception e) {
+            logger.error("error encountered", e);
+            success = false;
+        }
+    }
+
+    private EdgeType getEdgeType(HashMap edgePropMap) {
+        EdgeType type = null;
+        String containsValue = edgePropMap.get(EdgeProperty.CONTAINS.toString()).toString();
+        if ("NONE".equalsIgnoreCase(containsValue)) {
+            type = EdgeType.COUSIN;
+        } else {
+            type = EdgeType.TREE;
+        }
+        return type;
+    }
+
+    @Override
+    public Status getStatus() {
+        if (success) {
+            return Status.SUCCESS;
+        } else {
+            return Status.FAILURE;
+        }
+    }
+
+    /**
+     * Get the list of node pairs ("from" and "to") that you would like EdgeSwingMigrator to migrate, e.g. from JSON files
+     * @return the list of affected node pairs
+     */
+    public abstract List<Pair<Vertex, Vertex>> getAffectedNodePairs();
+
+    /**
+     * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use
+     * @return the nodeTypeRestriction
+     */
+    public abstract String getNodeTypeRestriction();
+
+    /**
+     * Get the edgeLabelRestriction that you want EdgeSwingMigrator to use
+     * @return the edgeLabelRestriction
+     */
+    public abstract String getEdgeLabelRestriction();
+
+    /**
+     * Get the edgeDirRestriction that you want EdgeSwingMigrator to use
+     * @return the edgeDirRestriction (BOTH, IN, or OUT)
+     */
+    public abstract String getEdgeDirRestriction();
+
+    /**
+     * Cleanup (remove) the nodes that edges were moved off of, if appropriate
+     */
+    public abstract void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL);
+}
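For orientation, the abstract hooks above are the entire contract an inheritor fills in; EdgeSwingMigrator does all of the edge bookkeeping. Below is a minimal sketch of a concrete subclass. The class name, the pserver node type, and the hostname values are illustrative assumptions, not part of this change:

package org.onap.aai.migration.v12;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.javatuples.Pair;
import org.onap.aai.db.props.AAIProperties;
import org.onap.aai.edges.EdgeIngestor;
import org.onap.aai.introspection.LoaderFactory;
import org.onap.aai.migration.EdgeSwingMigrator;
import org.onap.aai.migration.MigrationDangerRating;
import org.onap.aai.migration.MigrationPriority;
import org.onap.aai.serialization.db.EdgeSerializer;
import org.onap.aai.serialization.engines.TransactionalGraphEngine;
import org.onap.aai.setup.SchemaVersions;

// Hypothetical migrator: swing every edge from one pserver to its replacement.
@MigrationPriority(10)
@MigrationDangerRating(1)
public class ExamplePserverEdgeSwing extends EdgeSwingMigrator {

    public ExamplePserverEdgeSwing(TransactionalGraphEngine engine, LoaderFactory loaderFactory,
            EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
    }

    @Override
    public List<Pair<Vertex, Vertex>> getAffectedNodePairs() {
        // A real migrator would typically load these pairs from a JSON/CSV input
        // file; here we look up one hard-coded old/new pair for illustration.
        Vertex oldV = this.engine.asAdmin().getTraversalSource().V()
                .has(AAIProperties.NODE_TYPE, "pserver").has("hostname", "old-host.example").next();
        Vertex newV = this.engine.asAdmin().getTraversalSource().V()
                .has(AAIProperties.NODE_TYPE, "pserver").has("hostname", "new-host.example").next();
        List<Pair<Vertex, Vertex>> pairs = new ArrayList<>();
        pairs.add(Pair.with(oldV, newV));
        return pairs;
    }

    @Override
    public String getNodeTypeRestriction() {
        return "none"; // do not filter on the other end's node type
    }

    @Override
    public String getEdgeLabelRestriction() {
        return "none"; // "none" is normalized to null in swingEdges(), i.e. swing all labels
    }

    @Override
    public String getEdgeDirRestriction() {
        return "BOTH"; // required; must be BOTH, IN, or OUT
    }

    @Override
    public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) {
        // Intentionally a no-op; an inheritor could drop the now edge-less old vertices here.
    }

    @Override
    public Optional<String[]> getAffectedNodeTypes() {
        return Optional.of(new String[]{"pserver"});
    }

    @Override
    public String getMigrationName() {
        return "ExamplePserverEdgeSwing";
    }
}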
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/MigrationController.java b/src/main/java/org/onap/aai/migration/MigrationController.java index 0e65745..ecc0434 100644 --- a/src/main/java/org/onap/aai/migration/MigrationController.java +++ b/src/main/java/org/onap/aai/migration/MigrationController.java @@ -19,18 +19,22 @@ */ package org.onap.aai.migration; -import java.util.UUID; - +import org.onap.aai.config.PropertyPasswordConfiguration; import org.onap.aai.dbmap.AAIGraph; import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.exceptions.AAIException; import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.logging.ErrorLogHelper; import org.onap.aai.logging.LoggingContext; import org.onap.aai.logging.LoggingContext.StatusCode; import org.onap.aai.serialization.db.EdgeSerializer; import org.onap.aai.setup.SchemaVersions; import org.onap.aai.util.AAIConstants; +import org.onap.aai.util.ExceptionTranslator; import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import java.util.UUID; + /** * Wrapper class to allow {@link org.onap.aai.migration.MigrationControllerInternal MigrationControllerInternal} * to be run from a shell script @@ -43,7 +47,7 @@ public class MigrationController { * @param args * the arguments */ - public static void main(String[] args) { + public static void main(String[] args) throws AAIException { LoggingContext.init(); LoggingContext.partnerName("Migration"); @@ -55,11 +59,23 @@ public class MigrationController { LoggingContext.statusCode(StatusCode.COMPLETE); LoggingContext.responseCode(LoggingContext.SUCCESS); - AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext( - "org.onap.aai.config", - "org.onap.aai.setup" - ); - + AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(); + PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration(); + initializer.initialize(ctx); + try { + ctx.scan( + "org.onap.aai.config", + "org.onap.aai.setup" + ); + ctx.refresh(); + } catch (Exception e) { + AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e); + System.out.println("Problems running tool "+aai.getMessage()); + LoggingContext.statusCode(LoggingContext.StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry"); + throw aai; + } LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class); EdgeIngestor edgeIngestor = ctx.getBean(EdgeIngestor.class); EdgeSerializer edgeSerializer = ctx.getBean(EdgeSerializer.class); diff --git a/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java b/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java index b113f03..b94460a 100644 --- a/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java +++ b/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java @@ -20,16 +20,25 @@ package org.onap.aai.migration; -import com.att.eelf.configuration.Configuration; -import com.att.eelf.configuration.EELFLogger; -import com.att.eelf.configuration.EELFManager; -import com.beust.jcommander.JCommander; -import com.beust.jcommander.Parameter; +import java.io.File; +import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import 
java.util.Properties; +import java.util.Set; +import java.util.stream.Collectors; + import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.structure.io.IoCore; +import org.onap.aai.datasnapshot.DataSnapshot; import org.onap.aai.db.props.AAIProperties; import org.onap.aai.dbmap.AAIGraph; import org.onap.aai.dbmap.DBConnectionType; @@ -38,32 +47,24 @@ import org.onap.aai.exceptions.AAIException; import org.onap.aai.introspection.Loader; import org.onap.aai.introspection.LoaderFactory; import org.onap.aai.introspection.ModelType; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.setup.SchemaVersion; import org.onap.aai.logging.LoggingContext; import org.onap.aai.logging.LoggingContext.StatusCode; -import org.onap.aai.serialization.db.EdgeSerializer; -import org.onap.aai.serialization.engines.JanusGraphDBEngine; import org.onap.aai.serialization.engines.QueryStyle; +import org.onap.aai.serialization.engines.JanusGraphDBEngine; import org.onap.aai.serialization.engines.TransactionalGraphEngine; -import org.onap.aai.setup.SchemaVersion; -import org.onap.aai.setup.SchemaVersions; import org.onap.aai.util.AAIConstants; import org.onap.aai.util.FormatDate; import org.reflections.Reflections; import org.slf4j.MDC; -import java.io.File; -import java.io.IOException; -import java.lang.reflect.InvocationTargetException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Properties; -import java.util.Set; -import java.util.stream.Collectors; - +import com.att.eelf.configuration.Configuration; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.beust.jcommander.JCommander; +import com.beust.jcommander.Parameter; /** * Runs a series of migrations from a defined directory based on the presence of @@ -73,262 +74,280 @@ import java.util.stream.Collectors; */ public class MigrationControllerInternal { - private EELFLogger logger; - private final int DANGER_ZONE = 10; - public static final String VERTEX_TYPE = "migration-list-1707"; - private final List<String> resultsSummary = new ArrayList<>(); - private final List<NotificationHelper> notifications = new ArrayList<>(); - private static final String SNAPSHOT_LOCATION = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "migrationSnapshots"; - - private LoaderFactory loaderFactory; - private EdgeIngestor edgeIngestor; - private EdgeSerializer edgeSerializer; - private final SchemaVersions schemaVersions; - - public MigrationControllerInternal(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){ - this.loaderFactory = loaderFactory; - this.edgeIngestor = edgeIngestor; - this.edgeSerializer = edgeSerializer; - this.schemaVersions = schemaVersions; - } - - /** - * The main method. 
- * - * @param args - * the arguments - */ - public void run(String[] args) { - // Set the logging file properties to be used by EELFManager - System.setProperty("aai.service.name", MigrationController.class.getSimpleName()); - Properties props = System.getProperties(); - props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml"); - props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES); - - logger = EELFManager.getInstance().getLogger(MigrationControllerInternal.class.getSimpleName()); - MDC.put("logFilenameAppender", MigrationController.class.getSimpleName()); - - boolean loadSnapshot = false; - - CommandLineArgs cArgs = new CommandLineArgs(); - - JCommander jCommander = new JCommander(cArgs, args); - jCommander.setProgramName(MigrationController.class.getSimpleName()); - - // Set flag to load from snapshot based on the presence of snapshot and - // graph storage backend of inmemory - if (cArgs.dataSnapshot != null && !cArgs.dataSnapshot.isEmpty()) { - try { - PropertiesConfiguration config = new PropertiesConfiguration(cArgs.config); - if (config.getString("storage.backend").equals("inmemory")) { - loadSnapshot = true; - System.setProperty("load.snapshot.file", "true"); - System.setProperty("snapshot.location", cArgs.dataSnapshot); - } - } catch (ConfigurationException e) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.DATA_ERROR); - logAndPrint("ERROR: Could not load janusgraph configuration.\n" + ExceptionUtils.getFullStackTrace(e)); - return; - } - } - System.setProperty("realtime.db.config", cArgs.config); - logAndPrint("\n\n---------- Connecting to Graph ----------"); - AAIGraph.getInstance(); - - logAndPrint("---------- Connection Established ----------"); - SchemaVersion version = schemaVersions.getDefaultVersion(); - QueryStyle queryStyle = QueryStyle.TRAVERSAL; - ModelType introspectorFactoryType = ModelType.MOXY; - Loader loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version); - TransactionalGraphEngine engine = new JanusGraphDBEngine(queryStyle, DBConnectionType.REALTIME, loader); - - if (cArgs.help) { - jCommander.usage(); - engine.rollback(); - return; - } - - Reflections reflections = new Reflections("org.onap.aai.migration"); - List<Class<? extends Migrator>> migratorClasses = new ArrayList<>(findClasses(reflections)); - //Displays list of migration classes which needs to be executed.Pass flag "-l" following by the class names - if (cArgs.list) { - listMigrationWithStatus(cArgs, migratorClasses, engine); - return; - } - - logAndPrint("---------- Looking for migration scripts to be executed. ----------"); - //Excluding any migration class when run migration from script.Pass flag "-e" following by the class names - if (!cArgs.excludeClasses.isEmpty()) { - migratorClasses = filterMigrationClasses(cArgs.excludeClasses, migratorClasses); - listMigrationWithStatus(cArgs, migratorClasses, engine); - } - List<Class<? 
extends Migrator>> migratorClassesToRun = createMigratorList(cArgs, migratorClasses); - - sortList(migratorClassesToRun); - - if (!cArgs.scripts.isEmpty() && migratorClassesToRun.isEmpty()) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - logAndPrint("\tERROR: Failed to find migrations " + cArgs.scripts + "."); - logAndPrint("---------- Done ----------"); - LoggingContext.successStatusFields(); - } - - logAndPrint("\tFound " + migratorClassesToRun.size() + " migration scripts."); - logAndPrint("---------- Executing Migration Scripts ----------"); - - - if (!cArgs.skipPreMigrationSnapShot) { - takePreSnapshotIfRequired(engine, cArgs, migratorClassesToRun); - } - - for (Class<? extends Migrator> migratorClass : migratorClassesToRun) { - String name = migratorClass.getSimpleName(); - Migrator migrator; - if (cArgs.runDisabled.contains(name) || migratorClass.isAnnotationPresent(Enabled.class)) {//Check either of enabled annotation or runDisabled flag - - try { - engine.startTransaction(); - if (!cArgs.forced && hasAlreadyRun(name, engine)) { - logAndPrint("Migration " + name + " has already been run on this database and will not be executed again. Use -f to force execution"); - continue; - } - migrator = migratorClass - .getConstructor( - TransactionalGraphEngine.class, - LoaderFactory.class, - EdgeIngestor.class, - EdgeSerializer.class, - SchemaVersions.class - ).newInstance(engine, loaderFactory, edgeIngestor, edgeSerializer,schemaVersions); - } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.DATA_ERROR); - logAndPrint("EXCEPTION caught initalizing migration class " + migratorClass.getSimpleName() + ".\n" + ExceptionUtils.getFullStackTrace(e)); - LoggingContext.successStatusFields(); - engine.rollback(); - continue; - } - logAndPrint("\tRunning " + migratorClass.getSimpleName() + " migration script."); - logAndPrint("\t\t See " + System.getProperty("AJSC_HOME") + "/logs/migration/" + migratorClass.getSimpleName() + "/* for logs."); - MDC.put("logFilenameAppender", migratorClass.getSimpleName() + "/" + migratorClass.getSimpleName()); - - migrator.run(); - - commitChanges(engine, migrator, cArgs); - } else { - logAndPrint("\tSkipping " + migratorClass.getSimpleName() + " migration script because it has been disabled."); - } - } - MDC.put("logFilenameAppender", MigrationController.class.getSimpleName()); - for (NotificationHelper notificationHelper : notifications) { - try { - notificationHelper.triggerEvents(); - } catch (AAIException e) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR); - logAndPrint("\tcould not event"); - logger.error("could not event", e); - LoggingContext.successStatusFields(); - } - } - logAndPrint("---------- Done ----------"); - - // Save post migration snapshot if snapshot was loaded - if (!cArgs.skipPostMigrationSnapShot) { - generateSnapshot(engine, "post"); - } - - outputResultsSummary(); - } - - /** - * This method is used to remove excluded classes from migration from the - * script command. - * - * @param excludeClasses - * : Classes to be removed from Migration - * @param migratorClasses - * : Classes to execute migration. - * @return - */ - private List<Class<? 
extends Migrator>> filterMigrationClasses( - List<String> excludeClasses, - List<Class<? extends Migrator>> migratorClasses) { - - List<Class<? extends Migrator>> filteredMigratorClasses = migratorClasses - .stream() - .filter(migratorClass -> !excludeClasses.contains(migratorClass - .getSimpleName())).collect(Collectors.toList()); - - return filteredMigratorClasses; - } - - private void listMigrationWithStatus(CommandLineArgs cArgs, - List<Class<? extends Migrator>> migratorClasses, TransactionalGraphEngine engine) { - sortList(migratorClasses); - engine.startTransaction(); - System.out.println("---------- List of all migrations ----------"); - migratorClasses.forEach(migratorClass -> { - boolean enabledAnnotation = migratorClass.isAnnotationPresent(Enabled.class); - String enabled = enabledAnnotation ? "Enabled" : "Disabled"; - StringBuilder sb = new StringBuilder(); - sb.append(migratorClass.getSimpleName()); - sb.append(" in package "); - sb.append(migratorClass.getPackage().getName().substring(migratorClass.getPackage().getName().lastIndexOf('.')+1)); - sb.append(" is "); - sb.append(enabled); - sb.append(" "); - sb.append("[" + getDbStatus(migratorClass.getSimpleName(), engine) + "]"); - System.out.println(sb.toString()); - }); - engine.rollback(); - System.out.println("---------- Done ----------"); - } - - private String getDbStatus(String name, TransactionalGraphEngine engine) { - if (hasAlreadyRun(name, engine)) { - return "Already executed in this env"; - } - return "Will be run on next execution if Enabled"; - } - - private boolean hasAlreadyRun(String name, TransactionalGraphEngine engine) { - return engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).has(name, true).hasNext(); - } - private Set<Class<? extends Migrator>> findClasses(Reflections reflections) { - Set<Class<? extends Migrator>> migratorClasses = reflections.getSubTypesOf(Migrator.class); - /* - * TODO- Change this to make sure only classes in the specific $release are added in the runList - * Or add a annotation like exclude which folks again need to remember to add ?? - */ - - migratorClasses.remove(PropertyMigrator.class); - migratorClasses.remove(EdgeMigrator.class); - return migratorClasses; - } - - - private void takePreSnapshotIfRequired(TransactionalGraphEngine engine, CommandLineArgs cArgs, List<Class<? extends Migrator>> migratorClassesToRun) { - - /*int sum = 0; - for (Class<? extends Migrator> migratorClass : migratorClassesToRun) { - if (migratorClass.isAnnotationPresent(Enabled.class)) { - sum += migratorClass.getAnnotation(MigrationPriority.class).value(); - } - } - - if (sum >= DANGER_ZONE) { - - logAndPrint("Entered Danger Zone. 
Taking snapshot."); - }*/ - - //always take snapshot for now - - generateSnapshot(engine, "pre"); - - } + private EELFLogger logger; + private final int DANGER_ZONE = 10; + public static final String VERTEX_TYPE = "migration-list-1707"; + private final List<String> resultsSummary = new ArrayList<>(); + private final List<NotificationHelper> notifications = new ArrayList<>(); + private static final String SNAPSHOT_LOCATION = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "migrationSnapshots"; + + private LoaderFactory loaderFactory; + private EdgeIngestor edgeIngestor; + private EdgeSerializer edgeSerializer; + private final SchemaVersions schemaVersions; + + public MigrationControllerInternal(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){ + this.loaderFactory = loaderFactory; + this.edgeIngestor = edgeIngestor; + this.edgeSerializer = edgeSerializer; + this.schemaVersions = schemaVersions; + + } + + /** + * The main method. + * + * @param args + * the arguments + */ + public void run(String[] args) { + // Set the logging file properties to be used by EELFManager + System.setProperty("aai.service.name", MigrationController.class.getSimpleName()); + Properties props = System.getProperties(); + props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml"); + props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES); + + logger = EELFManager.getInstance().getLogger(MigrationControllerInternal.class.getSimpleName()); + MDC.put("logFilenameAppender", MigrationController.class.getSimpleName()); + + boolean loadSnapshot = false; + + CommandLineArgs cArgs = new CommandLineArgs(); + + JCommander jCommander = new JCommander(cArgs, args); + jCommander.setProgramName(MigrationController.class.getSimpleName()); + + // Set flag to load from snapshot based on the presence of snapshot and + // graph storage backend of inmemory + if (cArgs.dataSnapshot != null && !cArgs.dataSnapshot.isEmpty()) { + try { + PropertiesConfiguration config = new PropertiesConfiguration(cArgs.config); + if (config.getString("storage.backend").equals("inmemory")) { + loadSnapshot = true; +// System.setProperty("load.snapshot.file", "true"); + System.setProperty("snapshot.location", cArgs.dataSnapshot); + String snapshotLocation =cArgs.dataSnapshot; + String snapshotDir; + String snapshotFile; + int index = snapshotLocation.lastIndexOf("\\"); + if (index == -1){ + //Use default directory path + snapshotDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "snapshots"; + snapshotFile = snapshotLocation; + } else { + snapshotDir = snapshotLocation.substring(0, index+1); + snapshotFile = snapshotLocation.substring(index+1, snapshotLocation.length()) ; + } + String [] dataSnapShotArgs = {"-c","MULTITHREAD_RELOAD","-f", snapshotFile, "-oldFileDir",snapshotDir, "-caller","migration"}; + DataSnapshot dataSnapshot = new DataSnapshot(); + dataSnapshot.executeCommand(dataSnapShotArgs, true, false, null, "MULTITHREAD_RELOAD", snapshotFile); + } + } catch (ConfigurationException e) { + LoggingContext.statusCode(StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + logAndPrint("ERROR: Could not load janusgraph configuration.\n" + ExceptionUtils.getFullStackTrace(e)); + return; + } + } + else { + System.setProperty("realtime.db.config", cArgs.config); + logAndPrint("\n\n---------- Connecting to Graph 
----------"); + AAIGraph.getInstance(); + } + + logAndPrint("---------- Connection Established ----------"); + SchemaVersion version = schemaVersions.getDefaultVersion(); + QueryStyle queryStyle = QueryStyle.TRAVERSAL; + ModelType introspectorFactoryType = ModelType.MOXY; + Loader loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version); + TransactionalGraphEngine engine = new JanusGraphDBEngine(queryStyle, DBConnectionType.REALTIME, loader); + + if (cArgs.help) { + jCommander.usage(); + engine.rollback(); + return; + } + + Reflections reflections = new Reflections("org.onap.aai.migration"); + List<Class<? extends Migrator>> migratorClasses = new ArrayList<>(findClasses(reflections)); + //Displays list of migration classes which needs to be executed.Pass flag "-l" following by the class names + if (cArgs.list) { + listMigrationWithStatus(cArgs, migratorClasses, engine); + return; + } + + logAndPrint("---------- Looking for migration scripts to be executed. ----------"); + //Excluding any migration class when run migration from script.Pass flag "-e" following by the class names + if (!cArgs.excludeClasses.isEmpty()) { + migratorClasses = filterMigrationClasses(cArgs.excludeClasses, migratorClasses); + listMigrationWithStatus(cArgs, migratorClasses, engine); + } + List<Class<? extends Migrator>> migratorClassesToRun = createMigratorList(cArgs, migratorClasses); + + sortList(migratorClassesToRun); + + if (!cArgs.scripts.isEmpty() && migratorClassesToRun.isEmpty()) { + LoggingContext.statusCode(StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); + logAndPrint("\tERROR: Failed to find migrations " + cArgs.scripts + "."); + logAndPrint("---------- Done ----------"); + LoggingContext.successStatusFields(); + } + + logAndPrint("\tFound " + migratorClassesToRun.size() + " migration scripts."); + logAndPrint("---------- Executing Migration Scripts ----------"); + + + if (!cArgs.skipPreMigrationSnapShot) { + takePreSnapshotIfRequired(engine, cArgs, migratorClassesToRun); + } + + for (Class<? extends Migrator> migratorClass : migratorClassesToRun) { + String name = migratorClass.getSimpleName(); + Migrator migrator; + if (cArgs.runDisabled.contains(name) || migratorClass.isAnnotationPresent(Enabled.class)) { + + try { + engine.startTransaction(); + if (!cArgs.forced && hasAlreadyRun(name, engine)) { + logAndPrint("Migration " + name + " has already been run on this database and will not be executed again. 
Use -f to force execution"); + continue; + } + migrator = migratorClass + .getConstructor( + TransactionalGraphEngine.class, + LoaderFactory.class, + EdgeIngestor.class, + EdgeSerializer.class, + SchemaVersions.class + ).newInstance(engine, loaderFactory, edgeIngestor, edgeSerializer,schemaVersions); + } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { + LoggingContext.statusCode(StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + logAndPrint("EXCEPTION caught initalizing migration class " + migratorClass.getSimpleName() + ".\n" + ExceptionUtils.getFullStackTrace(e)); + LoggingContext.successStatusFields(); + engine.rollback(); + continue; + } + logAndPrint("\tRunning " + migratorClass.getSimpleName() + " migration script."); + logAndPrint("\t\t See " + System.getProperty("AJSC_HOME") + "/logs/migration/" + migratorClass.getSimpleName() + "/* for logs."); + MDC.put("logFilenameAppender", migratorClass.getSimpleName() + "/" + migratorClass.getSimpleName()); + + migrator.run(); + + commitChanges(engine, migrator, cArgs); + } else { + logAndPrint("\tSkipping " + migratorClass.getSimpleName() + " migration script because it has been disabled."); + } + } + MDC.put("logFilenameAppender", MigrationController.class.getSimpleName()); + for (NotificationHelper notificationHelper : notifications) { + try { + notificationHelper.triggerEvents(); + } catch (AAIException e) { + LoggingContext.statusCode(StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR); + logAndPrint("\tcould not event"); + logger.error("could not event", e); + LoggingContext.successStatusFields(); + } + } + logAndPrint("---------- Done ----------"); + + // Save post migration snapshot if snapshot was loaded + if (!cArgs.skipPostMigrationSnapShot) { + generateSnapshot(engine, "post"); + } + + outputResultsSummary(); + } + + /** + * This method is used to remove excluded classes from migration from the + * script command. + * + * @param excludeClasses + * : Classes to be removed from Migration + * @param migratorClasses + * : Classes to execute migration. + * @return + */ + private List<Class<? extends Migrator>> filterMigrationClasses( + List<String> excludeClasses, + List<Class<? extends Migrator>> migratorClasses) { + + List<Class<? extends Migrator>> filteredMigratorClasses = migratorClasses + .stream() + .filter(migratorClass -> !excludeClasses.contains(migratorClass + .getSimpleName())).collect(Collectors.toList()); + + return filteredMigratorClasses; + } + + private void listMigrationWithStatus(CommandLineArgs cArgs, + List<Class<? extends Migrator>> migratorClasses, TransactionalGraphEngine engine) { + sortList(migratorClasses); + engine.startTransaction(); + System.out.println("---------- List of all migrations ----------"); + migratorClasses.forEach(migratorClass -> { + boolean enabledAnnotation = migratorClass.isAnnotationPresent(Enabled.class); + String enabled = enabledAnnotation ? 
"Enabled" : "Disabled"; + StringBuilder sb = new StringBuilder(); + sb.append(migratorClass.getSimpleName()); + sb.append(" in package "); + sb.append(migratorClass.getPackage().getName().substring(migratorClass.getPackage().getName().lastIndexOf('.')+1)); + sb.append(" is "); + sb.append(enabled); + sb.append(" "); + sb.append("[" + getDbStatus(migratorClass.getSimpleName(), engine) + "]"); + System.out.println(sb.toString()); + }); + engine.rollback(); + System.out.println("---------- Done ----------"); + } + + private String getDbStatus(String name, TransactionalGraphEngine engine) { + if (hasAlreadyRun(name, engine)) { + return "Already executed in this env"; + } + return "Will be run on next execution if Enabled"; + } + + private boolean hasAlreadyRun(String name, TransactionalGraphEngine engine) { + return engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).has(name, true).hasNext(); + } + private Set<Class<? extends Migrator>> findClasses(Reflections reflections) { + Set<Class<? extends Migrator>> migratorClasses = reflections.getSubTypesOf(Migrator.class); + /* + * TODO- Change this to make sure only classes in the specific $release are added in the runList + * Or add a annotation like exclude which folks again need to remember to add ?? + */ + + migratorClasses.remove(PropertyMigrator.class); + migratorClasses.remove(EdgeMigrator.class); + return migratorClasses; + } + + + private void takePreSnapshotIfRequired(TransactionalGraphEngine engine, CommandLineArgs cArgs, List<Class<? extends Migrator>> migratorClassesToRun) { + + /*int sum = 0; + for (Class<? extends Migrator> migratorClass : migratorClassesToRun) { + if (migratorClass.isAnnotationPresent(Enabled.class)) { + sum += migratorClass.getAnnotation(MigrationPriority.class).value(); + } + } + + if (sum >= DANGER_ZONE) { + + logAndPrint("Entered Danger Zone. Taking snapshot."); + }*/ + + //always take snapshot for now + + generateSnapshot(engine, "pre"); + + } private List<Class<? extends Migrator>> createMigratorList(CommandLineArgs cArgs, @@ -345,6 +364,7 @@ public class MigrationControllerInternal { } return migratorClassesToRun; } + private boolean migratorExplicitlySpecified(CommandLineArgs cArgs, String migratorName){ return !cArgs.scripts.isEmpty() && cArgs.scripts.contains(migratorName); } @@ -352,122 +372,125 @@ public class MigrationControllerInternal { return !cArgs.runDisabled.isEmpty() && cArgs.runDisabled.contains(migratorName); } - private void sortList(List<Class<? extends Migrator>> migratorClasses) { - Collections.sort(migratorClasses, (m1, m2) -> { - try { - if (m1.getAnnotation(MigrationPriority.class).value() > m2.getAnnotation(MigrationPriority.class).value()) { - return 1; - } else if (m1.getAnnotation(MigrationPriority.class).value() < m2.getAnnotation(MigrationPriority.class).value()) { - return -1; - } else { - return m1.getSimpleName().compareTo(m2.getSimpleName()); - } - } catch (Exception e) { - return 0; - } - }); - } - - - private void generateSnapshot(TransactionalGraphEngine engine, String phase) { - - FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT"); - String dateStr= fd.getDateTime(); - String fileName = SNAPSHOT_LOCATION + File.separator + phase + "Migration." 
+ dateStr + ".graphson"; - logAndPrint("Saving snapshot of graph " + phase + " migration to " + fileName); - Graph transaction = null; - try { - - Path pathToFile = Paths.get(fileName); - if (!pathToFile.toFile().exists()) { - Files.createDirectories(pathToFile.getParent()); - } - transaction = engine.startTransaction(); - transaction.io(IoCore.graphson()).writeGraph(fileName); - engine.rollback(); - } catch (IOException e) { - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR); - logAndPrint("ERROR: Could not write in memory graph to " + phase + "Migration file. \n" + ExceptionUtils.getFullStackTrace(e)); - LoggingContext.successStatusFields(); - engine.rollback(); - } - - logAndPrint( phase + " migration snapshot saved to " + fileName); - } - /** - * Log and print. - * - * @param msg - * the msg - */ - protected void logAndPrint(String msg) { - System.out.println(msg); - logger.info(msg); - } - - /** - * Commit changes. - * - * @param engine - * the graph transaction - * @param migrator - * the migrator - * @param cArgs - */ - protected void commitChanges(TransactionalGraphEngine engine, Migrator migrator, CommandLineArgs cArgs) { - - String simpleName = migrator.getClass().getSimpleName(); - String message; - if (migrator.getStatus().equals(Status.FAILURE)) { - message = "Migration " + simpleName + " Failed. Rolling back."; - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.DATA_ERROR); - logAndPrint("\t" + message); - LoggingContext.successStatusFields(); - migrator.rollback(); - } else if (migrator.getStatus().equals(Status.CHECK_LOGS)) { - message = "Migration " + simpleName + " encountered an anomaly, check logs. Rolling back."; - LoggingContext.statusCode(StatusCode.ERROR); - LoggingContext.responseCode(LoggingContext.DATA_ERROR); - logAndPrint("\t" + message); - LoggingContext.successStatusFields(); - migrator.rollback(); - } else { - MDC.put("logFilenameAppender", simpleName + "/" + simpleName); - - if (cArgs.commit) { - if (!engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).hasNext()) { - engine.asAdmin().getTraversalSource().addV(AAIProperties.NODE_TYPE, VERTEX_TYPE).iterate(); - } - engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE) - .property(simpleName, true).iterate(); - MDC.put("logFilenameAppender", MigrationController.class.getSimpleName()); - notifications.add(migrator.getNotificationHelper()); - migrator.commit(); - message = "Migration " + simpleName + " Succeeded. Changes Committed."; - logAndPrint("\t"+ message +"\t"); - } else { - message = "--commit not specified. Not committing changes for " + simpleName + " to database."; - logAndPrint("\t" + message); - migrator.rollback(); - } - - } - - resultsSummary.add(message); - - } - - private void outputResultsSummary() { - logAndPrint("---------------------------------"); - logAndPrint("-------------Summary-------------"); - for (String result : resultsSummary) { - logAndPrint(result); - } - logAndPrint("---------------------------------"); - logAndPrint("---------------------------------"); - } + private void sortList(List<Class<? 
extends Migrator>> migratorClasses) { + Collections.sort(migratorClasses, (m1, m2) -> { + try { + if (m1.getAnnotation(MigrationPriority.class).value() > m2.getAnnotation(MigrationPriority.class).value()) { + return 1; + } else if (m1.getAnnotation(MigrationPriority.class).value() < m2.getAnnotation(MigrationPriority.class).value()) { + return -1; + } else { + return m1.getSimpleName().compareTo(m2.getSimpleName()); + } + } catch (Exception e) { + return 0; + } + }); + } + + + private void generateSnapshot(TransactionalGraphEngine engine, String phase) { + + FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT"); + String dateStr= fd.getDateTime(); + String fileName = SNAPSHOT_LOCATION + File.separator + phase + "Migration." + dateStr + ".graphson"; + logAndPrint("Saving snapshot of graph " + phase + " migration to " + fileName); + Graph transaction = null; + try { + + Path pathToFile = Paths.get(fileName); + if (!pathToFile.toFile().exists()) { + Files.createDirectories(pathToFile.getParent()); + } + String [] dataSnapshotArgs = {"-c","THREADED_SNAPSHOT", "-fileName",fileName, "-caller","migration"}; + DataSnapshot dataSnapshot = new DataSnapshot(); + dataSnapshot.executeCommand(dataSnapshotArgs, true, false, null, "THREADED_SNAPSHOT", null); +// transaction = engine.startTransaction(); +// transaction.io(IoCore.graphson()).writeGraph(fileName); +// engine.rollback(); + } catch (IOException e) { + LoggingContext.statusCode(StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR); + logAndPrint("ERROR: Could not write in memory graph to " + phase + "Migration file. \n" + ExceptionUtils.getFullStackTrace(e)); + LoggingContext.successStatusFields(); + engine.rollback(); + } + + logAndPrint( phase + " migration snapshot saved to " + fileName); + } + /** + * Log and print. + * + * @param msg + * the msg + */ + protected void logAndPrint(String msg) { + System.out.println(msg); + logger.info(msg); + } + + /** + * Commit changes. + * + * @param engine + * the graph transaction + * @param migrator + * the migrator + * @param cArgs + */ + protected void commitChanges(TransactionalGraphEngine engine, Migrator migrator, CommandLineArgs cArgs) { + + String simpleName = migrator.getClass().getSimpleName(); + String message; + if (migrator.getStatus().equals(Status.FAILURE)) { + message = "Migration " + simpleName + " Failed. Rolling back."; + LoggingContext.statusCode(StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + logAndPrint("\t" + message); + LoggingContext.successStatusFields(); + migrator.rollback(); + } else if (migrator.getStatus().equals(Status.CHECK_LOGS)) { + message = "Migration " + simpleName + " encountered an anomaly, check logs. 
Rolling back."; + LoggingContext.statusCode(StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + logAndPrint("\t" + message); + LoggingContext.successStatusFields(); + migrator.rollback(); + } else { + MDC.put("logFilenameAppender", simpleName + "/" + simpleName); + + if (cArgs.commit) { + if (!engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).hasNext()) { + engine.asAdmin().getTraversalSource().addV(AAIProperties.NODE_TYPE, VERTEX_TYPE).iterate(); + } + engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE) + .property(simpleName, true).iterate(); + MDC.put("logFilenameAppender", MigrationController.class.getSimpleName()); + notifications.add(migrator.getNotificationHelper()); + migrator.commit(); + message = "Migration " + simpleName + " Succeeded. Changes Committed."; + logAndPrint("\t"+ message +"\t"); + } else { + message = "--commit not specified. Not committing changes for " + simpleName + " to database."; + logAndPrint("\t" + message); + migrator.rollback(); + } + + } + + resultsSummary.add(message); + + } + + private void outputResultsSummary() { + logAndPrint("---------------------------------"); + logAndPrint("-------------Summary-------------"); + for (String result : resultsSummary) { + logAndPrint(result); + } + logAndPrint("---------------------------------"); + logAndPrint("---------------------------------"); + } } diff --git a/src/main/java/org/onap/aai/migration/Migrator.java b/src/main/java/org/onap/aai/migration/Migrator.java index 106d5e4..791fec0 100644 --- a/src/main/java/org/onap/aai/migration/Migrator.java +++ b/src/main/java/org/onap/aai/migration/Migrator.java @@ -135,11 +135,11 @@ public abstract class Migrator implements Runnable { if (dmaapMsgList.size() > 0) { try { Files.write(Paths.get(logDirectory+"/"+fileName), (Iterable<String>)dmaapMsgList.stream()::iterator); - } catch (IOException e) { - logger.error("Unable to generate file with dmaap msgs for MigrateHUBEvcInventory", e); + } catch (IOException e) { + logger.error("Unable to generate file with dmaap msgs for " + getMigrationName(), e); } } else { - logger.info("No dmaap msgs detected for MigrateForwardEvcCircuitId"); + logger.info("No dmaap msgs detected for " + getMigrationName()); } } @@ -304,6 +304,28 @@ public abstract class Migrator implements Runnable { } return newEdge; } + + /** + * Creates the edge + * + * @param type the edge type - COUSIN or TREE + * @param out the out + * @param in the in + * @return the edge + */ + protected Edge createEdgeIfPossible(EdgeType type, Vertex out, Vertex in) throws AAIException { + Edge newEdge = null; + try { + if (type.equals(EdgeType.COUSIN)){ + newEdge = edgeSerializer.addEdgeIfPossible(this.engine.asAdmin().getTraversalSource(), out, in); + } else { + newEdge = edgeSerializer.addTreeEdgeIfPossible(this.engine.asAdmin().getTraversalSource(), out, in); + } + } catch (NoEdgeRuleFoundException e) { + throw new AAIException("AAI_6129", e); + } + return newEdge; + } /** * Creates the edge diff --git a/src/main/java/org/onap/aai/migration/ValueMigrator.java b/src/main/java/org/onap/aai/migration/ValueMigrator.java index 6d02563..458796a 100644 --- a/src/main/java/org/onap/aai/migration/ValueMigrator.java +++ b/src/main/java/org/onap/aai/migration/ValueMigrator.java @@ -19,7 +19,12 @@ */ package org.onap.aai.migration; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.Map.Entry; + import 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.onap.aai.db.props.AAIProperties; @@ -38,9 +43,16 @@ import org.onap.aai.setup.SchemaVersions; public abstract class ValueMigrator extends Migrator { protected final Map<String, Map<String, ?>> propertyValuePairByNodeType; + protected Map<String, List<?>> conditionsMap; protected final Boolean updateExistingValues; - protected final JanusGraphManagement graphMgmt; - + protected final JanusGraphManagement graphMgmt; + + private int migrationSuccess = 0; + private Map<String, String> nodeTotalSuccess = new HashMap<>(); + private int subTotal = 0; + + private static List<String> dmaapMsgList = new ArrayList<String>(); + /** * * @param engine @@ -53,6 +65,23 @@ public abstract class ValueMigrator extends Migrator { this.updateExistingValues = updateExistingValues; this.graphMgmt = engine.asAdmin().getManagementSystem(); } + + //Migrate with property conditions + public ValueMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions, Map propertyValuePairByNodeType, Map conditionsMap, Boolean updateExistingValues) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.propertyValuePairByNodeType = propertyValuePairByNodeType; + this.updateExistingValues = updateExistingValues; + this.conditionsMap = conditionsMap; + this.graphMgmt = engine.asAdmin().getManagementSystem(); + } + + @Override + public void commit() { + engine.commit(); + if(isUpdateDmaap()){ + createDmaapFiles(this.dmaapMsgList); + } + } /** * Do not override this method as an inheritor of this class @@ -64,41 +93,95 @@ public abstract class ValueMigrator extends Migrator { protected void updateValues() { for (Map.Entry<String, Map<String, ?>> entry: propertyValuePairByNodeType.entrySet()) { - String nodeType = entry.getKey(); + String nodeType = entry.getKey(); + this.subTotal = 0; + Map<String, ?> propertyValuePair = entry.getValue(); for (Map.Entry<String, ?> pair : propertyValuePair.entrySet()) { - String property = pair.getKey(); + String property = pair.getKey(); Object newValue = pair.getValue(); try { GraphTraversal<Vertex, Vertex> g = this.engine.asAdmin().getTraversalSource().V() - .has(AAIProperties.NODE_TYPE, nodeType); + .has(AAIProperties.NODE_TYPE, nodeType); while (g.hasNext()) { Vertex v = g.next(); - if (v.property(property).isPresent() && !updateExistingValues) { - String propertyValue = v.property(property).value().toString(); - if (propertyValue.isEmpty()) { - v.property(property, newValue); - logger.info(String.format("Node Type %s: Property %s is empty, adding value %s", - nodeType, property, newValue.toString())); - this.touchVertexProperties(v, false); - } else { - logger.info(String.format("Node Type %s: Property %s value already exists - skipping", - nodeType, property)); - } - } else { - logger.info(String.format("Node Type %s: Property %s does not exist or " + - "updateExistingValues flag is set to True - adding the property with value %s", - nodeType, property, newValue.toString())); - v.property(property, newValue); - this.touchVertexProperties(v, false); - } + + if (this.conditionsMap !=null){ + checkConditions( v, property, newValue, nodeType); + }else{ + migrateValues( v, property, newValue, nodeType); + } } } catch (Exception e) { logger.error(String.format("caught exception updating aai-node-type %s's property %s's value to " + 
"%s: %s", nodeType, property, newValue.toString(), e.getMessage())); logger.error(e.getMessage()); } + } + this.nodeTotalSuccess.put(nodeType, Integer.toString(this.subTotal)); + } + + logger.info ("\n \n ******* Final Summary for " + " " + getMigrationName() +" ********* \n"); + for (Map.Entry<String, String> migratedNode: nodeTotalSuccess.entrySet()) { + logger.info("Total Migrated Records for " + migratedNode.getKey() +": " + migratedNode.getValue()); + + } + logger.info(this.MIGRATION_SUMMARY_COUNT + "Total Migrated Records: "+ migrationSuccess); + + } + + private void migrateValues (Vertex v, String property, Object newValue, String nodeType) throws Exception{ + + if (v.property(property).isPresent() && !updateExistingValues) { + String propertyValue = v.property(property).value().toString(); + if (propertyValue.isEmpty()) { + v.property(property, newValue); + logger.info(String.format("Node Type %s: Property %s is empty, adding value %s", + nodeType, property, newValue.toString())); + this.touchVertexProperties(v, false); + updateDmaapList(v); + this.migrationSuccess++; + this.subTotal++; + } else { + logger.info(String.format("Node Type %s: Property %s value already exists - skipping", + nodeType, property)); } + } else { + logger.info(String.format("Node Type %s: Property %s does not exist or " + + "updateExistingValues flag is set to True - adding the property with value %s", + nodeType, property, newValue.toString())); + v.property(property, newValue); + this.touchVertexProperties(v, false); + updateDmaapList(v); + this.migrationSuccess++; + this.subTotal++; } } -} + + private void checkConditions(Vertex v, String property, Object newValue, String nodeType) throws Exception{ + + for (Map.Entry<String, List<?>> entry: conditionsMap.entrySet()){ + String conditionType = entry.getKey(); + List <?> conditionsValueList = conditionsMap.get(conditionType); + + if(v.property(conditionType).isPresent()){ + for (int i = 0; i < conditionsValueList.size(); i++){ + if (v.property(conditionType).value().equals(conditionsValueList.get(i))){ + migrateValues( v, property, newValue, nodeType); + break; + } + } + } + } + } + + private void updateDmaapList(Vertex v){ + String dmaapMsg = System.nanoTime() + "_" + v.id().toString() + "_" + v.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + logger.info("\tAdding Updated Vertex " + v.id().toString() + " to dmaapMsgList...."); + } + + public boolean isUpdateDmaap(){ + return false; + } +}
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/v12/ALTSLicenseEntitlementMigration.java b/src/main/java/org/onap/aai/migration/v12/ALTSLicenseEntitlementMigration.java new file mode 100644 index 0000000..ef45209 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v12/ALTSLicenseEntitlementMigration.java @@ -0,0 +1,200 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v12; +import java.io.BufferedReader; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.*; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.migration.Migrator; +import org.onap.aai.migration.Status; +import org.onap.aai.edges.enums.EdgeType; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.setup.SchemaVersions; + + +@MigrationPriority(11) +@MigrationDangerRating(0) +public class ALTSLicenseEntitlementMigration extends Migrator{ + + private final String LICENSE_NODE_TYPE = "license"; + private final String ENTITLEMENT_NODE_TYPE = "entitlement"; + private boolean success = true; + private final GraphTraversalSource g; + private int headerLength; + + + public ALTSLicenseEntitlementMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + @Override + public void run() { + logger.info("---------- Update ALTS Entitlements and Licenses resource-uuid in generic-vnf ----------"); + String homeDir = System.getProperty("AJSC_HOME"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return; + } + if (configDir == null) { + success = false; + return; + } + int fileLineCounter = 0; + String fileName = homeDir + "/" + configDir + "/" + 
"migration-input-files/ALTS-migration-data/ALTS-migration-input.csv"; + Map<String, Set<String>> history = new HashMap<>(); + logger.info(fileName); + logger.info("---------- Processing VNFs from file ----------"); + try (BufferedReader br = new BufferedReader(new FileReader(fileName))) { + String vnfLine; + while ((vnfLine = br.readLine()) != null) { + vnfLine = vnfLine.replace("\n", "").replace("\r", ""); + logger.info("\n"); + if (!vnfLine.isEmpty()) { + if (fileLineCounter != 0) { + String[] fields = vnfLine.split("\\s*,\\s*", -1); + if (fields.length != this.headerLength) { + logger.info("ERROR: Vnf line should contain " + this.headerLength + " columns, contains " + fields.length + " instead."); + success = false; + continue; + } + String newResourceUuid = fields[0]; + String groupUuid = fields[1]; + String vnfId = fields[19]; + logger.info("---------- Processing Line " + vnfLine + "----------"); + logger.info("newResourceUuid = " + newResourceUuid + " vnfId = " + vnfId + " group uuid = " + groupUuid); + if (history.containsKey(vnfId)){ + if (history.get(vnfId).contains(groupUuid)){ + logger.info("ERROR: duplicate groupUuid in vnf - skipping"); + fileLineCounter++; + continue; + } + else{ + history.get(vnfId).add(groupUuid); + } + } + else { + Set newSet = new HashSet(); + newSet.add(groupUuid); + history.put(vnfId, newSet); + } + List<Vertex> entitlements = g.V().has(AAIProperties.NODE_TYPE, "entitlement").has("group-uuid", groupUuid) + .where(this.engine.getQueryBuilder().createEdgeTraversal(EdgeType.TREE, "entitlement", "generic-vnf").getVerticesByProperty("vnf-id", vnfId) + .<GraphTraversal<?, ?>>getQuery()).toList(); + + List<Vertex> licenses = g.V().has(AAIProperties.NODE_TYPE, "license").has("group-uuid", groupUuid) + .where(this.engine.getQueryBuilder().createEdgeTraversal(EdgeType.TREE, "license", "generic-vnf").getVerticesByProperty("vnf-id", vnfId) + .<GraphTraversal<?, ?>>getQuery()).toList(); + + this.ChangeResourceUuid(entitlements, newResourceUuid, "entitlements", vnfId, groupUuid); + this.ChangeResourceUuid(licenses, newResourceUuid, "license", vnfId, groupUuid); + + } else { + this.headerLength = vnfLine.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength); + if (this.headerLength < 22){ + logger.info("ERROR: Input file should have 22 columns"); + this.success = false; + return; + } + } + } + fileLineCounter++; + } + } catch (FileNotFoundException e) { + logger.info("ERROR: Could not find file " + fileName, e); + success = false; + } catch (IOException e) { + logger.info("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info("encountered exception", e); + e.printStackTrace(); + success = false; + } + } + + private void ChangeResourceUuid(List<Vertex> vertices, String newResourceUuid, String nodeType, String vnfId, String groupUuid){ + if (vertices.size() > 1) { + logger.info("\t More than 1 " + nodeType + "found, skipping"); + return; + } + else if (vertices.size() == 1) { + try { + logger.info(String.format("Updating %s with groupUuid %s from generic-vnf with vnfId %s with newResourceUuid %s", nodeType, groupUuid, vnfId, newResourceUuid)); + Vertex v = vertices.get(0); + String resourceUuid = v.<String>property("resource-uuid").value(); + logger.info("\tOriginal resource-uuid: " + resourceUuid); + v.property("resource-uuid", newResourceUuid); + + String aaiUri = v.<String>property(AAIProperties.AAI_URI).value(); + if (aaiUri != null) { + logger.info("\tOriginal aaiUri: " + aaiUri); + aaiUri = 
aaiUri.replaceFirst("[^/]*"+resourceUuid + "$", newResourceUuid); + v.property(AAIProperties.AAI_URI, aaiUri); + logger.info("\tNew aaiUri: " + v.value(AAIProperties.AAI_URI).toString()); + } + + this.touchVertexProperties(v, false); + logger.info("\tNew resource-uuid: " + newResourceUuid); + } + catch (Exception e){ + logger.info("\t ERROR: caught exception: " + e.getMessage()); + } + } + else { + logger.info("\t No " + nodeType + " found with group-uuid "+ groupUuid + " for generic-vnf " +vnfId); + return; + } + } + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } else { + return Status.FAILURE; + } + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{LICENSE_NODE_TYPE, ENTITLEMENT_NODE_TYPE}); + } + + @Override + public String getMigrationName() { + return "ALTSLicenseEntitlementMigration"; + } + +} diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateDataFromASDCToConfiguration.java b/src/main/java/org/onap/aai/migration/v12/MigrateDataFromASDCToConfiguration.java new file mode 100644 index 0000000..819c7d4 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v12/MigrateDataFromASDCToConfiguration.java @@ -0,0 +1,138 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v12; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.*; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.util.AAIConstants; + +import java.io.*; +import java.util.Optional; + +@MigrationPriority(20) +@MigrationDangerRating(2) +//@Enabled +public class MigrateDataFromASDCToConfiguration extends Migrator { + private final String PARENT_NODE_TYPE = "generic-vnf"; + private boolean success = true; + private String entitlementPoolUuid = ""; + private String VNT = ""; + + + public MigrateDataFromASDCToConfiguration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + } + + + @Override + public void run() { + + String homeDir = System.getProperty("AJSC_HOME"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + + String csvFile = homeDir + AAIConstants.AAI_FILESEP + configDir + AAIConstants.AAI_FILESEP + "migration-input-files" + AAIConstants.AAI_FILESEP + "VNT-migration-data" + AAIConstants.AAI_FILESEP + "VNT-migration-input.csv"; + + logger.info("Reading Csv file: " + csvFile); + BufferedReader br = null; + String line = ""; + String csvSplitBy = "\t"; + try { + + br = new BufferedReader(new FileReader(new File(csvFile))); + while ((line = br.readLine()) != null) { + line = line.replaceAll("\"", ""); + String[] temp = line.split(csvSplitBy); + if (temp.length < 2) { + continue; // skip malformed lines that lack the two expected columns + } + if ("entitlement-pool-uuid".equals(temp[0]) || "vendor-allowed-max-bandwidth (VNT)".equals(temp[1])) { + continue; + } + entitlementPoolUuid = temp[0]; + VNT = temp[1]; + GraphTraversal<Vertex, Vertex> f = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, "entitlement").has("group-uuid", entitlementPoolUuid) + .out("org.onap.relationships.inventory.BelongsTo").has(AAIProperties.NODE_TYPE, "generic-vnf") + .has("vnf-type", "HN").in("org.onap.relationships.inventory.ComposedOf").has(AAIProperties.NODE_TYPE, "service-instance").out("org.onap.relationships.inventory.Uses").has(AAIProperties.NODE_TYPE, "configuration"); + + modify(f); + } + + } catch (FileNotFoundException e) { + success = false; + logger.error("Found Exception", e); + } catch (IOException e) { + success = false; + logger.error("Found Exception", e); + } catch (Exception e) { + success = false; + logger.error("Found Exception", e); + } finally { + try { + if (br != null) { + br.close(); + } + } catch (IOException e) { + success = false; + logger.error("Found Exception", e); + } + } + + } + + public void modify(GraphTraversal<Vertex, Vertex> g) { + int count = 0; + while (g.hasNext()) { + Vertex v = g.next(); + logger.info("Found node type " + v.property("aai-node-type").value().toString() + " with configuration id: " + v.property("configuration-id").value().toString()); + v.property("vendor-allowed-max-bandwidth", VNT); + logger.info("VNT val after migration: " + v.property("vendor-allowed-max-bandwidth").value().toString()); + count++; + } + + logger.info("modified " + count + " 
configuration nodes related to Entitlement UUID: " +entitlementPoolUuid); + + } + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } else { + return Status.FAILURE; + } + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{PARENT_NODE_TYPE}); + } + + @Override + public String getMigrationName() { + return "MigrateDataFromASDCToConfiguration"; + } + + +} diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateHUBEvcInventory.java b/src/main/java/org/onap/aai/migration/v12/MigrateHUBEvcInventory.java new file mode 100644 index 0000000..0b3103b --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v12/MigrateHUBEvcInventory.java @@ -0,0 +1,293 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v12; +/*- + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Migrator; +import org.onap.aai.migration.Status; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.util.AAIConfig; + + +@MigrationPriority(31) +@MigrationDangerRating(100) +//@Enabled +public class MigrateHUBEvcInventory extends Migrator { + + private static final String FORWARDER_EVC_NODE_TYPE = "forwarder-evc"; + + private static boolean success = true; + private static boolean checkLog = false; + private static GraphTraversalSource g = null; + private int headerLength; + + private static int processedEvcsCount = 0; + private static int falloutRowsCount = 0; + private static List<String> processedEvcsList = new ArrayList<String>(); + private static Map<String, String> falloutLinesMap = new HashMap<String, String>(); + + private static final String homeDir = System.getProperty("AJSC_HOME"); + private static List<String> dmaapMsgList = new ArrayList<String>(); + + + public MigrateHUBEvcInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + @Override + public void run() { + logger.info("---------- Start migration of HUB EVC Inventory ----------"); + String homeDir = System.getProperty("AJSC_HOME"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return; + } + if (configDir == null) { + success = false; + return; + } + + String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/"; + int fileLineCounter = 0; + String fileName = feedDir+ "hub.csv"; + logger.info(fileName); + logger.info("---------- Processing HUB Entries from file ----------"); + try { + String line; + List<String> lines = Files.readAllLines(Paths.get(fileName)); + Iterator<String> lineItr = lines.iterator(); + while (lineItr.hasNext()){ + line = lineItr.next(); + logger.info("\n"); + if (!line.isEmpty()) { + if (fileLineCounter != 0) { + String[] colList = line.split("\\s*,\\s*", -1); +// if (colList.length != headerLength) { +// logger.info("ERROR: HUB line entry should contain " + headerLength + " columns, contains " + colList.length + " instead."); +// success = false; +// continue; +// } + Map<String, String> hubColValues = new HashMap<String, String>(); + hubColValues.put("ivlan", colList[1]); + hubColValues.put("nniSvlan", colList[3]); + hubColValues.put("evcName", colList[4]); + + String evcName = 
hubColValues.get("evcName"); + String ivlan = hubColValues.get("ivlan"); + String nniSvlan = hubColValues.get("nniSvlan"); + if (!AAIConfig.isEmpty(evcName)) { + logger.info("---------- Processing Line " + line + "----------"); + logger.info("\t Evc Name = " + evcName ); + + List<Vertex> forwarderEvcList = g.V().has ("forwarding-path-id", evcName).has(AAIProperties.NODE_TYPE, "forwarding-path") + .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder") + .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration") + .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc").toList(); + + + if (forwarderEvcList == null || forwarderEvcList.isEmpty()){ + logger.info("\t ERROR: Forwarder-evc does not exist for evc-id = " + evcName + " - skipping"); + falloutLinesMap.put(String.valueOf(fileLineCounter+1), "["+evcName+"] - Forwarder-evc does not exist" ); + falloutRowsCount++; + } + else if (forwarderEvcList!= null && !forwarderEvcList.isEmpty()) { + Iterator<Vertex> listItr = forwarderEvcList.iterator(); + while (listItr.hasNext()){ + Vertex forwarderEvcVtx = listItr.next(); + if (forwarderEvcVtx != null && forwarderEvcVtx.property("forwarder-evc-id").isPresent() && !AAIConfig.isEmpty(ivlan )) { + boolean isUpdated = updateIvlanOnForwarder(forwarderEvcVtx, ivlan, nniSvlan ); + if (!isUpdated){ + falloutLinesMap.put(String.valueOf(fileLineCounter+1), "["+evcName+"] - Forwarder-evc does not have svlan populated" ); + falloutRowsCount++; + } + } + } + if (!processedEvcsList.contains(evcName)) { + processedEvcsList.add(evcName); + processedEvcsCount++; + } + + } + } + } else { + this.headerLength = line.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength); + if (this.headerLength < 5){ + logger.info("ERROR: Input file should have 5 columns"); + MigrateHUBEvcInventory.success = false; + return; + } + } + } + + fileLineCounter++; + } + + logger.info ("\n \n ******* Final Summary for HUB FILE Migration ********* \n"); + logger.info("Evcs processed: "+processedEvcsCount); + logger.info("Total Rows Count: "+(fileLineCounter + 1)); + logger.info("Fallout Rows Count : "+falloutRowsCount +"\n"); + if (!falloutLinesMap.isEmpty()) { + logger.info("------ Fallout Details: ------"); + falloutLinesMap.forEach((lineNumber, errorMsg) -> { + logger.info(errorMsg + ": on row "+lineNumber.toString()); + }); + } + + } catch (FileNotFoundException e) { + logger.info("ERROR: Could not file file " + fileName, e.getMessage()); + success = false; + checkLog = true; + } catch (IOException e) { + logger.info("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info("encountered exception", e.getMessage()); + success = false; + } + } + + private boolean updateIvlanOnForwarder(Vertex forwarderEvcVtx, String ivlan, String nniSvlan) throws Exception { + + boolean isUpdated = true; + String forwarderEvcId = forwarderEvcVtx.value("forwarder-evc-id"); + + String forwarderSvlan = null; + if( forwarderEvcVtx.property("svlan").isPresent()) { + forwarderSvlan = forwarderEvcVtx.value("svlan"); + } + if (forwarderSvlan != null && !forwarderSvlan.isEmpty()) { + int forwarderSvlanValue = Integer.parseInt(forwarderSvlan); + int nniSvlanValue = Integer.parseInt(nniSvlan); + if (forwarderSvlan != null && nniSvlan != null && (forwarderSvlanValue == nniSvlanValue)) { + if (ivlan != null && !ivlan.isEmpty()) { + if (forwarderEvcVtx.property("ivlan").isPresent()) { + String forwarderIvlan = 
forwarderEvcVtx.value("ivlan"); + if (forwarderIvlan != null && !forwarderIvlan.isEmpty()) { + if (Integer.parseInt(forwarderIvlan) == Integer.parseInt(ivlan)) { + logger.info("\t Skipped update ivlan for forwarder-evc[" + forwarderEvcId + + "], ivlan already set to expected value"); + } else { + logger.info("\t Start ivlan update for forwarder-evc[" + forwarderEvcId + "]"); + updateIvlan(forwarderEvcVtx, ivlan, forwarderEvcId); + } + } + } else { + updateIvlan(forwarderEvcVtx, ivlan, forwarderEvcId); + } + } + } + } else { + logger.info("Skipping ivlan update, svlan is not present on the forwarder-evc ["+forwarderEvcId +"]" ); + isUpdated = false; + } + return isUpdated; + } + + private void updateIvlan(Vertex forwarderEvcVtx, String ivlan, String forwarderEvcId) { + forwarderEvcVtx.property("ivlan", ivlan); + this.touchVertexProperties(forwarderEvcVtx, false); + logger.info("\t Updated ivlan to "+ ivlan + " on forwarder-evc[" + + forwarderEvcId + "]"); + String dmaapMsg = System.nanoTime() + "_" + forwarderEvcVtx.id().toString() + "_" + forwarderEvcVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); +// try { +// final Introspector evcIntrospector = serializer.getLatestVersionView(forwarderEvcVtx); +// this.notificationHelper.addEvent(forwarderEvcVtx, evcIntrospector, EventAction.UPDATE, +// this.serializer.getURIForVertex(forwarderEvcVtx, false)); +// } catch (UnsupportedEncodingException e) { +// logger.info("\t ERROR: Could not update ivlan on forwader-evc " + forwarderEvcVtx, e.getMessage()); +// } catch (AAIException e) { +// logger.info("\t ERROR: Could not update ivlan on forwarder-evc "+ forwarderEvcVtx, e.getMessage()); +// } + } + + + @Override + public Status getStatus() { + if (checkLog) { + return Status.CHECK_LOGS; + } + else if (success) { + return Status.SUCCESS; + } + else { + return Status.FAILURE; + } + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{MigrateHUBEvcInventory.FORWARDER_EVC_NODE_TYPE}); + } + + @Override + public String getMigrationName() { + return "MigrateHUBEvcInventory"; + } +} diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateINVEvcInventory.java b/src/main/java/org/onap/aai/migration/v12/MigrateINVEvcInventory.java new file mode 100644 index 0000000..a9fce6a --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v12/MigrateINVEvcInventory.java @@ -0,0 +1,242 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v12; +/*- + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + + +import java.io.BufferedReader; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.List; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.*; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.util.AAIConfig; + + +@MigrationPriority(28) +@MigrationDangerRating(100) +public class MigrateINVEvcInventory extends Migrator { + + private static final String PROPERTY_EVC_ID = "evc-id"; + private static final String EVC_NODE_TYPE = "evc"; + + private static boolean success = true; + private static boolean checkLog = false; + private static GraphTraversalSource g = null; + private int headerLength; + + private static int processedEvcsCount = 0; + private static int falloutEvcsCount = 0; + private static Map<String, String> falloutEvcsMap = new HashMap<String, String>(); + + private static final String homeDir = System.getProperty("AJSC_HOME"); + private static List<String> dmaapMsgList = new ArrayList<String>(); + + + public MigrateINVEvcInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + @Override + public void run() { + logger.info("---------- Start migration of INV EVC Inventory ----------"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return; + } + if (configDir == null) { + success = false; + return; + } + + String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/"; + int fileLineCounter = 0; + String fileName = feedDir+ "inv.csv"; + logger.info(fileName); + logger.info("---------- Processing INV Entries from file ----------"); 
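+ // Assumed inv.csv layout, inferred from the column indexes used below: comma-separated with a header row,
+ // at least 23 columns, the collector interconnect type at zero-based column 17 and the evc name at column 22.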
+ try (BufferedReader br = new BufferedReader(new FileReader(fileName))) { + String line; + while ((line = br.readLine()) != null) { + line = line.replace("\n", "").replace("\r", ""); + logger.info("\n"); + if (!line.isEmpty()) { + if (fileLineCounter != 0) { + String[] colList = line.split("\\s*,\\s*", -1); + if (colList.length != headerLength) { + logger.info("ERROR: INV line should contain " + headerLength + " columns, contains " + colList.length + " instead."); + fileLineCounter++; + continue; + } + Map<String, String> invColValues = new HashMap<String, String>(); + invColValues.put("evcName", colList[22]); + invColValues.put("collectorInterconnectType", colList[17]); + + String evcName = invColValues.get("evcName"); + String interconnectType = invColValues.get("collectorInterconnectType"); + if (!AAIConfig.isEmpty(evcName) && !AAIConfig.isEmpty(interconnectType)) { + logger.info("---------- Processing Line " + line + "----------"); + logger.info("\t Evc Name = " + evcName ); + + // For each provided evc-name, check if the evc already exists + List<Vertex> existingEvcList = g.V().has(PROPERTY_EVC_ID, evcName).has(AAIProperties.NODE_TYPE, EVC_NODE_TYPE).toList(); + if (existingEvcList == null || existingEvcList.size() == 0){ + logger.info("\t ERROR: Evc does not exist with evc-id = " + evcName + " - skipping"); + falloutEvcsCount++; + falloutEvcsMap.put((fileLineCounter+1)+"", "["+evcName+"] - Evc does not exist" ); + } + else if (existingEvcList.size() == 1) { + Vertex evcVtx = existingEvcList.get(0); + if (evcVtx != null && !AAIConfig.isEmpty(interconnectType)) { + updateEvcInterconnectType(evcVtx, interconnectType); + } + processedEvcsCount++; + } + else { + logger.info("\t ERROR: More than one EVC exists with evc-id = " + evcName + " - skipping"); + falloutEvcsCount++; + falloutEvcsMap.put((fileLineCounter+1)+"", "["+evcName+"] - More than one EVC exists with evc-id" ); + } + } else { + logger.info("---------- Processing Line " + line + "----------"); + logger.info("Invalid line entry : evcName: "+evcName + " interConnectType: "+ interconnectType); + fileLineCounter++; + continue; + } + } else { + this.headerLength = line.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength); + if (this.headerLength < 23){ + logger.info("ERROR: Input file should have 23 columns"); + this.success = false; + return; + } + } + } + + fileLineCounter++; + } + + logger.info ("\n \n ******* Final Summary for INV FILE Migration ********* \n"); + logger.info("Evcs processed: "+processedEvcsCount); + logger.info("Fallout Evcs count: "+falloutEvcsCount); + if (!falloutEvcsMap.isEmpty()) { + logger.info("------ Fallout Details: ------"); + falloutEvcsMap.forEach((lineNumber, errorMsg) -> { + logger.info(errorMsg + ": on row "+lineNumber.toString()); + }); + } + } catch (FileNotFoundException e) { + logger.info("ERROR: Could not find file " + fileName, e); + success = false; + checkLog = true; + } catch (IOException e) { + logger.info("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info("encountered exception", e); + e.printStackTrace(); + success = false; + } + } + + + private void updateEvcInterconnectType(Vertex evcVtx, String interconnectType) { + + String evcId = evcVtx.value("evc-id"); + if (interconnectType != null && !interconnectType.isEmpty()){ + evcVtx.property("inter-connect-type-ingress", interconnectType); + this.touchVertexProperties(evcVtx, false); + logger.info("\t 
Updated inter-connect-type-ingress property for evc [" + evcId +"]"); + String dmaapMsg = System.nanoTime() + "_" + evcVtx.id().toString() + "_" + evcVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); +// try { +// final Introspector evcIntrospector = serializer.getLatestVersionView(evcVtx); +// this.notificationHelper.addEvent(evcVtx, evcIntrospector, EventAction.UPDATE, this.serializer +// .getURIForVertex(evcVtx, false)); +// } catch (UnsupportedEncodingException e) { +// logger.info("\t ERROR: Could not send update notification for evc " + evcId, e.getMessage()); +// } catch (AAIException e) { +// logger.info("\t ERROR: Could not send update notification for evc " + evcId, e.getMessage()); +// } + } + } + + + @Override + public Status getStatus() { + if (checkLog) { + return Status.CHECK_LOGS; + } + else if (success) { + return Status.SUCCESS; + } + else { + return Status.FAILURE; + } + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{MigrateINVEvcInventory.EVC_NODE_TYPE}); + } + + @Override + public String getMigrationName() { + return "MigrateINVEvcInventory"; + } +} diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventory.java b/src/main/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventory.java new file mode 100644 index 0000000..0c85481 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventory.java @@ -0,0 +1,361 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v12; +/*- + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.javatuples.Pair; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.Introspector; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.*; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + + +@MigrationPriority(25) +@MigrationDangerRating(100) +public class MigrateINVPhysicalInventory extends Migrator { + + private static final String NODE_TYPE_PNF = "pnf"; + private static final String NODE_TYPE_PINTERFACE = "p-interface"; + private static final String NODE_TYPE_PINTERFACES = "p-interfaces"; + private static final String PROPERTY_PNF_NAME = "pnf-name"; + private static final String PROPERTY_INTERFACE_NAME = "interface-name"; + protected final AtomicInteger skippedRowsCount = new AtomicInteger(0); + protected final AtomicInteger processedRowsCount = new AtomicInteger(0); + + private boolean success = true; + private boolean checkLog = false; + private GraphTraversalSource g = null; + protected int headerLength; + + protected final AtomicInteger falloutRowsCount = new AtomicInteger(0); + private static final String homeDir = System.getProperty("AJSC_HOME"); + private static List<String> dmaapMsgList = new ArrayList<String>(); + + public MigrateINVPhysicalInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + @Override + public void run() { + logger.info("---------- Start migration of INV File Physical Inventory ----------"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return; + } + if (configDir == null) { + success = false; + return; + } + + String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/"; + String fileName = feedDir+ "inv.csv"; + logger.info(fileName); + logger.info("---------- Processing INV Entries from file ----------"); + + + try { + Map<String, Set<String>> data = loadFile(fileName); + this.processData(data); + + logger.info("\n ******* Summary Report for Inv File Physical Migration *******"); + logger.info("Number of distinct pnfs processed: "+data.keySet().size()); + logger.info("Rows processed: " + processedRowsCount); + logger.info("Rows skipped: "+ skippedRowsCount); + logger.info("Fallout Rows count: " + falloutRowsCount); + + } catch (FileNotFoundException e) { + logger.info("ERROR: Could not find file " + fileName, e); + success = false; + checkLog = true; + } catch (IOException e) { + logger.info("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info("encountered exception", e); + e.printStackTrace(); + 
success = false; + } + } + + protected void processData(Map<String, Set<String>> data) throws Exception{ + + for (Map.Entry<String, Set<String>> entry : data.entrySet()) { + String pnfName = entry.getKey(); + final Set<String> newPInterfaces = entry.getValue(); + Introspector pnf; + Vertex pnfVertex; + EventAction eventAction = EventAction.UPDATE; + boolean pnfChangesMade = false; + + if (pnfExists(pnfName)) { + pnf = serializer.getLatestVersionView(getPnf(pnfName)); + pnfVertex = getPnf(pnfName); + } else { + pnf = loader.introspectorFromName(NODE_TYPE_PNF); + pnf.setValue(PROPERTY_PNF_NAME, pnfName); + pnfVertex = serializer.createNewVertex(pnf); + eventAction = EventAction.CREATE; + pnfChangesMade = true; + } + + if (pnfChangesMade) { + serializer.serializeSingleVertex(pnfVertex, pnf, getMigrationName()); + logger.info ("\t Pnf [" + pnfName +"] created with vertex id "+pnfVertex); +// pnf = serializer.getLatestVersionView(pnfVertex); +// this.notificationHelper.addEvent(pnfVertex, serializer.getLatestVersionView(pnfVertex), eventAction, this.serializer.getURIForVertex(pnfVertex, false)); +// logger.info("\t Dmaap notification sent for creation of pnf "); + String dmaapMsg = System.nanoTime() + "_" + pnfVertex.id().toString() + "_" + pnfVertex.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + } else { + logger.info("\t Pnf ["+ pnfName +"] already exists "); + } + + if (!newPInterfaces.isEmpty()) { + Introspector pInterfacesIntrospector = pnf.getWrappedValue(NODE_TYPE_PINTERFACES); + if ( pInterfacesIntrospector == null) { + pInterfacesIntrospector = pnf.newIntrospectorInstanceOfProperty(NODE_TYPE_PINTERFACES); + pnf.setValue(NODE_TYPE_PINTERFACES, pInterfacesIntrospector.getUnderlyingObject()); + } + + for (Introspector introspector : pInterfacesIntrospector.getWrappedListValue(NODE_TYPE_PINTERFACE)) { + String interfaceName = introspector.getValue(PROPERTY_INTERFACE_NAME).toString(); + if (newPInterfaces.contains(interfaceName)) { + newPInterfaces.remove(interfaceName); + } + } + + for (String pInterfaceName : newPInterfaces) { + Introspector pInterface = loader.introspectorFromName(NODE_TYPE_PINTERFACE); + pInterface.setValue(PROPERTY_INTERFACE_NAME, pInterfaceName); + Vertex pInterfaceVertex = serializer.createNewVertex(pInterface); + pInterfaceVertex.property(AAIProperties.AAI_URI, pnfVertex.property(AAIProperties.AAI_URI).value() + "/p-interfaces/p-interface/" + pInterfaceName); + edgeSerializer.addTreeEdge(g, pnfVertex, pInterfaceVertex); + eventAction = EventAction.CREATE; + serializer.serializeSingleVertex(pInterfaceVertex, pInterface, getMigrationName()); + logger.info ("\t p-interface [" + pInterfaceName +"] created with vertex id "+ pInterfaceVertex + " on pnf ["+pnfName+"]"); +// pInterface = serializer.getLatestVersionView(pInterfaceVertex); +// this.notificationHelper.addEvent(pInterfaceVertex, pInterface, eventAction, this.serializer.getURIForVertex(pInterfaceVertex, false)); +// logger.info("\t Dmaap notification sent for creation of p-interface "); + String dmaapMsg = System.nanoTime() + "_" + pInterfaceVertex.id().toString() + "_" + pInterfaceVertex.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + } + } + } + } + + protected boolean pnfExists(String pnfName) { + return g.V().has(PROPERTY_PNF_NAME, pnfName).has(AAIProperties.NODE_TYPE, NODE_TYPE_PNF).hasNext(); + } + + protected Vertex getPnf(String pnfName) { + return g.V().has(PROPERTY_PNF_NAME, pnfName).has(AAIProperties.NODE_TYPE, NODE_TYPE_PNF).next(); + } + + /** + * Load 
file to the map for processing + * @param fileName + * @return + * @throws Exception + */ + protected Map<String,Set<String>> loadFile(String fileName) throws Exception { + List<String> lines = Files.readAllLines(Paths.get(fileName)); + return this.getFileContents(lines); + } + + /** + * Get lines from file. + * @param lines + * @return + * @throws Exception + */ + protected Map<String,Set<String>> getFileContents(List<String> lines) throws Exception { + + final Map<String,Set<String>> fileContents = new ConcurrentHashMap<>(); + + processAndRemoveHeader(lines); + + logger.info("Total rows count excluding header: "+ lines.size()); + + lines.stream() + .filter(line -> !line.isEmpty()) + .map(line -> Arrays.asList(line.split("\\s*,\\s*", -1))) +// .filter(this::verifyLine) + .map(this::processLine) + .filter(Optional::isPresent) + .map(Optional::get) + .forEach(p -> { + processedRowsCount.getAndIncrement(); + String pnfName = p.getValue0(); + fileContents.computeIfAbsent(pnfName, k -> new HashSet<>()); + if (p.getValue1() != null) { + fileContents.get(pnfName).add(p.getValue1()); + } + }); + + return fileContents; + + + } + + /** + * Verify line has the necessary details. + * @param line + * @return + */ + protected boolean verifyLine(List<String> line) { + if (line.size() != headerLength) { + logger.info("ERROR: INV line should contain " + headerLength + " columns, contains " + line.size() + " instead."); + this.skippedRowsCount.getAndIncrement(); + return false; + } + return true; + } + + /** + * Get the pnf name and interface name from the line. + * @param line + * @return + */ + protected Optional<Pair<String,String>> processLine(List<String> line) { + logger.info("Processing line... " + line.toString()); + int lineSize = line.size(); + if (lineSize < 12){ + // the port aid is read from zero-based column 11, so a usable line needs at least 12 columns + logger.info("Skipping line, does not contain pnf and/or port columns"); + skippedRowsCount.getAndIncrement(); + return Optional.empty(); + } + + String pnfName = line.get(0); + String portAid = line.get(11).replaceAll("^\"|\"$", "").replaceAll("\\s+",""); + + if (pnfName.isEmpty() && portAid.isEmpty()) { + logger.info("Line missing pnf name and port " + line); + falloutRowsCount.getAndIncrement(); + return Optional.empty(); + } else if (pnfName.isEmpty()) { + logger.info("Line missing pnf name " + line); + falloutRowsCount.getAndIncrement(); + return Optional.empty(); + } else if (portAid.isEmpty()) { + logger.info("Line missing port " + line); + return Optional.of(Pair.with(pnfName, null)); + } + return Optional.of(Pair.with(pnfName, portAid)); + } + + /** + * Verify header of the csv and remove it from the list. 
+ * @param lines + * @throws Exception + */ + protected String processAndRemoveHeader(List<String> lines) throws Exception { + String firstLine; + if (lines.isEmpty()) { + String msg = "ERROR: Missing Header in file"; + success = false; + logger.error(msg); + throw new Exception(msg); + } else { + firstLine = lines.get(0); + } + + this.headerLength = firstLine.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength); + if (this.headerLength < 21){ + String msg = "ERROR: Input file should have 21 columns"; + success = false; + logger.error(msg); + throw new Exception(msg); + } + + return lines.remove(0); + } + + + @Override + public Status getStatus() { + if (checkLog) { + return Status.CHECK_LOGS; + } + else if (success) { + return Status.SUCCESS; + } + else { + return Status.FAILURE; + } + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{NODE_TYPE_PNF}); + } + + @Override + public String getMigrationName() { + return "MigrateINVPhysicalInventory"; + } + +} diff --git a/src/main/java/org/onap/aai/migration/v12/MigratePATHEvcInventory.java b/src/main/java/org/onap/aai/migration/v12/MigratePATHEvcInventory.java new file mode 100644 index 0000000..b0bacde --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v12/MigratePATHEvcInventory.java @@ -0,0 +1,713 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v12; +/*- + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.Introspector; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Migrator; +import org.onap.aai.migration.Status; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.util.AAIConfig; + + +@MigrationPriority(29) +@MigrationDangerRating(100) +public class MigratePATHEvcInventory extends Migrator { + + private static Map<String, Vertex> portList = new HashMap<String, Vertex>(); + private static Map<String, Vertex> pnfList = new HashMap<String, Vertex>(); + private final String FORWARDER_EVC_NODE_TYPE = "forwarder-evc"; + private final String LAGINTERFACE_NODE_TYPE = "lag-interface"; + private final String CONFIGURATION_NODE_TYPE = "configuration"; + private final String FORWARDING_PATH_NODE_TYPE = "forwarding-path"; + private final String FORWARDING_PATH_ID = "forwarding-path-id"; + private final String PROPERTY_CONFIGURATION_ID = "configuration-id"; + private final String PNF_NODE_TYPE = "pnf"; + private final String PROPERTY_PNF_NAME = "pnf-name"; + private final String PROPERTY_INTERFACE_NAME = "interface-name"; + private final String PINTERFACE_NODE_TYPE = "p-interface"; + private static boolean success = true; + private static boolean checkLog = false; + private static GraphTraversalSource g = null; + private int headerLength; + + //Create a map to store the evcs processed where lag-interfaces were found to track the sequence of ports + //key contains the evcName + //value is a map that contains the mapping for sequence of forwarders and corresponding portAids in the order they are found + + private static Map<String, Map<Vertex, String>> pathFileMap = new HashMap<String, Map<Vertex, String>>(); + + private static int processedEvcsCount = 0; + private static int falloutEvcsCount = 0; + + //Map with lineNumber and the reason for failure for each EVC + private static Map<Integer, String> falloutEvcsList = new HashMap<Integer, String>(); + private static final String homeDir = System.getProperty("AJSC_HOME"); + private static List<String> dmaapMsgList = new ArrayList<String>(); + + + public MigratePATHEvcInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + @Override + public void run() { + logger.info("---------- Start migration of PATH EVC Inventory ----------"); + String configDir = 
System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return; + } + if (configDir == null) { + success = false; + return; + } + + String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/"; + int fileLineCounter = 0; + String fileName = feedDir+ "path.csv"; + logger.info(fileName); + logger.info("---------- Processing PATH Entries from file ----------"); + try { + List<String> lines = Files.readAllLines(Paths.get(fileName)); + Iterator<String> lineItr = lines.iterator(); + while (lineItr.hasNext()){ + String line = lineItr.next().replace("\n", "").replace("\r", ""); + logger.info("\n"); + if (!line.isEmpty()) { + if (fileLineCounter != 0) { + String[] colList = line.split("\\s*,\\s*", -1); + if (colList.length != headerLength) { + logger.info("ERROR: PATH line should contain " + headerLength + " columns, contains " + colList.length + " instead."); +// success = false; + continue; + } + Map<String, String> pathColValues = new HashMap<String, String>(); + pathColValues.put("evcName", colList[1]); + pathColValues.put("bearerFacingCircuit", colList[4]); + pathColValues.put("bearerCvlan", colList[6]); + pathColValues.put("bearerSvlan", colList[7]); + pathColValues.put("bearerPtniiName", colList[8]); + String bearerPortAid = colList[12].replaceAll("^\"|\"$", "").replaceAll("\\s+",""); + pathColValues.put("bearerPortAid", bearerPortAid); + pathColValues.put("collectorFacingCircuit", colList[14]); + pathColValues.put("collectorCvlan", colList[16]); + pathColValues.put("collectorSvlan", colList[17]); + pathColValues.put("collectorPtniiName", colList[18]); + String collectorPortAid = colList[22].replaceAll("^\"|\"$", "").replaceAll("\\s+",""); + pathColValues.put("collectorPortAid", collectorPortAid); + + + String evcName = pathColValues.get("evcName"); + if (!AAIConfig.isEmpty(evcName)) { + logger.info("---------- Processing Line " + line + "----------"); + logger.info("\t Evc Name = " + evcName ); + + boolean isEntryValid = validatePnfsAndPorts(pathColValues, evcName); + + if (!isEntryValid){ + logger.info("\t ERROR: Skipping processing for line containing evc-name [" +evcName+ "]"); + falloutEvcsCount++; + falloutEvcsList.put(Integer.valueOf(fileLineCounter -1 ), "["+ evcName +"] Ptnii or port does not exist"); + continue; + } + // Get the forwarding path containing forwarders + GraphTraversal<Vertex, Vertex> forwardingPathList = g.V().has(this.FORWARDING_PATH_ID, evcName).has(AAIProperties.NODE_TYPE, this.FORWARDING_PATH_NODE_TYPE) + .where(__.in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type","forwarder")); + + if (!forwardingPathList.hasNext()){ + createNewForwardersFromPATHData(pathColValues, evcName, fileLineCounter); + processedEvcsCount++; + } else { + Vertex forwardingPathVtx = forwardingPathList.next(); + List<Vertex> forwardersList = g.V(forwardingPathVtx.id()).in("org.onap.relationships.inventory.BelongsTo").toList(); + Iterator<Vertex> forwardersItr = forwardersList.iterator(); + List<String> forwarderRoleList = new ArrayList<String>(); + while (forwardersItr.hasNext()){ + Vertex forwarderVtx = forwardersItr.next(); + String role = forwarderVtx.value("forwarder-role"); + if (role!= null ){ + forwarderRoleList.add(role); + } + } + if (forwarderRoleList!= null && !forwarderRoleList.isEmpty()) { + if (forwarderRoleList.contains("ingress") && forwarderRoleList.contains("egress")){ + logger.info("\t Skipping processing for EVC[" + evcName + "] 
because forwarders related to this EVC already exist."); + falloutEvcsCount++; + falloutEvcsList.put(Integer.valueOf(fileLineCounter -1 ), "["+ evcName +"] Forwarders already exist for EVC"); + } else { + createNewForwardersFromPATHData(pathColValues, evcName, fileLineCounter); + } + } + } + } + } else { + this.headerLength = line.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength); + if (this.headerLength < 24){ + logger.info("ERROR: Input file should have 24 columns"); + this.success = false; + return; + } + } + } + fileLineCounter++; + } + logger.info ("\n \n ******* Final Summary for PATH FILE Migration ********* \n"); + logger.info("Evcs processed: "+processedEvcsCount); + logger.info("Total Rows Count: "+(fileLineCounter + 1)); + logger.info("Fallout Rows Count : "+falloutEvcsCount +"\n"); + if (!falloutEvcsList.isEmpty()) { + logger.info("------ Fallout Details: ------"); + falloutEvcsList.forEach((lineNumber, errorMsg) -> { + logger.info(errorMsg + ": on row "+lineNumber.toString()); + }); + } + } catch (FileNotFoundException e) { + logger.info("ERROR: Could not find file " + fileName, e); + success = false; + checkLog = true; + } catch (IOException e) { + logger.info("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info("encountered exception", e); + e.printStackTrace(); + success = false; + } + } + + + private boolean validatePnfsAndPorts(Map<String, String> pathColValues, String evcName) { + + String collectorPtniiName = pathColValues.get("collectorPtniiName"); + String bearerPtniiName = pathColValues.get("bearerPtniiName"); + String collectorPortAid = pathColValues.get("collectorPortAid"); + String bearerPortAid = pathColValues.get("bearerPortAid"); + boolean isValid = validateCollectorPnf(collectorPtniiName, evcName) && validateBearerPnf(bearerPtniiName, evcName) + && validateCollectorPort(pathColValues, collectorPortAid, collectorPtniiName, evcName) + && validateBearerPort(pathColValues, bearerPortAid, bearerPtniiName, evcName); + return isValid; + } + + private boolean validateCollectorPnf(String collectorPtniiName, String evcName) { + + boolean isValid = false; + if (!AAIConfig.isEmpty(collectorPtniiName)) { + if (!pnfList.isEmpty() && pnfList.containsKey(collectorPtniiName)){ + isValid = true; + logger.info("\t Pnf [" + collectorPtniiName + "] found in AAI"); + return isValid; + } + List<Vertex> collectorPnfList = g.V().has(this.PROPERTY_PNF_NAME, collectorPtniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList(); + if (collectorPnfList != null && collectorPnfList.size() == 1) { + isValid = true; + pnfList.put(collectorPtniiName, collectorPnfList.get(0)); + logger.info("\t Pnf [" + collectorPtniiName + "] found in AAI"); + } else if (collectorPnfList == null || collectorPnfList.size() == 0) { + logger.info("\t ERROR: Failure to find Pnf [" + collectorPtniiName + "] for EVC [" + evcName + "]"); + } + } + return isValid; + } + + private boolean validateBearerPnf(String bearerPtniiName, String evcName) { + boolean isValid = false; + if (!AAIConfig.isEmpty(bearerPtniiName)) { + if (!pnfList.isEmpty() && pnfList.containsKey(bearerPtniiName)){ + isValid = true; + logger.info("\t Pnf [" + bearerPtniiName + "] found in AAI"); + return isValid; + } + List<Vertex> bearerPnfList = g.V().has(this.PROPERTY_PNF_NAME, bearerPtniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList(); + if (bearerPnfList!= null && bearerPnfList.size() == 1){ + isValid = true; + pnfList.put(bearerPtniiName, 
bearerPnfList.get(0)); + logger.info("\t Pnf ["+ bearerPtniiName + "] found in AAI"); + } + else if (bearerPnfList == null || bearerPnfList.size() == 0) { + logger.info("\t ERROR: Failure to find Pnf ["+ bearerPtniiName + "] for EVC [" + evcName + "]"); + } + } + return isValid; + } + + private boolean validateCollectorPort(Map<String, String> pathColValues, String collectorPortAid, String collectorPtniiName, String evcName) { + boolean isValid = false; + + if (!AAIConfig.isEmpty(collectorPortAid)) { + + boolean isPortAidALagIntf = false; + GraphTraversal<Vertex, Vertex> collectorPortList; + String lagInterface = null; + + int lagIdentifierIndex = collectorPortAid.indexOf("_"); + + if (lagIdentifierIndex > 0) { + String[] subStringList = collectorPortAid.split("_"); + lagInterface = subStringList[0]; //forwarder will be related to this lagInterface + isPortAidALagIntf = true; + } + + if (isPortAidALagIntf) + { + if (!portList.isEmpty() && portList.containsKey(collectorPtniiName+"_"+lagInterface)){ + isValid = true; + logger.info("\t lag-interface [" + lagInterface + "] found in AAI"); + populatePathFileMapWithForwarderInfo(collectorPtniiName, evcName, lagInterface, portList.get(collectorPtniiName+"_"+lagInterface)); + return isValid; + } + Vertex collectorPnfVtx = pnfList.get(collectorPtniiName); + if (collectorPnfVtx == null ) { + logger.info("\t ERROR: Failure to find lag-interface ["+ lagInterface + "] for EVC [" + evcName + "]"); + return isValid; + } else { + collectorPortList = g.V(collectorPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", lagInterface).has("aai-node-type", "lag-interface"); + + if (collectorPortList!= null && collectorPortList.hasNext()) { + Vertex lagInterfaceVtx = collectorPortList.next(); + if (lagInterfaceVtx != null && lagInterfaceVtx.property("interface-name").isPresent()) { + isValid = true; + portList.put(collectorPtniiName+"_"+lagInterface, lagInterfaceVtx); + populatePathFileMapWithForwarderInfo(collectorPtniiName, evcName, lagInterface, lagInterfaceVtx); + logger.info("\t lag-interface [" + lagInterface + + "] found in AAI"); + } + } + else if (collectorPortList == null || !collectorPortList.hasNext()) { + logger.info("\t ERROR: Failure to find lag-interface ["+ lagInterface + "] for EVC [" + evcName + "]"); + } + } + } + else if (!isPortAidALagIntf) + { + if (!portList.isEmpty() && portList.containsKey(collectorPtniiName+"_"+collectorPortAid)){ + isValid = true; + logger.info("\t p-interface [" + collectorPortAid + "] found in AAI"); + populatePathFileMapWithForwarderInfo(collectorPtniiName, evcName, collectorPortAid, portList.get(collectorPtniiName+"_"+collectorPortAid)); + return isValid; + } + + Vertex collectorPnfVtx = pnfList.get(collectorPtniiName); + if (collectorPnfVtx == null ) { + logger.info("\t ERROR: Failure to find p-interface ["+ collectorPortAid + "] for EVC [" + evcName + "]"); + return isValid; + } else { + collectorPortList =g.V(collectorPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", collectorPortAid).has("aai-node-type", "p-interface"); + + if (collectorPortList!= null && collectorPortList.hasNext()) { + Vertex pInterfaceVtx = collectorPortList.next(); + if (pInterfaceVtx != null && pInterfaceVtx.property("interface-name").isPresent()) { + isValid = true; + portList.put(collectorPtniiName+"_"+collectorPortAid, pInterfaceVtx ); + populatePathFileMapWithForwarderInfo(collectorPtniiName, evcName, collectorPortAid, pInterfaceVtx); + logger.info("\t p-interface [" + collectorPortAid + + "] 
found in AAI"); + } + } + else if (collectorPortList == null || !collectorPortList.hasNext()) { + logger.info("\t ERROR: Failure to find p-interface ["+ collectorPortAid + "] for EVC [" + evcName + "]"); + } + } + } + } + return isValid; + } + + private boolean validateBearerPort(Map<String, String> pathColValues, String bearerPortAid, String bearerPtniiName, String evcName) { + boolean isValid = false; + + if (!AAIConfig.isEmpty(bearerPortAid)) { + GraphTraversal<Vertex, Vertex> bearerPortList; + + boolean isPortAidALagIntf = false; + GraphTraversal<Vertex, Vertex> collectorPortList; + String lagInterface = null; + + int lagIdentifierIndex = bearerPortAid.indexOf("_"); + + if (lagIdentifierIndex > 0) { + String[] subStringList = bearerPortAid.split("_"); + lagInterface = subStringList[0]; //forwarder will be related to this lagInterface + isPortAidALagIntf = true; + } + + if (isPortAidALagIntf) + { + if (!portList.isEmpty() && portList.containsKey(bearerPtniiName+"_"+lagInterface)){ + isValid = true; + logger.info("\t lag-interface [" + lagInterface + "] found in AAI"); + populatePathFileMapWithForwarderInfo(bearerPtniiName, evcName, lagInterface, portList.get(bearerPtniiName+"_"+lagInterface)); + return isValid; + } + Vertex bearerPnfVtx = pnfList.get(bearerPtniiName); + if (bearerPnfVtx == null ) { + logger.info("\t ERROR: Failure to find lag-interface ["+ lagInterface + "] for EVC [" + evcName + "]"); + return isValid; + } else { + GraphTraversal<Vertex, Vertex> lagPortList = g.V(bearerPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", lagInterface).has("aai-node-type", "lag-interface"); + if (lagPortList!= null && lagPortList.hasNext()) { + Vertex lagInterfaceVtx = lagPortList.next(); + if (lagInterfaceVtx != null && lagInterfaceVtx.property("interface-name").isPresent()) { + isValid = true; + portList.put(bearerPtniiName+"_"+lagInterface, lagInterfaceVtx); + populatePathFileMapWithForwarderInfo(bearerPtniiName, evcName, lagInterface, lagInterfaceVtx); + logger.info("\t lag-interface [" + lagInterface + + "] found in AAI"); + } + } + else if (lagPortList == null || !lagPortList.hasNext()) { + logger.info("\t ERROR: Failure to find lag-interface ["+ lagInterface + "] for EVC [" + evcName + "]"); + } + } + } + else if (!isPortAidALagIntf) { + if (!portList.isEmpty() && portList.containsKey(bearerPtniiName+"_"+bearerPortAid)){ + isValid = true; + logger.info("\t p-interface [" + bearerPortAid + "] found in AAI"); + populatePathFileMapWithForwarderInfo(bearerPtniiName, evcName, bearerPortAid, portList.get(bearerPtniiName+"_"+bearerPortAid)); + return isValid; + } + Vertex bearerPnfVtx = pnfList.get(bearerPtniiName); + if (bearerPnfVtx == null ) { + logger.info("\t ERROR: Failure to find p-interface ["+ bearerPortAid + "] for EVC [" + evcName + "]"); + return isValid; + } else { + bearerPortList = g.V(bearerPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", bearerPortAid).has("aai-node-type","p-interface"); + if (bearerPortList!= null && bearerPortList.hasNext()){ + Vertex pInterfaceVtx = bearerPortList.next(); + if (pInterfaceVtx != null && pInterfaceVtx.property("interface-name").isPresent()) { + isValid = true; + portList.put(bearerPtniiName+"_"+bearerPortAid, pInterfaceVtx); + populatePathFileMapWithForwarderInfo(bearerPtniiName, evcName, bearerPortAid, pInterfaceVtx); + logger.info("\t p-interface [" + bearerPortAid + + "] found in AAI"); + } + } + else if (bearerPortList == null || !bearerPortList.hasNext()) { + logger.info("\t ERROR: Failure 
to find p-interface ["+ bearerPortAid + "] for evc [" + evcName + "]"); + } + } + } + } + return isValid; + } + + private void populatePathFileMapWithForwarderInfo(String ptniiName, String evcName, String lagInterface, Vertex interfaceVtx) { + int size = 0; + Map<Vertex, String> interfaceMap = new HashMap<Vertex, String>(); + interfaceMap = pathFileMap.get(evcName); + if (interfaceMap != null && !interfaceMap.isEmpty()) { + size = interfaceMap.size(); + } + String sequence = Integer.toString(size + 1); + if (interfaceMap != null && size > 0){ + interfaceMap.put(interfaceVtx, sequence +"_"+ ptniiName+"_"+lagInterface); + } else{ + interfaceMap = new HashMap<Vertex, String>(); + interfaceMap.put(interfaceVtx, sequence +"_"+ptniiName+"_"+lagInterface ); + } + pathFileMap.put(evcName, interfaceMap); + } + + private void createNewForwardersFromPATHData(Map<String, String> pathColValues, String evcName, int fileLineCounter) { + Map<Vertex, String> forwarderMap = pathFileMap.get(evcName); + List<Vertex> forwardingPathVtxList = g.V().has(this.FORWARDING_PATH_ID, evcName).has(AAIProperties.NODE_TYPE, FORWARDING_PATH_NODE_TYPE).toList(); + if (forwardingPathVtxList != null && !forwardingPathVtxList.isEmpty()) { + Vertex forwardingPathVtx = forwardingPathVtxList.get(0); + if (forwarderMap != null && !forwarderMap.isEmpty()) { + //for each forwarder, create the new forwarder object + forwarderMap.forEach((portVtx, port) -> { + + Vertex forwarderVtx = createForwarderObject(evcName, portVtx, port, forwardingPathVtx); + if (forwarderVtx != null) { + String forwarderRole = forwarderVtx.value("forwarder-role").toString(); + Vertex configurationVtx = createConfigurationObject(evcName, portVtx, port, forwarderVtx); + createForwarderEvcObject(pathColValues, forwarderRole, portVtx, port, + configurationVtx); + } + }); + } + } else { + falloutEvcsList.put((fileLineCounter + 1), "["+ evcName +"] Forwarding-path does not exist for EVC"); + falloutEvcsCount++; + //Reduce the count of processed evcs since this EVC row cannot be processed + processedEvcsCount--; + logger.info("\t ERROR: Forwarding-path does not exist for EVC [" + evcName + "] skipping processing for this EVC."); + } + } + + private Vertex createForwarderObject(String evcName, Vertex intfVertex, String port, Vertex forwardingPathVtx) { + Vertex forwarderVtx = null; + + try { + //check if the forwarder was already created + List<Vertex> forwardersList = g.V(forwardingPathVtx.id()).in("org.onap.relationships.inventory.BelongsTo").toList(); + Iterator<Vertex> forwardersItr = forwardersList.iterator(); + while (forwardersItr.hasNext()){ + Vertex existingForwarderVtx = forwardersItr.next(); + Vertex existingIntfVtx = g.V(existingForwarderVtx).out("org.onap.relationships.inventory.ForwardsTo").toList().get(0); + if( existingIntfVtx.id().equals(intfVertex.id())) { + //this forwarder has already been created from the forwarderMap + return null; + } + } + Integer sequence = getSequenceFromPathMapPort(port); + String role = getForwarderRole(port); + + Introspector forwarder = loader.introspectorFromName("forwarder"); + forwarderVtx = serializer.createNewVertex(forwarder); + + if (sequence != null && role != null) { + forwarder.setValue("sequence", sequence); + forwarder.setValue("forwarder-role", role ); + + //Create tree edge from forwarding-path + this.createTreeEdge(forwardingPathVtx, forwarderVtx); + //Create cousin edge to p-interface or lag-interface + this.createCousinEdge(intfVertex, forwarderVtx); + + serializer.serializeSingleVertex(forwarderVtx, 
forwarder, "migrations"); + +// String forwarderVtxProps = this.asString(forwarderVtx); +// logger.info(" forwarderVtxProps:" + forwarderVtxProps); + + String forwarderVtxSequence = forwarderVtx.value("sequence").toString() ; + String forwarderVtxRole = forwarderVtx.value("forwarder-role").toString(); + String forwardingPathId = forwardingPathVtx.value("forwarding-path-id").toString(); + + logger.info("\t Created new forwarder " + forwarderVtx + " with sequence = " + forwarderVtxSequence + " with role [" + forwarderVtxRole + +"] as a child of forwarding-path [" + forwardingPathId + "]" ); + + String dmaapMsg = System.nanoTime() + "_" + forwarderVtx.id().toString() + "_" + forwarderVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + +// Introspector forwarderIntrospector = serializer.getLatestVersionView(forwarderVtx); +// this.notificationHelper.addEvent(forwarderVtx, forwarderIntrospector, EventAction.CREATE, this.serializer +// .getURIForVertex(forwarderVtx, false)); +// logger.info("\t Dmaap event sent for " + forwarderVtx + " for port ["+intfVertex.toString() + "] with sequence = [" + sequence + "] and role [" + role +"]" ); + } + } catch (Exception e) { + logger.info("\t ERROR: Failure to PUT forwarder for EVC [" + evcName + "]" ); + } + return forwarderVtx; + } + + private Integer getSequenceFromPathMapPort(String port) { + String[] subStringList = port.split("_"); + String sequenceStr = subStringList[0]; //forwarder will be have this sequence + if (sequenceStr != null && !sequenceStr.isEmpty()) { + return Integer.parseInt(sequenceStr); + } else { + return null; + } + + } + + private Vertex createConfigurationObject(String evcName, Vertex portVtx, String port, Vertex forwarderVtx) { + Vertex configurationVtx = null; + String configurationId = null; + try { + Introspector configuration = loader.introspectorFromName(CONFIGURATION_NODE_TYPE); + + configurationVtx = serializer.createNewVertex(configuration); + String sequence = forwarderVtx.value("sequence").toString(); + configurationId = evcName + "-" + sequence; + configuration.setValue("configuration-id", configurationId); + configuration.setValue("configuration-type", "forwarder"); + configuration.setValue("configuration-sub-type", "forwarder"); + this.createCousinEdge(forwarderVtx, configurationVtx); + serializer.serializeSingleVertex(configurationVtx, configuration, "migrations"); + + logger.info("\t Created new configuration for forwarder " + configurationVtx + " with configuration-id= " + configurationVtx.value("configuration-id").toString() ); + + String dmaapMsg = System.nanoTime() + "_" + configurationVtx.id().toString() + "_" + configurationVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); +// Introspector introspector = serializer.getLatestVersionView(configurationVtx); +// this.notificationHelper.addEvent(configurationVtx, introspector, EventAction.CREATE, this.serializer.getURIForVertex(configurationVtx, false)); +// logger.info("\t Dmaap event sent for " + configurationVtx + " with configuration-id = " + configurationVtx.value("configuration-id").toString() ); + + return configurationVtx; + } catch (Exception e) { + logger.info("\t ERROR: Failure to PUT Configuration for forwarder [" + configurationId + "]" ); + } + return configurationVtx; + } + + private Vertex createForwarderEvcObject(Map<String, String> pathColValues, String forwarderRole, Vertex portVtx, String port, Vertex configurationVtx) { + Vertex forwarderEvcVtx = null; + String configurationId = null; + try { + 
+ Introspector forwarderEvc = loader.introspectorFromName(FORWARDER_EVC_NODE_TYPE); + forwarderEvcVtx = serializer.createNewVertex(forwarderEvc); + configurationId = configurationVtx.value(this.PROPERTY_CONFIGURATION_ID).toString(); + + String collectorFacingCircuit = pathColValues.get("collectorFacingCircuit"); + String bearerFacingCircuit = pathColValues.get("bearerFacingCircuit"); + String collectorCvlan = pathColValues.get("collectorCvlan"); + String bearerCvlan = pathColValues.get("bearerCvlan"); + String collectorSvlan = pathColValues.get("collectorSvlan"); + String bearerSvlan = pathColValues.get("bearerSvlan"); + + forwarderEvc.setValue("forwarder-evc-id", configurationId); + + //Don't set circuitid for forwarder-evc connected to configuration that's connected to intermediate forwarder. + if ("ingress".equalsIgnoreCase(forwarderRole)){ + forwarderEvc.setValue("circuit-id", checkForNull(collectorFacingCircuit)); + if (collectorCvlan != null && !collectorCvlan.isEmpty()) { + forwarderEvc.setValue("cvlan", collectorCvlan); + } + if (collectorSvlan != null && !collectorSvlan.isEmpty()) { + forwarderEvc.setValue("svlan", collectorSvlan); + } + } else if ("egress".equalsIgnoreCase(forwarderRole)){ + forwarderEvc.setValue("circuit-id", bearerFacingCircuit); + if (bearerCvlan != null && !bearerCvlan.isEmpty()) { + forwarderEvc.setValue("cvlan", bearerCvlan); + } + if (bearerSvlan != null && !bearerSvlan.isEmpty()) { + forwarderEvc.setValue("svlan", bearerSvlan); + } + } else { + int lastIndex = configurationId.lastIndexOf("-"); + String sequenceStr = configurationId.substring(lastIndex + 1); + int i = Integer.parseInt(sequenceStr); + if (i%2 == 0){ + forwarderEvc.setValue("cvlan", checkForNull(bearerCvlan)); + forwarderEvc.setValue("svlan", checkForNull(bearerSvlan)); + } else { + forwarderEvc.setValue("cvlan", checkForNull(collectorCvlan)); + forwarderEvc.setValue("svlan", checkForNull(collectorSvlan)); + } + } + this.createTreeEdge(configurationVtx, forwarderEvcVtx); + serializer.serializeSingleVertex(forwarderEvcVtx, forwarderEvc, "migrations"); + + logger.info("\t Created new forwarder-evc as a child of configuration " + forwarderEvcVtx + " with forwarder-evc-id= " + forwarderEvcVtx.value("forwarder-evc-id").toString() ); + String dmaapMsg = System.nanoTime() + "_" + forwarderEvcVtx.id().toString() + "_" + forwarderEvcVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + +// logger.info("\t Forwarder-evc: "+ this.asString(forwarderEvcVtx)); + +// Introspector introspector = serializer.getLatestVersionView(forwarderEvcVtx); +// this.notificationHelper.addEvent(forwarderEvcVtx, introspector, EventAction.CREATE, this.serializer.getURIForVertex(forwarderEvcVtx, false)); +// logger.info("\t Dmaap event sent for " + forwarderEvcVtx + " with forwarder-evc-id = " + forwarderEvcVtx.value("forwarder-evc-id").toString() ); + } catch (Exception e) { + logger.info("\t ERROR: Failure to PUT forwarder-evc for configuration [" + configurationId + "]" ); + } + return forwarderEvcVtx; + + } + + private String getForwarderRole( String port) { + String role = null; + Integer seq = getSequenceFromPathMapPort(port); + if (seq != null ) { + int sequence = seq.intValue(); + if (sequence == 1){ + role = "ingress"; + } else if (sequence > 1 && port.indexOf(".") > 0) { + role = "egress"; + } else { + role = "intermediate"; + } + } + return role; + } + + private String checkForNull(String s){ + if (s!= null && !s.isEmpty()){ + return s; + } + return null; + } + + @Override + public Status getStatus()
{ + if (checkLog) { + return Status.CHECK_LOGS; + } + else if (success) { + return Status.SUCCESS; + } + else { + return Status.FAILURE; + } + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{this.FORWARDING_PATH_NODE_TYPE}); + } + + @Override + public String getMigrationName() { + return "MigratePATHEvcInventory"; + } +} diff --git a/src/main/java/org/onap/aai/migration/v12/MigratePATHPhysicalInventory.java b/src/main/java/org/onap/aai/migration/v12/MigratePATHPhysicalInventory.java new file mode 100644 index 0000000..af3d90a --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v12/MigratePATHPhysicalInventory.java @@ -0,0 +1,348 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v12; +/*- + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.Introspector; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Migrator; +import org.onap.aai.migration.Status; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.util.AAIConfig; + + +@MigrationPriority(26) +@MigrationDangerRating(100) +public class MigratePATHPhysicalInventory extends Migrator { + + private static List<String> lagPortList = new ArrayList<String>(); + private static Map<String, Vertex> pnfList = new HashMap<String, Vertex>(); + private final String LAGINTERFACE_NODE_TYPE = "lag-interface"; + private final String PNF_NODE_TYPE = "pnf"; + private final String PROPERTY_PNF_NAME = "pnf-name"; + private final String PROPERTY_INTERFACE_NAME = "interface-name"; + private final String LAG_INTERFACE_NODE_TYPE = "lag-interface"; + private static boolean success = true; + private static boolean checkLog = false; + private static GraphTraversalSource g = null; + private int headerLength; + + private static List<String> dmaapMsgList = new ArrayList<String>(); + private static final String homeDir = System.getProperty("AJSC_HOME"); + + //Create a map to store the evcs processed where lag-interfaces were found to track the sequence of ports + //key contains the evcName + //value is a map that contains the mapping for sequence of forwarders and corresponding portAids in the order they are found + + private static Map<String, Map<Vertex, String>> pathFileMap = new HashMap<String, Map<Vertex, String>>(); + + private static int processedLagInterfacesCount = 0; + private static int skippedRowCount = 0; + //Map with lineNumber and the reason for failure for each interface + private static Map<String, String> lagInterfacesNotProcessedMap = new HashMap<String, String>(); + + + public MigratePATHPhysicalInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + @Override + public void run() { + logger.info("---------- Start migration of PATH file Physical Inventory ----------"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return; + } + if (configDir == null) { + success = false; + return; + } + + String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/"; + int fileLineCounter = 0; + String fileName = feedDir+ "path.csv"; + 
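// The feed is read from migration-input-files/sarea-inventory/path.csv; each data
+ // row is split with line.split("\\s*,\\s*", -1), where the -1 limit preserves
+ // trailing empty fields so the tail columns still index correctly.
+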
logger.info(fileName); + logger.info("---------- Processing PATH Entries from file ----------"); + try { + List<String> lines = Files.readAllLines(Paths.get(fileName)); + Iterator<String> lineItr = lines.iterator(); + while (lineItr.hasNext()){ + String line = lineItr.next().replace("\n", "").replace("\r", ""); + logger.info("\n"); + if (!line.isEmpty()) { + if (fileLineCounter != 0) { + String[] colList = line.split("\\s*,\\s*", -1); + Map<String, String> pathColValues = new HashMap<String, String>(); + pathColValues.put("evcName", colList[1]); + pathColValues.put("bearerFacingCircuit", colList[4]); + pathColValues.put("bearerCvlan", colList[6]); + pathColValues.put("bearerSvlan", colList[7]); + pathColValues.put("bearerPtniiName", colList[8]); + pathColValues.put("bearerPortAid", colList[12]); + pathColValues.put("collectorFacingCircuit", colList[14]); + pathColValues.put("collectorCvlan", colList[16]); + pathColValues.put("collectorSvlan", colList[17]); + pathColValues.put("collectorPtniiName", colList[18]); + pathColValues.put("collectorPortAid", colList[22]); + + // For each row, check if the collector and bearerPnfs exist and create lag interfaces + + validateCollectorPnfAndCreateLagInterface(pathColValues, (fileLineCounter+1)); + validateBearerPnfAndCreateLagInterface(pathColValues, (fileLineCounter+1)); + + } else { + this.headerLength = line.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength); + if (this.headerLength < 24){ + logger.info("ERROR: Input file should have 24 columns"); + this.success = false; + return; + } + } + } + fileLineCounter++; + } + logger.info ("\n \n ******* Final Summary for PATH FILE Physical Inventory Migration ********* \n"); + logger.info("Lag Interfaces processed: "+processedLagInterfacesCount); + logger.info("Total Rows Count: "+(fileLineCounter + 1)); + logger.info("Fallout Lag Interfaces Count : "+lagInterfacesNotProcessedMap.size() +"\n"); + + if (!lagInterfacesNotProcessedMap.isEmpty()) { + logger.info("------ Fallout Details: ------"); + lagInterfacesNotProcessedMap.forEach((lineEntry, errorMsg) -> { + int lineNumberIndex = lineEntry.indexOf("-"); + String lineNumber = lineEntry.substring(0, lineNumberIndex); + String portDetail = lineEntry.substring(lineNumberIndex+1); + logger.info(errorMsg + ": on row "+ lineNumber +" for PortAid ["+ portDetail+"]"); + }); + } + } catch (FileNotFoundException e) { + logger.info("ERROR: Could not find file " + fileName, e.getMessage()); + success = false; + checkLog = true; + } catch (IOException e) { + logger.info("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info("encountered exception", e); + e.printStackTrace(); + success = false; + } + } + + + private void validateBearerPnfAndCreateLagInterface(Map<String, String> pathColValues, int lineNumber) { + String bearerPtniiName = pathColValues.get("bearerPtniiName"); + String bearerPortAid = pathColValues.get("bearerPortAid"); + Vertex pnfVtx = getPnf(bearerPtniiName); + if (pnfVtx != null){ + //create lag-interface + createLagInterfaceObject(pnfVtx, bearerPortAid, lineNumber); + } else { + int lagIdentifierIndex = bearerPortAid.indexOf("_"); + if (lagIdentifierIndex > 0) { + lagInterfacesNotProcessedMap.put(""+ lineNumber+ "-"+bearerPtniiName+"-"+bearerPortAid+"", "Pnf ["+bearerPtniiName+"] not found" ); + } + } + + } + + private void validateCollectorPnfAndCreateLagInterface(Map<String, String> pathColValues, int lineNumber) { + String collectorPtniiName =
pathColValues.get("collectorPtniiName"); + String collectorPortAid = pathColValues.get("collectorPortAid"); + Vertex pnfVtx = getPnf(collectorPtniiName); + if (pnfVtx != null){ + //create lag-interface + createLagInterfaceObject(pnfVtx, collectorPortAid, lineNumber); + }else { + int lagIdentifierIndex = collectorPortAid.indexOf("_"); + if (lagIdentifierIndex > 0) { + lagInterfacesNotProcessedMap.put(""+ lineNumber+ "-"+collectorPtniiName+"-"+collectorPortAid+"", "Pnf ["+collectorPtniiName+"] not found" ); + } + } + } + + private void createLagInterfaceObject(Vertex pnfVtx, String portAid, int lineNumber) { + String pnfName = pnfVtx.value(PROPERTY_PNF_NAME); + + if (pnfName != null && !pnfName.isEmpty()) { + + if(portAid == null || portAid.isEmpty()){ + logger.info("\t Invalid Port entry [" +portAid + "] - Invalid record - skipping..." ); + } else{ + if (!AAIConfig.isEmpty(portAid)) { + GraphTraversal<Vertex, Vertex> portList; + + boolean isPortAidALagIntf = false; + String interfaceName = null; + + int lagIdentifierIndex = portAid.indexOf("_"); + + if (lagIdentifierIndex > 0) { + String[] subStringList = portAid.split("_"); + interfaceName = subStringList[0]; + isPortAidALagIntf = true; + } + + if (isPortAidALagIntf) + { + try { + + if (lagPortList != null && lagPortList.contains(pnfName+"_"+interfaceName)){ + logger.info("\t lag-interface [" + interfaceName + "] already exists in AAI - skipping"); + return; + } + + + portList = g.V(pnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", interfaceName).has("aai-node-type", "lag-interface"); + if (portList!= null && portList.hasNext()) { + Vertex lagInterfaceVtx = portList.next(); + if (lagInterfaceVtx != null && lagInterfaceVtx.property("interface-name").isPresent()) { + logger.info("\t lag-interface [" + interfaceName + "] already exists in AAI - skipping"); +// lagInterfacesNotProcessedMap.put(""+lineNumber+"-"+pnfName+"-"+portAid+"", "lag-interface already exists for ["+interfaceName+"]" ); + } + } + else if (portList == null || !portList.hasNext()) { + //Create lag-interface in pnf + Introspector lagInterface = loader.introspectorFromName(LAG_INTERFACE_NODE_TYPE); + + Vertex lagIntVtx = serializer.createNewVertex(lagInterface); + lagInterface.setValue("interface-name", interfaceName); + this.createTreeEdge(pnfVtx, lagIntVtx); + serializer.serializeSingleVertex(lagIntVtx, lagInterface, "migrations"); + + logger.info("\t Created new lag-interface " + lagIntVtx + " with interface-name= " + lagIntVtx.value("interface-name")); + + processedLagInterfacesCount++; + lagPortList.add(pnfName+"_"+interfaceName); + + String dmaapMsg = System.nanoTime() + "_" + lagIntVtx.id().toString() + "_" + lagIntVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); +// Introspector introspector = serializer.getLatestVersionView(lagIntVtx); +// this.notificationHelper.addEvent(lagIntVtx, introspector, EventAction.CREATE, this.serializer.getURIForVertex(lagIntVtx, false)); +// logger.info("\t Dmaap event sent for " + lagIntVtx + " with interface-name= " + lagIntVtx.value("interface-name").toString() ); + } + } catch (Exception e) { + logger.info("\t ERROR: Failure to create lag-interface ["+ interfaceName + "]"); + lagInterfacesNotProcessedMap.put(""+lineNumber+"-"+pnfName+"-"+portAid+"", "Failed to create lag-interface ["+interfaceName+"]" ); + } + } + else + { + logger.info("\t Port-Aid[" +portAid +"] on PNF["+pnfName+"] not a lag-interface, skipping...."); + } + } + + } + } + } + + + private Vertex getPnf(String ptniiName) { 
+ Vertex pnfVtx = null; + if (!AAIConfig.isEmpty(ptniiName)) { + if (!pnfList.isEmpty() && pnfList.containsKey(ptniiName)){ + return pnfList.get(ptniiName); + } + List<Vertex> collectorPnfList = g.V().has(this.PROPERTY_PNF_NAME, ptniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList(); + if (collectorPnfList != null && collectorPnfList.size() == 1) { + pnfVtx = collectorPnfList.get(0); + pnfList.put(ptniiName, pnfVtx); + logger.info("\t Pnf [" + ptniiName + "] found in AAI"); + } else if (collectorPnfList == null || collectorPnfList.size() == 0) { + logger.info("\t ERROR: Failure to find Pnf [" + ptniiName + "]" ); + } + } else { + logger.info("\t ERROR: Failure to find Pnf [" + ptniiName + "]" ); + } + return pnfVtx; + } + + @Override + public Status getStatus() { + if (checkLog) { + return Status.CHECK_LOGS; + } + else if (success) { + return Status.SUCCESS; + } + else { + return Status.FAILURE; + } + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{this.LAG_INTERFACE_NODE_TYPE}); + } + + @Override + public String getMigrationName() { + return "MigratePATHPhysicalInventory"; + } +} diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateSAREvcInventory.java b/src/main/java/org/onap/aai/migration/v12/MigrateSAREvcInventory.java new file mode 100644 index 0000000..e05999d --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v12/MigrateSAREvcInventory.java @@ -0,0 +1,551 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v12; +/*- + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Optional; +import java.util.List; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.Introspector; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.*; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.util.AAIConfig; + + +@MigrationPriority(27) +@MigrationDangerRating(100) +public class MigrateSAREvcInventory extends Migrator { + + private static Map<String, Vertex> pnfList = new HashMap<String, Vertex>(); + private static List<String> portList = new ArrayList<String>(); + private final String SAREA_GLOBAL_CUSTOMER_ID = "8a00890a-e6ae-446b-9dbe-b828dbeb38bd"; + private final String CONFIGURATION_NODE_TYPE = "configuration"; + private final String SERVICE_INSTANCE_NODE_TYPE = "service-instance"; + private final String SERVICE_SUBSCRIPTION_NODE_TYPE = "service-subscription"; + private final String PROPERTY_SERVICE_TYPE = "service-type"; + private final String SERVICE_INSTANCE_ID = "service-instance-id"; + private final String FORWARDING_PATH_NODE_TYPE = "forwarding-path"; + private final String FORWARDING_PATH_ID = "forwarding-path-id"; + private final String EVC_NODE_TYPE = "evc"; + private final String PROPERTY_CONFIGURATION_ID = "configuration-id"; + private final String PNF_NODE_TYPE = "pnf"; + private final String PROPERTY_PNF_NAME = "pnf-name"; + private final String PROPERTY_INTERFACE_NAME = "interface-name"; + private final String PINTERFACE_NODE_TYPE = "p-interface"; + private static boolean success = true; + private static boolean checkLog = false; + private static GraphTraversalSource g = null; + private int headerLength; + + private static int processedEvcsCount = 0; + private static int falloutEvcsCount = 0; + private static Map<String, String> falloutEvcsMap = new HashMap<String, String>(); + + private static final String homeDir = System.getProperty("AJSC_HOME"); + private static List<String> dmaapMsgList = new ArrayList<String>(); + + public MigrateSAREvcInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + @Override + public void run() { + logger.info("---------- Start migration of SAR EVC Inventory ----------"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return; + } + if (configDir == null) { + success = false; + return; + } + + String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/"; + int fileLineCounter = 0; + String fileName = feedDir+ "sar.csv"; + logger.info(fileName);
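+ // sar.csv rows carry 21 columns; the port AIDs (columns 9 and 17) may arrive
+ // quoted and padded, so surrounding quotes and embedded whitespace are stripped
+ // before use.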
logger.info("---------- Processing SAR Entries from file ----------"); + + try { + String line; + + List<String> lines = Files.readAllLines(Paths.get(fileName)); + Iterator<String> lineItr = lines.iterator(); + while (lineItr.hasNext()){ + line = lineItr.next().replace("\n", "").replace("\r", ""); + logger.info("\n"); + if (!line.isEmpty()) { + if (fileLineCounter != 0) { + String[] colList = line.split("\\s*,\\s*", -1); +// if (colList.length != headerLength) { +// logger.info("ERROR: SAR line should contain " + headerLength + " columns, contains " + colList.length + " instead."); +// success = false; +// continue; +// } + Map<String, String> sarColValues = new HashMap<String, String>(); + sarColValues.put("evcName", colList[0]); + sarColValues.put("subscriberName", colList[1]); + sarColValues.put("espName", colList[2]); + sarColValues.put("bearerCircuitId", colList[3]); + sarColValues.put("bearerTagMode", colList[4]); + sarColValues.put("bearerCvlan", colList[5]); + sarColValues.put("bearerSvlan", colList[6]); + sarColValues.put("bearerPtniiName", colList[7]); + sarColValues.put("bearerSlotName", colList[8]); + String bearerPortAid = colList[9].replaceAll("^\"|\"$", "").replaceAll("\\s+",""); + sarColValues.put("bearerPortAid", bearerPortAid); + sarColValues.put("bearerPortType", colList[10]); + sarColValues.put("collectorCircuitId", colList[11]); + sarColValues.put("collectorTagMode", colList[12]); + sarColValues.put("collectorCvlan", colList[13]); + sarColValues.put("collectorSvlan", colList[14]); + sarColValues.put("collectorPtniiName", colList[15]); + sarColValues.put("collectorSlotName", colList[16]); + String collectorPortAid = colList[17].replaceAll("^\"|\"$", "").replaceAll("\\s+",""); + sarColValues.put("collectorPortAid", collectorPortAid); + sarColValues.put("collectorPortType", colList[18]); + sarColValues.put("espEvcCircuitId", colList[19]); + sarColValues.put("evcAccessCIR", colList[20]); + + String evcName = sarColValues.get("evcName"); + if (!AAIConfig.isEmpty(evcName)) { + logger.info("---------- Processing Line " + line + "----------"); + logger.info("\t Evc Name = " + evcName ); + + boolean isEntryValid = validatePnfsAndPorts(sarColValues, evcName); + + if (!isEntryValid){ + logger.info("\t ERROR: Skipping processing for line containing evc-name [" +evcName+ "]"); + falloutEvcsCount++; + falloutEvcsMap.put((fileLineCounter+1)+"", "["+evcName+"] - PortAid/Pnf does not exist" ); + fileLineCounter++; + continue; + } + + createNewObjectsFromSARFile(sarColValues, evcName, fileLineCounter); + + } + } else { + this.headerLength = line.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength); + if (this.headerLength < 21){ + logger.info("ERROR: Input file should have 21 columns"); + this.success = false; + return; + } + } + } + fileLineCounter++; + } + + logger.info ("\n \n ******* Final Summary for SAR FILE Migration ********* \n"); + logger.info("Evcs processed: "+processedEvcsCount); + logger.info("Fallout Evcs count: "+falloutEvcsCount); + if (!falloutEvcsMap.isEmpty()) { + logger.info("------ Fallout Details: ------"); + falloutEvcsMap.forEach((lineNumber, errorMsg) -> { + logger.info(errorMsg + ": on row "+lineNumber.toString()); + }); + } + } catch (FileNotFoundException e) { + logger.info("ERROR: Could not file file " + fileName, e.getMessage()); + success = false; + checkLog = true; + } catch (IOException e) { + logger.info("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info("encountered 
exception", e); + e.printStackTrace(); + success = false; + } + } + + + private boolean validatePnfsAndPorts(Map<String, String> sarColValues, String evcName) { + + String collectorPtniiName = sarColValues.get("collectorPtniiName"); + String bearerPtniiName = sarColValues.get("bearerPtniiName"); + String collectorPortAid = sarColValues.get("collectorPortAid"); + String bearerPortAid = sarColValues.get("bearerPortAid"); + boolean isValid = validateCollectorPnf(collectorPtniiName, evcName) && validateBearerPnf(bearerPtniiName, evcName) + && validateCollectorPort(collectorPortAid, collectorPtniiName, evcName) + && validateBearerPort(bearerPortAid, bearerPtniiName, evcName) ; + return isValid; + } + + private boolean validateCollectorPnf(String collectorPtniiName, String evcName) { + + boolean isValid = false; + if (!AAIConfig.isEmpty(collectorPtniiName)) { + if (!pnfList.isEmpty() && pnfList.containsKey(collectorPtniiName)){ + isValid = true; + logger.info("\t Pnf [" + collectorPtniiName + "] found in AAI"); + return isValid; + } + List<Vertex> collectorPnfList = g.V().has(this.PROPERTY_PNF_NAME, collectorPtniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList(); + if (collectorPnfList != null && collectorPnfList.size() == 1) { + isValid = true; + pnfList.put(collectorPtniiName, collectorPnfList.get(0)); + logger.info("\t Pnf [" + collectorPtniiName + "] found in AAI"); + } else if (collectorPnfList == null || collectorPnfList.size() == 0) { + logger.info("\t ERROR: Failure to find Pnf [" + collectorPtniiName + "] for EVC [" + evcName + "]"); + } + } + return isValid; + } + + private boolean validateBearerPnf(String bearerPtniiName, String evcName) { + boolean isValid = false; + if (!AAIConfig.isEmpty(bearerPtniiName)) { + if (!pnfList.isEmpty() && pnfList.containsKey(bearerPtniiName)){ + isValid = true; + logger.info("\t Pnf [" + bearerPtniiName + "] found in AAI"); + return isValid; + } + List<Vertex> bearerPnfList = g.V().has(this.PROPERTY_PNF_NAME, bearerPtniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList(); + if (bearerPnfList!= null && bearerPnfList.size() == 1){ + isValid = true; + pnfList.put(bearerPtniiName, bearerPnfList.get(0)); + logger.info("\t Pnf ["+ bearerPtniiName + "] found in AAI"); + } + else if (bearerPnfList == null || bearerPnfList.size() == 0) { + logger.info("\t ERROR: Failure to find Pnf ["+ bearerPtniiName + "] for EVC [" + evcName + "]"); + } + } + return isValid; + } + + private boolean validateCollectorPort(String collectorPortAid, String collectorPtniiName, String evcName) { + boolean isValid = false; + if (!AAIConfig.isEmpty(collectorPortAid)) { + if (!portList.isEmpty() && portList.contains(collectorPtniiName+"_"+collectorPortAid)){ + isValid = true; + logger.info("\t Port ["+ collectorPortAid + "] found in AAI"); + return isValid; + } + GraphTraversal<Vertex, Vertex> collectorPortList; + Vertex collectorPnfVtx = pnfList.get(collectorPtniiName); + if (collectorPnfVtx == null ) { + logger.info("\t ERROR: Failure to find p-interface ["+ collectorPortAid + "] for EVC [" + evcName + "]"); + return isValid; + } else { + collectorPortList =g.V(collectorPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", collectorPortAid).has("aai-node-type", "p-interface"); + if (collectorPortList!= null && collectorPortList.hasNext()) { + isValid = true; + portList.add(collectorPtniiName+"_"+collectorPortAid); + logger.info("\t Port ["+ collectorPortAid + "] found in AAI"); + } + else if (collectorPortList == null || !collectorPortList.hasNext()) 
{ + logger.info("\t ERROR: Failure to find p-interface ["+ collectorPortAid + "] for EVC [" + evcName + "]"); + } + } + } + return isValid; + } + + private boolean validateBearerPort(String bearerPortAid, String bearerPtniiName, String evcName) { + boolean isValid = false; + + if (!AAIConfig.isEmpty(bearerPortAid)) { + if (!portList.isEmpty() && portList.contains(bearerPtniiName+"_"+bearerPortAid)){ + isValid = true; + logger.info("\t Port ["+ bearerPortAid + "] found in AAI"); + return isValid; + } + GraphTraversal<Vertex, Vertex> bearerPortList; + Vertex bearerPnfVtx = pnfList.get(bearerPtniiName); + if (bearerPnfVtx == null ) { + logger.info("\t ERROR: Failure to find p-interface ["+ bearerPortAid + "] for EVC [" + evcName + "]"); + return isValid; + } else { + bearerPortList =g.V(bearerPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", bearerPortAid).has("aai-node-type", "p-interface"); + if (bearerPortList!= null && bearerPortList.hasNext()){ + isValid = true; + portList.add(bearerPtniiName+"_"+bearerPortAid); + logger.info("\t Port ["+ bearerPortAid + "] found in AAI"); + } + else if (bearerPortList == null || !bearerPortList.hasNext()) { + logger.info("\t ERROR: Failure to find p-interface ["+ bearerPortAid + "] for evc [" + evcName + "]"); + } + } + } + return isValid; + } + + private void createNewObjectsFromSARFile(Map<String, String> sarColValues, String evcName, int lineNumber) { + Vertex serviceInstanceVtx = createNewServiceInstanceFromSARData(sarColValues, evcName, lineNumber); + if (serviceInstanceVtx != null && serviceInstanceVtx.property("service-instance-id").isPresent()) { + Vertex forwardingPathVtx = createNewForwardingPathFromSARData(sarColValues, serviceInstanceVtx, lineNumber); + Vertex configurationVtx = createNewConfigurationFromSARData(sarColValues, forwardingPathVtx, lineNumber); + Vertex evcVtx = createNewEvcFromSARData(sarColValues, configurationVtx, lineNumber); + } + } + + private Vertex createNewServiceInstanceFromSARData(Map<String, String> sarColValues, String evcName, int lineNumber) { + + String serviceType = "SAREA"; + Vertex serviceInstanceVtx = null; + + try { + + GraphTraversal<Vertex, Vertex> servSubVtxList = g.V().has("global-customer-id", SAREA_GLOBAL_CUSTOMER_ID) + .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA"); + + if (servSubVtxList!= null && servSubVtxList.hasNext()){ + Vertex serviceSubscriptionVtx = servSubVtxList.next(); + if (serviceSubscriptionVtx != null ) { + + List<Vertex> existingServInstVtxList = g.V(serviceSubscriptionVtx).in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "service-instance") + .has("service-instance-id",evcName).toList(); + + if (existingServInstVtxList!= null && existingServInstVtxList.size() >0){ + logger.info("\t service-instance already exists for evc " + evcName + " - skipping"); + + return existingServInstVtxList.iterator().next(); + } + else if (existingServInstVtxList!= null && existingServInstVtxList.size() == 0) { + Introspector servInstance = loader.introspectorFromName("service-instance"); + serviceInstanceVtx = serializer.createNewVertex(servInstance); + String serviceInstanceId = (String) sarColValues.get("evcName"); + servInstance.setValue("service-instance-id", serviceInstanceId); + servInstance.setValue("service-type", serviceType); + this.createTreeEdge(serviceSubscriptionVtx, serviceInstanceVtx); + serializer.serializeSingleVertex(serviceInstanceVtx, servInstance, "migrations"); + + logger.info("\t Created new 
service-instance " + serviceInstanceVtx + " with service-instance-id = " + serviceInstanceId ); + + String dmaapMsg = System.nanoTime() + "_" + serviceInstanceVtx.id().toString() + "_" + serviceInstanceVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + processedEvcsCount++; + } + else { + logger.info("\t ERROR: More than one service-instance found for evc-name: " + evcName); + } + } + } else { + logger.info("\t ERROR: SAREA Subscription not found for Customer ["+SAREA_GLOBAL_CUSTOMER_ID+"]"); + falloutEvcsCount++; + falloutEvcsMap.put((lineNumber+1)+"", "["+evcName+"] - SAREA Subscription not found for Customer ["+SAREA_GLOBAL_CUSTOMER_ID+"]" ); + } + } catch (Exception e) { + logger.info("\t ERROR: Failure to PUT service-instance for EVC [" + evcName + "]" ); + falloutEvcsCount++; + falloutEvcsMap.put((lineNumber+1)+"", "["+evcName+"] - Failure to PUT service-instance for EVC" ); + } + return serviceInstanceVtx; + + } + + private Vertex createNewForwardingPathFromSARData(Map<String, String> sarColValues, Vertex serviceInstanceVtx, int lineNumber) { + Vertex fpVertex = null; + String serviceInstanceId = serviceInstanceVtx.value(this.SERVICE_INSTANCE_ID); + + try { + + List<Vertex> fpList = g.V(serviceInstanceVtx).in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type","forwarding-path") + .has("forwarding-path-id", serviceInstanceId).toList(); + if (fpList != null && !fpList.isEmpty()){ + logger.info("\t forwarding-path already exists for evc " + serviceInstanceId + " - skipping"); + return fpList.iterator().next(); + } + + //If forwarding-path does not exist, create it + Introspector fpIntrospector = loader.introspectorFromName(FORWARDING_PATH_NODE_TYPE); + fpVertex = serializer.createNewVertex(fpIntrospector); + + fpIntrospector.setValue("forwarding-path-id", serviceInstanceId); + fpIntrospector.setValue("forwarding-path-name", serviceInstanceId); + this.createCousinEdge(fpVertex, serviceInstanceVtx); + serializer.serializeSingleVertex(fpVertex, fpIntrospector, "migrations"); + + logger.info("\t Created new forwarding-path " + fpVertex + " with forwarding-path-id = " + fpVertex.value("forwarding-path-id").toString() ); + String dmaapMsg = System.nanoTime() + "_" + fpVertex.id().toString() + "_" + fpVertex.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + + } catch (Exception e) { + logger.info("\t ERROR: Failure to PUT forwarding-path for EVC [" + serviceInstanceId + "]" ); + processedEvcsCount--; + falloutEvcsCount++; + falloutEvcsMap.put((lineNumber+1)+"", "["+serviceInstanceId+"] - Failure to PUT forwarding-path for EVC" ); + } + return fpVertex; + } + + private Vertex createNewConfigurationFromSARData(Map<String, String> sarColValues, Vertex forwardingPathVtx, int lineNumber) { + + Vertex configurationVtx = null; + String forwardingPathId = forwardingPathVtx.value(this.FOWARDING_PATH_ID); + try { + + List<Vertex> configList = g.V(forwardingPathVtx).out("org.onap.relationships.inventory.Uses").has("aai-node-type","configuration") + .has("configuration-id", forwardingPathId).toList(); + if (configList != null && !configList.isEmpty()){ + logger.info("\t configuration already exists for evc " + forwardingPathId + " - skipping"); + return configList.iterator().next(); + } + + //If configuration does not exist, create it + Introspector configuration = loader.introspectorFromName(CONFIGURATION_NODE_TYPE); + configurationVtx = serializer.createNewVertex(configuration); + + configuration.setValue("configuration-id", forwardingPathId); + 
configuration.setValue("configuration-type", "forwarding-path"); + configuration.setValue("configuration-sub-type", "evc"); + this.createCousinEdge(forwardingPathVtx, configurationVtx); + serializer.serializeSingleVertex(configurationVtx, configuration, "migrations"); + + logger.info("\t Created new configuration for forwarding-path " + configurationVtx + " with configuration-id= " + configurationVtx.value("configuration-id").toString() ); + + String dmaapMsg = System.nanoTime() + "_" + configurationVtx.id().toString() + "_" + configurationVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + + }catch (Exception e) { + logger.info("\t ERROR: Failure to PUT configuration for EVC [" + forwardingPathId + "]" ); + processedEvcsCount--; + falloutEvcsCount++; + falloutEvcsMap.put((lineNumber+1)+"", "["+forwardingPathId+"] - Failure to PUT configuration for EVC" ); + } + return configurationVtx; + } + + private Vertex createNewEvcFromSARData(Map<String, String> sarColValues, Vertex configurationVtx, int lineNumber) { + String evcId = null; + Vertex evcVtx = null; + try { + Introspector evc = loader.introspectorFromName(EVC_NODE_TYPE); + evcVtx = serializer.createNewVertex(evc); + evcId = configurationVtx.value(this.PROPERTY_CONFIGURATION_ID); + + String cir = sarColValues.get("evcAccessCIR"); + int length = cir.length(); + String cirValue = cir.substring(0,(length-4)); + String cirUnits = cir.substring((length-4), (length)); + + String espEvcCircuitId = sarColValues.get("espEvcCircuitId"); + String espName = sarColValues.get("espName"); + String collectorTagMode = sarColValues.get("collectorTagMode"); + String bearerTagMode = sarColValues.get("bearerTagMode"); + + evc.setValue("evc-id", evcId); + evc.setValue("forwarding-path-topology", "PointToPoint"); + evc.setValue("cir-value", checkForNull(cirValue)); + evc.setValue("cir-units", checkForNull(cirUnits)); + evc.setValue("esp-evc-circuit-id", checkForNull(espEvcCircuitId)); + evc.setValue("esp-evc-cir-value", checkForNull(cirValue)); + evc.setValue("esp-evc-cir-units", checkForNull(cirUnits)); + evc.setValue("esp-itu-code", checkForNull(espName)); + evc.setValue("tagmode-access-ingress", checkForNull(collectorTagMode)); + evc.setValue("tagmode-access-egress", checkForNull(bearerTagMode)); + this.createTreeEdge(configurationVtx, evcVtx); + serializer.serializeSingleVertex(evcVtx, evc, "migrations"); + + logger.info("\t Created new evc as a child of configuration " + evcVtx + " with evc-id= " + evcVtx.value("evc-id").toString() ); + String dmaapMsg = System.nanoTime() + "_" + evcVtx.id().toString() + "_" + evcVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + +// Introspector introspector = serializer.getLatestVersionView(evcVtx); +// this.notificationHelper.addEvent(evcVtx, introspector, EventAction.CREATE, this.serializer.getURIForVertex(evcVtx, false)); +// logger.info("\t Dmaap event sent for " + evcVtx + " with evc-id = " + evcId); + }catch (Exception e) { + logger.info("\t ERROR: Failure to PUT EVC for evc-name [" + evcId + "]" ); + processedEvcsCount--; + falloutEvcsCount++; + falloutEvcsMap.put((lineNumber+1)+"", "["+evcId+"] - Failure to PUT EVC" ); + } + return evcVtx; + + } + + private String checkForNull(String s){ + if (s!= null && !s.isEmpty()){ + return s; + } + return null; + } + + + @Override + public Status getStatus() { + if (checkLog) { + return Status.CHECK_LOGS; + } + else if (success) { + return Status.SUCCESS; + } + else { + return Status.FAILURE; + } + } + + @Override + public 
void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{this.SERVICE_INSTANCE_NODE_TYPE}); + } + + @Override + public String getMigrationName() { + return "MigrateSAREvcInventory"; + } +} diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartOne.java b/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartOne.java new file mode 100644 index 0000000..5f74835 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartOne.java @@ -0,0 +1,343 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v13; + +import java.io.BufferedReader; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.javatuples.Pair; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.*; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; + + +@Enabled +@MigrationPriority(20) +@MigrationDangerRating(100) +public class MigrateBadWidgetModelsPartOne extends EdgeSwingMigrator { + private boolean success = true; + private final GraphTraversalSource g; + private int candidateCount = 0; + private int nqEdgeCount = 0; + + // migration restrictions that we will use for this migration + private final String NODE_TYPE_RESTRICTION = "named-query-element"; + private final String EDGE_LABEL_RESTRICTION = "org.onap.relationships.inventory.IsA"; + private final String EDGE_DIR_RESTRICTION = "IN"; + + GraphTraversal<Vertex, Vertex> widgetModelTraversal; + GraphTraversal<Vertex, Vertex> widgetModelVersionTraversal; + GraphTraversal<Vertex, Vertex> validModVerTraversal; + GraphTraversal<Vertex, Vertex> widgetModelNqEdgeTraversal; + + + + public MigrateBadWidgetModelsPartOne(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, 
edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } else { + return Status.FAILURE; + } + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{"model", "named-query-element"}); + } + + @Override + public String getMigrationName() { + return "MigrateBadWidgetModelsPartOne"; + } + + + /** + * Get the List of node pairs("from" and "to"), you would like EdgeSwingMigrator to migrate. + * @return + */ + @Override + public List<Pair<Vertex, Vertex>> getAffectedNodePairs() { + + List<Pair<Vertex, Vertex>> fromToVertPairList = new ArrayList<Pair<Vertex, Vertex>>(); + ArrayList <Vertex> badModVtxList = new <Vertex> ArrayList (); + + logAndPrintInfo("--------- GET AFFECTED NODE PAIRS -------------"); + // Read the json file to populate the validWidgetModelVesionIdHash and also + // validWidgetModelInvIdHash which will be used to figure out which data is in the db with + // an invalid id. + ArrayList <String> fileLines = readInValidWidgetInfoFile(); + + // validWidgetModelVersionIdHash: key = nodeType, value = validModelVersionId for that nodeType + // Note - we currently only have one valid version per model for widget models. + HashMap <String,String> validModelInvariantIdHash = getModelInvariantIdHash( fileLines ); + + // See what (widget) models are being used in the DB + widgetModelTraversal = this.engine.asAdmin().getTraversalSource().V() + .has("aai-node-type", "model") + .has("model-type", "widget"); + + if(!(widgetModelTraversal.hasNext())){ + logAndPrintInfo("unable to find widget models in database. "); + } + + while (widgetModelTraversal.hasNext()) { + Vertex widgetModVertexInDb = widgetModelTraversal.next(); + String invId = widgetModVertexInDb.property("model-invariant-id").value().toString(); + if( validModelInvariantIdHash.containsValue(invId) ){ + // This is a valid model, we don't need to do anything with it. + continue; + } + // For this bad widget model, need to look at the model-version node to + // find out what type of widget it is supposed to be so we can look up the correct invId. + // Note - We expect just one per model, but there could be more. + logAndPrintInfo(" Found invalid widget model-invariant-id = [" + invId + "]."); + + // We're using badModIdList to help us figure out how many bad edges go with the + // bad model nodes - which is really just for logging purposes. + badModVtxList.add(widgetModVertexInDb); + + widgetModelVersionTraversal = this.engine.asAdmin().getTraversalSource() + .V(widgetModVertexInDb) + .in("org.onap.relationships.inventory.BelongsTo") + .has("aai-node-type", "model-ver"); + + if(!(widgetModelVersionTraversal.hasNext())){ + logAndPrintInfo("unable to find widget model version in database for model-invariant-id = [" + invId + "]."); + } + + while (widgetModelVersionTraversal.hasNext()) { + Vertex widgetModVersionVertex = widgetModelVersionTraversal.next(); + String nodeType = widgetModVersionVertex.property("model-name").value().toString(); + logAndPrintInfo(" nodeType that goes with invalid widget model-invariant-id = [" + invId + "] is: [" + nodeType + "]."); + + // Now we can use the nodeType to find the correct/valid model-invariant-id to use + if( validModelInvariantIdHash.containsKey(nodeType) ){ + // We know what the model-invariant-id SHOULD be, so swing edges from the invalid node to this valid one. 
+ String validModInvId = validModelInvariantIdHash.get(nodeType); + Iterator<Vertex> toVtxItr= + this.g.V().has("model-invariant-id",validModInvId).has(AAIProperties.NODE_TYPE, "model"); + int ct = 0; + while(toVtxItr.hasNext()) { + Vertex toValidVert = toVtxItr.next(); + ct++; + if( ct == 1 ){ + fromToVertPairList.add(new Pair<>(widgetModVertexInDb, toValidVert)); + } + else { + logAndPrintInfo("ERROR - More than one model node found for model-invariant-id = [" + validModInvId + "]."); + } + } + if( ct == 0 ){ + logAndPrintInfo("unable to find model node in database for valid model-invariant-id = [" + validModInvId + "]."); + } + } + else { + logAndPrintInfo("unable to find a valid widget model in database for model-name = [" + nodeType + "]."); + } + } + } + candidateCount = fromToVertPairList.size(); + + // For each of the bad model nodes, see how many actually have an IN edge from a named-query-element + for( int i = 0; i < badModVtxList.size(); i++ ){ + widgetModelNqEdgeTraversal = this.engine.asAdmin().getTraversalSource() + .V(badModVtxList.get(i)) + .in("org.onap.relationships.inventory.IsA") + .has("aai-node-type", "named-query-element"); + + if(widgetModelNqEdgeTraversal.hasNext()) { + nqEdgeCount++; + } + } + + return fromToVertPairList; + } + + + public String getNodeTypeRestriction(){ + return NODE_TYPE_RESTRICTION; + } + + public String getEdgeLabelRestriction(){ + return EDGE_LABEL_RESTRICTION; + } + + public String getEdgeDirRestriction(){ + return EDGE_DIR_RESTRICTION; + } + + /** + * Get the List of node pairs("from" and "to"), you would like EdgeSwingMigrator to migrate. + * @return + */ + public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) { + + // Cleanup of model nodes will be done by the other migration script after the + // model-ver records have edges swung off of them. + + // We're just going to give count of how many of these edges were found. 
+ logAndPrintInfo(" >>>> SUMMARY for Migration of named-query-element to model edges: "); + logAndPrintInfo(" >>>> Count of bad widget model nodes found: " + candidateCount ); + logAndPrintInfo(" >>>> Count of bad widget model nodes that have named-query-element edges: " + nqEdgeCount ); + + } + + private ArrayList <String> readInValidWidgetInfoFile(){ + + ArrayList <String> fileLines = new ArrayList <String> (); + String homeDir = System.getProperty("AJSC_HOME"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logAndPrintInfo("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return fileLines; + } + if (configDir == null) { + logAndPrintInfo("ERROR: Could not find sys prop BUNDLECONFIG_DIR"); + success = false; + return fileLines; + } + String fileName = homeDir + "/" + configDir + "/" + "migration-input-files/widget-model-migration-data/widget-model-migration-input.csv"; + try (BufferedReader br = new BufferedReader(new FileReader(fileName))) { + String modelInfoLine; + while ((modelInfoLine = br.readLine()) != null) { + modelInfoLine = modelInfoLine.replace("\n", "").replace("\r", ""); + if (!modelInfoLine.isEmpty()) { + fileLines.add(modelInfoLine); + } + } + + } + catch (FileNotFoundException e) { + logger.error("ERROR: Could not find file " + fileName, e); + success = false; + } catch (IOException e) { + logger.error("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.error("encountered exception", e); + e.printStackTrace(); + success = false; + } + return fileLines; + } + + + HashMap <String,String> getModelVersionIdHash( ArrayList <String> fileLines ){ + + HashMap <String, String> versionIdHash = new HashMap <String,String> (); + + if( fileLines == null ){ + logAndPrintInfo("ERROR: null fileLines array passed to getModelVersionIdHash"); + success = false; + return versionIdHash; + } + + for(int i = 0; i < fileLines.size(); i++ ){ + String mLine = fileLines.get(i); + String[] fields = mLine.split("\\,"); + if (fields.length != 3) { + logAndPrintInfo("ERROR: row in data file did not contain 3 elements. should have: model-name,model-version-id,model-invariant-id on each line."); + success = false; + } + else { + versionIdHash.put(fields[0],fields[1]); + } + } + + // Because of some bad data in the db, we will manually map the nodeType of "vdc" to what is + // the correct model info for "virtual-data-center". Problem is that there is no vdc nodeType, but + // there are named-queries pointing at a bad widget-model for "vdc". + String virtDataCenterVerId = versionIdHash.get("virtual-data-center"); + if( virtDataCenterVerId != null ){ + versionIdHash.put("vdc",virtDataCenterVerId ); + } + + return versionIdHash; + } + + + HashMap <String,String> getModelInvariantIdHash( ArrayList <String> fileLines ){ + HashMap <String, String> invIdHash = new HashMap <String,String> (); + + if( fileLines == null ){ + logAndPrintInfo("ERROR: null fileLines array passed to getModelVersionIdHash"); + success = false; + return invIdHash; + } + + for(int i = 0; i < fileLines.size(); i++ ){ + String mLine = fileLines.get(i); + String[] fields = mLine.split("\\,"); + if (fields.length != 3) { + logAndPrintInfo("ERROR: row in data file did not contain 3 elements. 
should have: model-name,model-version-id,model-invariant-id on each line."); + success = false; + } + else { + invIdHash.put(fields[0],fields[2]); + } + } + + // Because of some bad data in the db, we will manually map the nodeType of "vdc" to what is + // the correct model info for "virtual-data-center". Problem is that there is no vdc nodeType, but + // there are named-queries pointing at a bad widget-model for "vdc". + String virtDataCenterInvId = invIdHash.get("virtual-data-center"); + if( invIdHash != null ){ + invIdHash.put("vdc",virtDataCenterInvId ); + } + + return invIdHash; + } + + /** + * Log and print. + * + * @param msg + * the msg + */ + protected void logAndPrintInfo(String msg) { + System.out.println(msg); + logger.info(msg); + } + + + +}
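
For reference, the migrator above (and MigrateBadWidgetModelsPartTwo, which follows) reads migration-input-files/widget-model-migration-data/widget-model-migration-input.csv as plain comma-separated rows with no header line, in the column order model-name,model-version-id,model-invariant-id. A minimal sketch of the expected contents (the UUIDs below are hypothetical placeholders, not real model IDs):

    generic-vnf,0b0f0fc3-0000-1111-2222-333344445555,acc6edd8-aaaa-bbbb-cccc-ddddeeee0000
    pserver,1d2e3f40-0000-1111-2222-333344445555,b54e5f76-aaaa-bbbb-cccc-ddddeeee1111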
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartTwo.java b/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartTwo.java new file mode 100644 index 0000000..bb525c3 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartTwo.java @@ -0,0 +1,508 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v13; + +import java.io.BufferedReader; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Direction; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.javatuples.Pair; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.*; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; + + +@Enabled +@MigrationPriority(21) +@MigrationDangerRating(100) +public class MigrateBadWidgetModelsPartTwo extends EdgeSwingMigrator { + private boolean success = true; + private final GraphTraversalSource g; + + // NOTE -- this migration is for "model-ver" nodes only. It needs to be run AFTER + // the MigrateWidgetModelsPartOne. 
+ // + + // migration restrictions that we will use for this migration + private final String NODE_TYPE_RESTRICTION = "model-element"; + private final String EDGE_LABEL_RESTRICTION = "org.onap.relationships.inventory.IsA"; + private final String EDGE_DIR_RESTRICTION = "IN"; + + GraphTraversal<Vertex, Vertex> widgetModelTraversal; + GraphTraversal<Vertex, Vertex> widgetModelVersionTraversal; + GraphTraversal<Vertex, Vertex> validModVerTraversal; + + + + public MigrateBadWidgetModelsPartTwo(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } else { + return Status.FAILURE; + } + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{"model", "model-element", "model-ver"}); + } + + @Override + public String getMigrationName() { + return "MigrateBadWidgetModelsPartTwo"; + } + + + /** + * Get the List of node pairs("from" and "to"), you would like EdgeSwingMigrator to migrate. + * @return + */ + @Override + public List<Pair<Vertex, Vertex>> getAffectedNodePairs() { + logAndPrintInfo("--------- GET AFFECTED NODE PAIRS -------------"); + // Read the json file to populate the validWidgetModelVesionIdHash and also + // validWidgetModelInvIdHash which will be used to figure out which data is in the db with + // an invalid id. + ArrayList <String> fileLines = readInValidWidgetInfoFile(); + + // validWidgetModelVersionIdHash: key = nodeType, value = validModelVersionId for that nodeType + // Note - we currently only have one valid version per model for widget models. + HashMap <String,String> validModelVersionIdHash = getModelVersionIdHash( fileLines ); + + // validWidgetModelVersionIdHash: key = nodeType, value = validModelVersionId for that nodeType + // Note - we currently only have one valid version per model for widget models. + HashMap <String,String> validModelInvariantIdHash = getModelInvariantIdHash( fileLines ); + + // Now we will see what is actually in the DB + List<Pair<Vertex, Vertex>> fromToVertPairList = new ArrayList<Pair<Vertex, Vertex>>(); + widgetModelTraversal = this.engine.asAdmin().getTraversalSource().V() + .has("aai-node-type", "model") + .has("model-type", "widget"); + + if(!(widgetModelTraversal.hasNext())){ + logAndPrintInfo("unable to find widget models in database. "); + } + + int validModelVerCount = 0; + while (widgetModelTraversal.hasNext()) { + Vertex widgetModVertex = widgetModelTraversal.next(); + String invId = widgetModVertex.property("model-invariant-id").value().toString(); + + // Find the model-version nodes that belong to this model. + // We expect just one per model, but there could be more. 
+ widgetModelVersionTraversal = this.engine.asAdmin().getTraversalSource() + .V(widgetModVertex) + .in("org.onap.relationships.inventory.BelongsTo") + .has("aai-node-type", "model-ver"); + + if(!(widgetModelVersionTraversal.hasNext())){ + logAndPrintInfo("unable to find widget model version in database for model-invariant-id = [" + invId + "]."); + } + + while (widgetModelVersionTraversal.hasNext()) { + Vertex widgetModVersionVertex = widgetModelVersionTraversal.next(); + String modVersionIdInDb = widgetModVersionVertex.property("model-version-id").value().toString(); + String nodeType = widgetModVersionVertex.property("model-name").value().toString(); + + if( validModelVersionIdHash.containsKey(nodeType) ){ + // We know what the model-version-id SHOULD be, so make sure we're using it. + String validModVerId = validModelVersionIdHash.get(nodeType); + if( !modVersionIdInDb.equals(validModVerId) ){ + logAndPrintInfo(" Bad model-version-id found in DB for model-name = " + nodeType + ", verId = " + modVersionIdInDb ); + validModVerTraversal = this.engine.asAdmin().getTraversalSource() + .V() + .has("model-version-id",validModVerId) + .has("aai-node-type","model-ver"); + if(!(validModVerTraversal.hasNext())){ + logAndPrintInfo("unable to find widget model version in database for valid model-version-id = [" + validModVerId + "]."); + } + int ct = 0; + while (validModVerTraversal.hasNext()) { + ct++; + if( ct > 1 ){ + logAndPrintInfo("ERROR - More than one model-ver found for model-version-id = [" + validModVerId + "]."); + break; + } + Vertex toVert = validModVerTraversal.next(); + fromToVertPairList.add(new Pair<>(widgetModVersionVertex, toVert)); + } + } + else { + validModelVerCount++; + logAndPrintInfo("Valid model-version-id used in DB for model-name = [" + nodeType + "]."); + } + } + else { + logAndPrintInfo("unable to find a valid widget model-ver in database for model-name = [" + nodeType + "]."); + } + } + } + + return fromToVertPairList; + } + + + public String getNodeTypeRestriction(){ + return NODE_TYPE_RESTRICTION; + } + + public String getEdgeLabelRestriction(){ + return EDGE_LABEL_RESTRICTION; + } + + public String getEdgeDirRestriction(){ + return EDGE_DIR_RESTRICTION; + } + + /** + * Get the List of node pairs("from" and "to"), you would like EdgeSwingMigrator to migrate. + * @return + */ + public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) { + + // The first node in each pair is the model-ver that we were migrating edges AWAY FROM because + // it is an invalid model-ver node. + // Delete those as well as their parent model node (if the parent model node has no other users + // and is not on the validModelInvIdList). + + int badModelVerCount = 0; + int modelVerDelCount = 0; + int modelDelCount = 0; + int parentPreventValidDelCount = 0; + + HashMap <String,String> parentPreventInEdgeIdHash = new HashMap <String,String> (); // using a hash so we can count the # of models, not edges to it. + HashMap <String,String> parentPreventOutEdgeIdHash = new HashMap <String,String> (); // using a hash so we can count the # of models, not edges to it. + HashMap <String,String> parentPreventIsaEdgeDelHash = new HashMap <String,String> (); // using a hash so we can count the # of models, not edges to it. + + ArrayList <String> fileLines = readInValidWidgetInfoFile(); + // validWidgetModelVersionIdHash: key = nodeType, value = validModelVersionId for that nodeType + // Note - we currently only have one valid version per model for widget models. 
+ HashMap <String,String> validModelInvariantIdHash = getModelInvariantIdHash( fileLines );
+
+ try {
+ for (Pair<Vertex, Vertex> nodePair : nodePairL) {
+ // The "fromNode" is the "bad/old" model-ver node that we moved off of
+ badModelVerCount++;
+ Vertex oldNode = nodePair.getValue0();
+ String oldModVerId = oldNode.property("model-version-id").value().toString();
+ Vertex parentModelNode = null;
+
+ // DOUBLE CHECK THAT THIS IS NOT a valid model-version-id
+
+
+ boolean okToDelete = true;
+ //---- delete the oldNode if the only edge it has is its "belongsTo/OUT" edge to its parent model.
+ // AND if its parent node does not have any named-query edges ("IsA" edges) pointing to it.
+ Iterator <Edge> edgeInIter = oldNode.edges(Direction.IN);
+ while( edgeInIter.hasNext() ){
+ Edge inE = edgeInIter.next();
+ Vertex otherSideNode4ThisEdge = inE.outVertex();
+ String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+ // If there are any IN edges, we won't delete this thing.
+ okToDelete = false;
+ logAndPrintInfo("We will not delete old model-ver node because it still has IN edges. This model-version-id = ["
+ + oldModVerId + "], has IN edge from a [" + otherSideNodeType + "] node. ");
+ }
+ if( okToDelete ){
+ // there were no IN edges; make sure the only OUT edge is to its parent
+ Iterator <Edge> edgeOutIter = oldNode.edges(Direction.OUT);
+ int edgeCount = 0;
+ while( edgeOutIter.hasNext() ){
+ Edge badModVerE = edgeOutIter.next();
+ edgeCount++;
+ if( edgeCount > 1 ){
+ // If there is more than one OUT edge, we won't delete this thing.
+ okToDelete = false;
+ parentModelNode = null;
+ logAndPrintInfo("We will not delete old model-ver node because it still has > 1 OUT-edges. model-version-id = [" + oldModVerId + "].");
+ }
+ else {
+ String eLabel = badModVerE.label().toString();
+ Vertex otherSideNode4ThisEdge = badModVerE.inVertex();
+ String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+ if( ! eLabel.equals("org.onap.relationships.inventory.BelongsTo") ){
+ logAndPrintInfo("We will not delete old model-ver node because it still has a non 'belongsTo' OUT-edge. model-version-id = ["
+ + oldModVerId + "], edgeLabel = [" + eLabel + "] edge goes to a [" + otherSideNodeType + "]. ");
+ okToDelete = false;
+ }
+ else {
+ if( ! otherSideNodeType.equals("model") ){
+ logAndPrintInfo("We will not delete old model-ver node (model-version-id = [" + oldModVerId + "]) "
+ + " because it still has an OUT edge to a [" + otherSideNodeType + "] node. ");
+ okToDelete = false;
+ parentModelNode = null;
+ }
+ else {
+ parentModelNode = otherSideNode4ThisEdge;
+ String parentInvId = parentModelNode.property("model-invariant-id").value().toString();
+ Iterator <Edge> pInIter = parentModelNode.edges(Direction.IN);
+ while( pInIter.hasNext() ){
+ Edge inE = pInIter.next();
+ String inELabel = inE.label().toString();
+ if( ! inELabel.equals("org.onap.relationships.inventory.BelongsTo") ){
+ Vertex otherSideNode = inE.outVertex();
+ String otherSideNT = otherSideNode.value(AAIProperties.NODE_TYPE);
+ // If there are any non-belongsTo IN edges still on the parent,
+ // we won't delete this model-ver since once the model-ver
+ // is gone, it's hard to know what nodeType the model was
+ // for - so it would be hard to know what valid model-invariant-id
+ // to migrate its edges to.
+ okToDelete = false;
+ parentPreventIsaEdgeDelHash.put(parentInvId,"");
+ logAndPrintInfo("We will not delete old model-ver node because its"
+ + " parent model still has IN edges. The model with model-invariant-id = ["
+ + parentInvId + "], has a non-belongsTo IN edge, label = ["
+ + inELabel + "] from a [" + otherSideNT + "] node. ");
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ if( okToDelete ){
+ logAndPrintInfo(" >>> DELETING model-ver node with model-version-id = [" + oldModVerId + "]" );
+ modelVerDelCount++;
+ oldNode.remove();
+ }
+
+ if( parentModelNode != null && okToDelete ){
+ // Delete the corresponding parent model IF it now has no
+ // edges anymore (and is not in our known valid model list)
+ // and we were deleting the model-ver also.
+ boolean okToDelParent = true;
+ String parentModInvId = parentModelNode.property("model-invariant-id").value().toString();
+
+ if( validModelInvariantIdHash.containsValue(parentModInvId) ){
+ okToDelParent = false;
+ logAndPrintInfo("We will not delete old model node because it is on our valid widget list. "
+ + " model-invariant-id = [" + parentModInvId + "] ");
+ parentPreventValidDelCount++;
+ }
+ else {
+ Iterator <Edge> pInIter = parentModelNode.edges(Direction.IN);
+ while( pInIter.hasNext() ){
+ Edge inE = pInIter.next();
+ String inELabel = inE.label().toString();
+ Vertex otherSideNode4ThisEdge = inE.outVertex();
+ String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+ // If there are any IN edges, we won't delete this thing.
+ okToDelParent = false;
+ parentPreventInEdgeIdHash.put(parentModInvId, "");
+ logAndPrintInfo("We will not delete old model node (yet) because it still has IN edges. This model-invariant-id = ["
+ + parentModInvId + "], has IN edge, label = ["
+ + inELabel + "] from a [" + otherSideNodeType + "] node. ");
+ }
+ Iterator <Edge> pOutIter = parentModelNode.edges(Direction.OUT);
+ while( pOutIter.hasNext() ){
+ Edge outE = pOutIter.next();
+ String outELabel = outE.label().toString();
+ Vertex otherSideNode4ThisEdge = outE.inVertex();
+ String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+ // If there are any OUT edges, we won't delete this thing.
+ okToDelParent = false;
+ parentPreventOutEdgeIdHash.put(parentModInvId, "");
+ logAndPrintInfo("We will not delete old model node because it still has OUT edges. This model-invariant-id = ["
+ + parentModInvId + "], has OUT edge, label = ["
+ + outELabel + "] to a [" + otherSideNodeType + "] node. ");
+ }
+ }
+
+ if( okToDelParent ){
+ if( parentPreventInEdgeIdHash.containsKey(parentModInvId) ){
+ // This parent had been prevented from being deleted until all its
+ // child model-ver's were deleted (it must have had more than one).
+ // So we can now remove it from the list of parent nodes that
+ // could not be deleted.
+ parentPreventInEdgeIdHash.remove(parentModInvId);
+ }
+ logAndPrintInfo(" >>> DELETING model node which was the parent of model-ver with model-version-id = [" +
This model-invariant-id = [" + parentModInvId + "]" ); + modelDelCount++; + parentModelNode.remove(); + } + } + } + + logAndPrintInfo(" >>> SUMMARY: total number of bad model-ver nodes found = " + badModelVerCount ); + logAndPrintInfo(" >>> SUMMARY: number of model-ver nodes deleted = " + modelVerDelCount ); + logAndPrintInfo(" >>> SUMMARY: number of model nodes deleted = " + modelDelCount ); + logAndPrintInfo(" >>> SUMMARY: number of model-ver nodes not deleted because their PARENT still had IsA edges = " + + parentPreventIsaEdgeDelHash.size() ); + logAndPrintInfo(" >>> SUMMARY: number of model nodes not deleted because they were valid = " + + parentPreventValidDelCount); + logAndPrintInfo(" >>> SUMMARY: number of model nodes not deleted because they had IN edges = " + + parentPreventInEdgeIdHash.size() ); + logAndPrintInfo(" >>> SUMMARY: number of model nodes not deleted because they had OUT edges = " + + parentPreventOutEdgeIdHash.size() ); + + + } catch (Exception e) { + logger.error("error encountered", e ); + success = false; + } + + } + + private ArrayList <String> readInValidWidgetInfoFile(){ + + ArrayList <String> fileLines = new ArrayList <String> (); + String homeDir = System.getProperty("AJSC_HOME"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logAndPrintInfo("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return fileLines; + } + if (configDir == null) { + logAndPrintInfo("ERROR: Could not find sys prop BUNDLECONFIG_DIR"); + success = false; + return fileLines; + } + String fileName = homeDir + "/" + configDir + "/" + "migration-input-files/widget-model-migration-data/widget-model-migration-input.csv"; + try (BufferedReader br = new BufferedReader(new FileReader(fileName))) { + String modelInfoLine; + while ((modelInfoLine = br.readLine()) != null) { + modelInfoLine = modelInfoLine.replace("\n", "").replace("\r", ""); + if (!modelInfoLine.isEmpty()) { + fileLines.add(modelInfoLine); + } + } + } + catch (FileNotFoundException e) { + logger.error("ERROR: Could not find file " + fileName, e); + success = false; + } catch (IOException e) { + logger.error("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.error("encountered exception", e); + e.printStackTrace(); + success = false; + } + return fileLines; + } + + + HashMap <String,String> getModelVersionIdHash( ArrayList <String> fileLines ){ + + HashMap <String, String> versionIdHash = new HashMap <String,String> (); + + if( fileLines == null ){ + logAndPrintInfo("ERROR: null fileLines array passed to getModelVersionIdHash"); + success = false; + return versionIdHash; + } + + for(int i = 0; i < fileLines.size(); i++ ){ + String mLine = fileLines.get(i); + String[] fields = mLine.split("\\,"); + if (fields.length != 3) { + logAndPrintInfo("ERROR: row in data file did not contain 3 elements. should have: model-name,model-version-id,model-invariant-id on each line."); + success = false; + } + else { + versionIdHash.put(fields[0],fields[1]); + } + } + + // Because of some bad data in the db, we will manually map the nodeType of "vdc" to what is + // the correct model info for "virtual-data-center". Problem is that there is no vdc nodeType, but + // there are named-queries pointing at a bad widget-model for "vdc". 
+ String virtDataCenterVerId = versionIdHash.get("virtual-data-center");
+ if( virtDataCenterVerId != null ){
+ versionIdHash.put("vdc",virtDataCenterVerId );
+ }
+
+ return versionIdHash;
+ }
+
+
+ HashMap <String,String> getModelInvariantIdHash( ArrayList <String> fileLines ){
+ HashMap <String, String> invIdHash = new HashMap <String,String> ();
+
+ if( fileLines == null ){
+ logAndPrintInfo("ERROR: null fileLines array passed to getModelInvariantIdHash");
+ success = false;
+ return invIdHash;
+ }
+
+ for(int i = 0; i < fileLines.size(); i++ ){
+ String mLine = fileLines.get(i);
+ String[] fields = mLine.split("\\,");
+ if (fields.length != 3) {
+ logAndPrintInfo("ERROR: row in data file did not contain 3 elements. Should have: model-name,model-version-id,model-invariant-id on each line.");
+ success = false;
+ }
+ else {
+ invIdHash.put(fields[0],fields[2]);
+ }
+ }
+
+ // Because of some bad data in the db, we will manually map the nodeType of "vdc" to what is
+ // the correct model info for "virtual-data-center". Problem is that there is no vdc nodeType, but
+ // there are named-queries pointing at a bad widget-model for "vdc".
+ String virtDataCenterInvId = invIdHash.get("virtual-data-center");
+ if( virtDataCenterInvId != null ){
+ invIdHash.put("vdc",virtDataCenterInvId );
+ }
+ return invIdHash;
+ }
+
+ /**
+ * Log and print.
+ *
+ * @param msg
+ * the msg
+ */
+ protected void logAndPrintInfo(String msg) {
+ System.out.println(msg);
+ logger.info(msg);
+ }
+
+
+
+}
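
To summarize the guard logic above: an old model-ver is removed only when it has no IN edges and its single OUT edge is a BelongsTo to its parent model, and that parent model is removed only when it is not on the valid-widget list and has no remaining edges of its own. A worked trace (vertex IDs hypothetical):

    model-ver 3f2a... : no IN edges, one BelongsTo OUT edge to model 9c1b...  ->  deleted
    model 9c1b...     : not on the valid list, no edges left after the above  ->  deleted
    model-ver 77d0... : still has an IN edge from a named-query-element       ->  kept and counted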
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateEdgesBetweenVnfcAndVfModule.java b/src/main/java/org/onap/aai/migration/v13/MigrateEdgesBetweenVnfcAndVfModule.java new file mode 100644 index 0000000..3e09c51 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v13/MigrateEdgesBetweenVnfcAndVfModule.java @@ -0,0 +1,83 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +/*- +* ============LICENSE_START======================================================= +* org.openecomp.aai +* ================================================================================ +* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. +* ================================================================================ +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+* ============LICENSE_END========================================================= +* */ + +package org.onap.aai.migration.v13; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.javatuples.Pair; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.EdgeMigrator; +import org.onap.aai.migration.Enabled; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; + +@MigrationPriority(10) +@MigrationDangerRating(100) +@Enabled +public class MigrateEdgesBetweenVnfcAndVfModule extends EdgeMigrator { + + public MigrateEdgesBetweenVnfcAndVfModule(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.empty(); + } + + @Override + public List<Pair<String, String>> getAffectedNodePairTypes() { + logger.info("Starting migration to update edge properties between vf-module and vnfc...."); + List<Pair<String, String>> nodePairList = new ArrayList<Pair<String, String>>(); + nodePairList.add(new Pair<>("vf-module", "vnfc")); + return nodePairList; + } + + @Override + public String getMigrationName() { + return "migrate-edge-vnfc-and-vf-module"; + } +}
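
The node-pair list is the only hook this subclass supplies; the EdgeMigrator base class then appears to refresh the edge properties between each listed pair of node types from the current edge rules. A hypothetical migrator covering an additional pair would extend the same list (the second pair below is illustrative only, not part of this commit):

    List<Pair<String, String>> nodePairList = new ArrayList<>();
    nodePairList.add(new Pair<>("vf-module", "vnfc"));
    nodePairList.add(new Pair<>("generic-vnf", "l-interface")); // hypothetical
    return nodePairList;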
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateForwarderEvcCircuitId.java b/src/main/java/org/onap/aai/migration/v13/MigrateForwarderEvcCircuitId.java new file mode 100644 index 0000000..3f90934 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v13/MigrateForwarderEvcCircuitId.java @@ -0,0 +1,297 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v13; +/*- + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.Enabled; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Migrator; +import org.onap.aai.migration.Status; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; + + +@MigrationPriority(26) +@MigrationDangerRating(100) +@Enabled +public class MigrateForwarderEvcCircuitId extends Migrator { + + private final String PNF_NODE_TYPE = "pnf"; + private final String PROPERTY_PNF_NAME = "pnf-name"; + private final String PROPERTY_INTERFACE_NAME = "interface-name"; + private final String PROPERTY_FORWARDER_ROLE = "forwarder-role"; + private final String VALUE_INGRESS = "ingress"; + private final String PROPERTY_SEQUENCE = "sequence"; + private final int VALUE_EXPECTED_SEQUENCE = 1; + private final String FORWARDER_EVC_NODE_TYPE = "forwarder-evc"; + private final String PROPERTY_CIRCUIT_ID = "circuit-id"; + + private static boolean success = true; + private static boolean checkLog = false; + private static GraphTraversalSource g = null; + private int headerLength; + private int migrationSuccess = 0; + private int migrationFailure = 0; + + private static List<String> dmaapMsgList = new ArrayList<String>(); + private static final String homeDir = System.getProperty("AJSC_HOME"); + + protected class CircuitIdFileData { + String pnfName; + String interfaceName; + + String oldCircuitId; + String newCircuitId; + + public String getPnfName() { + return pnfName; + } + public void setPnfName(String pnfName) { + this.pnfName = pnfName; + } + public String getInterfaceName() { + return interfaceName; + } + public void setInterfaceName(String interfaceName) { + this.interfaceName = interfaceName; + } + + public String getOldCircuitId() { + return oldCircuitId; + } + public void setOldCircuitId(String oldCircuitId) { + this.oldCircuitId = oldCircuitId; + } + public String getNewCircuitId() { + return newCircuitId; + } + public void setNewCircuitId(String newCircutId) { + this.newCircuitId = newCircutId; + } + } + + private static ArrayList<CircuitIdFileData> circuitIdList = new ArrayList<CircuitIdFileData>(); + + public MigrateForwarderEvcCircuitId(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + this.g = this.engine.asAdmin().getTraversalSource(); + } + + @Override + public void run() { + logger.info("---------- Start migration ----------"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info(this.MIGRATION_ERROR + "ERROR: Could not find sys prop AJSC_HOME"); + success = false; + 
return;
+ }
+ if (configDir == null) {
+ logger.info(this.MIGRATION_ERROR + "ERROR: Could not find sys prop BUNDLECONFIG_DIR");
+ success = false;
+ return;
+ }
+
+ String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";
+ int fileLineCounter = 0;
+ String fileName = feedDir + "circuitIds.csv";
+ logger.info(fileName);
+ logger.info("---------- Processing Entries from file ----------");
+ try {
+ List<String> lines = Files.readAllLines(Paths.get(fileName));
+ Iterator<String> lineItr = lines.iterator();
+ while (lineItr.hasNext()){
+ String line = lineItr.next().replace("\n", "").replace("\r", "");
+ if (!line.isEmpty()) {
+ if (fileLineCounter != 0) {
+ String[] colList = line.split("\\s*,\\s*", -1);
+ CircuitIdFileData lineData = new CircuitIdFileData();
+ lineData.setPnfName(colList[0].replaceAll("^\"|\"$", "")
+ .replaceAll("[\t\n\r]+", "").trim());
+ lineData.setInterfaceName(colList[1].replaceAll("^\"|\"$", "")
+ .replaceAll("[\t\n\r]+", "").trim());
+ lineData.setOldCircuitId(colList[2].replaceAll("^\"|\"$", "")
+ .replaceAll("[\t\n\r]+", "").trim());
+ lineData.setNewCircuitId(colList[4].replaceAll("^\"|\"$", "")
+ .replaceAll("[\t\n\r]+", "").trim());
+ circuitIdList.add(lineData);
+
+ } else {
+ this.headerLength = line.split("\\s*,\\s*", -1).length;
+ logger.info("headerLength: " + headerLength + "\n");
+ if (this.headerLength != 6){
+ logger.info(this.MIGRATION_ERROR + "ERROR: Input file should have 6 columns");
+ this.success = false;
+ return;
+ }
+ }
+ }
+ fileLineCounter++;
+ }
+ updateCircuitIdCount();
+ logger.info("\n \n ******* Final Summary for Circuit Id Migration ********* \n");
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "CircuitIds processed: " + migrationSuccess);
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Total Rows Count: " + (fileLineCounter + 1));
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Unprocessed CircuitIds : " + migrationFailure + "\n");
+
+ } catch (FileNotFoundException e) {
+ logger.info(this.MIGRATION_ERROR + "ERROR: Could not find file " + fileName, e.getMessage());
+ success = false;
+ checkLog = true;
+ } catch (IOException e) {
+ logger.info(this.MIGRATION_ERROR + "ERROR: Issue reading file " + fileName, e);
+ success = false;
+ } catch (Exception e) {
+ logger.info(this.MIGRATION_ERROR + "encountered exception", e);
+ e.printStackTrace();
+ success = false;
+ }
+ }
+
+ private void updateCircuitIdCount() {
+ int numberOfLines = circuitIdList.size();
+ for(int i = 0; i < numberOfLines; i ++) {
+ GraphTraversal<Vertex, Vertex> nodeList = g.V().has(this.PROPERTY_PNF_NAME, circuitIdList.get(i).getPnfName())
+ .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).in("tosca.relationships.network.BindsTo")
+ .has(this.PROPERTY_INTERFACE_NAME, circuitIdList.get(i).getInterfaceName()).in("org.onap.relationships.inventory.ForwardsTo")
+ .has(this.PROPERTY_FORWARDER_ROLE, this.VALUE_INGRESS).has(this.PROPERTY_SEQUENCE, this.VALUE_EXPECTED_SEQUENCE)
+ .out("org.onap.relationships.inventory.Uses").in("org.onap.relationships.inventory.BelongsTo");
+ if(!nodeList.hasNext()) {
+ logger.info(this.MIGRATION_ERROR + "ERROR: Failure to update Circuit Id " + circuitIdList.get(i).getOldCircuitId()
+ + " to " + circuitIdList.get(i).getNewCircuitId() + " Graph Traversal failed \n");
+ migrationFailure++;
+ }
+ while (nodeList.hasNext()) {
+ Vertex forwarderEvcVtx = nodeList.next();
+ boolean updateSuccess = false;
+ if (forwarderEvcVtx != null) {
+ logger.info("forwarder-evc-id is " + forwarderEvcVtx.value("forwarder-evc-id"));
+ if(forwarderEvcVtx.property(PROPERTY_CIRCUIT_ID).isPresent() && +
forwarderEvcVtx.value(PROPERTY_CIRCUIT_ID).equals(circuitIdList.get(i).getNewCircuitId())) { + logger.info("Skipping Record: Old Collector CircuitId " + forwarderEvcVtx.value(PROPERTY_CIRCUIT_ID) + + " is the same as New Collector CircuitId " + circuitIdList.get(i).getNewCircuitId() + "\n"); + migrationFailure++; + } + else if(!circuitIdList.get(i).getNewCircuitId().isEmpty() && + forwarderEvcVtx.property(PROPERTY_CIRCUIT_ID).isPresent() && + circuitIdList.get(i).getOldCircuitId().equals(forwarderEvcVtx.value(PROPERTY_CIRCUIT_ID))) + { + try { + forwarderEvcVtx.property(PROPERTY_CIRCUIT_ID, circuitIdList.get(i).getNewCircuitId()); + this.touchVertexProperties(forwarderEvcVtx, false); + updateSuccess = true; + + } catch (Exception e) { + logger.info(e.toString()); + logger.info(this.MIGRATION_ERROR + "ERROR: Failure to update Circuit Id " + circuitIdList.get(i).getOldCircuitId() + + " to " + circuitIdList.get(i).getNewCircuitId() + "\n"); + migrationFailure++; + + } + if(updateSuccess) { + String dmaapMsg = System.nanoTime() + "_" + forwarderEvcVtx.id().toString() + "_" + + forwarderEvcVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + logger.info("Update of Circuit Id " + circuitIdList.get(i).getOldCircuitId() + " to " + + circuitIdList.get(i).getNewCircuitId() + " successful \n"); + migrationSuccess++; + } + } + else if(!forwarderEvcVtx.property(PROPERTY_CIRCUIT_ID).isPresent()) + { + logger.info(this.MIGRATION_ERROR + "ERROR: Old Collector Circuit Id not found " + circuitIdList.get(i).getOldCircuitId() + + " was not updated to " + circuitIdList.get(i).getNewCircuitId() + "\n"); + migrationFailure++; + } + else { + logger.info(this.MIGRATION_ERROR + "ERROR: Failure to update Circuit Id " + circuitIdList.get(i).getOldCircuitId() + + " to " + circuitIdList.get(i).getNewCircuitId() + "\n"); + migrationFailure++; + } + } + } + } + + } + + @Override + public Status getStatus() { + if (checkLog) { + return Status.CHECK_LOGS; + } + else if (success) { + return Status.SUCCESS; + } + else { + return Status.FAILURE; + } + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{this.FORWARDER_EVC_NODE_TYPE}); + } + + @Override + public String getMigrationName() { + return "MigrateForwarderEvcCircuitId"; + } +} diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateGenericVnfMgmtOptions.java b/src/main/java/org/onap/aai/migration/v14/MigrateGenericVnfMgmtOptions.java new file mode 100644 index 0000000..d32ce81 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v14/MigrateGenericVnfMgmtOptions.java @@ -0,0 +1,103 @@ +/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Status;
+import org.onap.aai.migration.ValueMigrator;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@MigrationPriority(1)
+@MigrationDangerRating(1)
+@Enabled
+public class MigrateGenericVnfMgmtOptions extends ValueMigrator {
+
+ protected static final String VNF_NODE_TYPE = "generic-vnf";
+
+
+ private static Map<String, Map> map;
+ private static Map<String, String> pair1;
+ private static Map<String, List<String>> conditionsMap;
+
+ public MigrateGenericVnfMgmtOptions(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+ super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setMgmtOptions(), setConditionsMap(), false);
+
+ }
+
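+ // Property/value pairs applied by ValueMigrator: every matched generic-vnf
+ // gets its management-option set to "AT&T Managed-Basic".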
+ private static Map<String, Map> setMgmtOptions(){
+ map = new HashMap<>();
+ pair1 = new HashMap<>();
+
+ pair1.put("management-option", "AT&T Managed-Basic");
+ map.put("generic-vnf", pair1);
+
+ return map;
+ }
+
+
+
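+ // Condition filter consumed by ValueMigrator: only generic-vnfs whose
+ // vnf-type is one of HN, HP, or HG are updated.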
+ public static Map<String, List<String>> setConditionsMap() {
+ List<String> conditionsList = new ArrayList<String>();
+ conditionsMap = new HashMap<>();
+
+ conditionsList.add("HN");
+ conditionsList.add("HP");
+ conditionsList.add("HG");
+
+ conditionsMap.put("vnf-type", conditionsList);
+
+ return conditionsMap;
+ }
+
+ @Override
+ public Status getStatus() {
+ return Status.SUCCESS;
+ }
+
+ @Override
+ public Optional<String[]> getAffectedNodeTypes() {
+ return Optional.of(new String[]{VNF_NODE_TYPE});
+ }
+
+ @Override
+ public String getMigrationName() {
+ return "MigrateGenericVnfMgmtOptions";
+ }
+
+ @Override
+ public boolean isUpdateDmaap(){
+ return true;
+ }
+
+
+}
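
Functionally, the ValueMigrator wiring above amounts to the traversal sketched below. This is an illustrative sketch only, not code from this commit; the base class additionally handles resource-version bookkeeping and the DMaaP notification enabled by isUpdateDmaap().

    // Sketch; assumes a TinkerPop GraphTraversalSource g and
    // org.apache.tinkerpop.gremlin.process.traversal.P on the classpath.
    g.V().has("aai-node-type", "generic-vnf")
         .has("vnf-type", P.within("HN", "HP", "HG"))
         .property("management-option", "AT&T Managed-Basic")
         .iterate();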
diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateMissingFqdnOnPservers.java b/src/main/java/org/onap/aai/migration/v14/MigrateMissingFqdnOnPservers.java new file mode 100644 index 0000000..77ecc7e --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v14/MigrateMissingFqdnOnPservers.java @@ -0,0 +1,142 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v14; + +import java.util.List; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.Enabled; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Migrator; +import org.onap.aai.migration.Status; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; + + +@MigrationPriority(20) +@MigrationDangerRating(2) +@Enabled +public class MigrateMissingFqdnOnPservers extends Migrator { + + protected static final String PSERVER_NODE_TYPE = "pserver"; + protected static final String PSERVER_FQDN = "fqdn"; + protected static final String PSERVER_HOSTNAME = "hostname"; + protected static final String PSERVER_SOURCEOFTRUTH = "source-of-truth"; + + private boolean success = true; + private GraphTraversalSource g = null; + + protected final AtomicInteger falloutRowsCount = new AtomicInteger(0); + + public MigrateMissingFqdnOnPservers(TransactionalGraphEngine engine, LoaderFactory loaderFactory, + EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + } + + @Override + public void run() { + logger.info("---------- Start Updating fqdn for pserver ----------"); + + try { + int pserverCount = 0; + int pserverUpdatedCount = 0; + int pserverSkippedCount = 0; + int pserverErrorCount = 0; + int pserverWithMissingSOTCount = 0; + + GraphTraversal<Vertex, Vertex> pserverList = this.engine.asAdmin().getTraversalSource().V() + .has(AAIProperties.NODE_TYPE, 
PSERVER_NODE_TYPE).union(__.hasNot(PSERVER_FQDN),__.has(PSERVER_FQDN,""));//gets list of pservers with missing and empty fqdn + + while (pserverList.hasNext()) { + pserverCount++; + Vertex vertex = pserverList.next(); + String hostname = null; + String sourceOfTruth = null; + hostname = vertex.property(PSERVER_HOSTNAME).value().toString(); + + if(vertex.property(PSERVER_SOURCEOFTRUTH).isPresent()) { + sourceOfTruth = vertex.property(PSERVER_SOURCEOFTRUTH).value().toString(); + }else { + logger.info("Missing source of truth for hostname : " + hostname); + pserverWithMissingSOTCount++; + } + + if (!hostname.contains(".")) { + logger.info("Invalid format hostname :" + hostname + " and its source of truth is : " + sourceOfTruth); + pserverSkippedCount++; + continue; + } + + try { + vertex.property(PSERVER_FQDN, hostname); + this.touchVertexProperties(vertex, false); + logger.info("Updated fqdn from hostname : " + hostname + " and its source of truth is : " + sourceOfTruth); + pserverUpdatedCount++; + } catch (Exception e) { + success = false; + pserverErrorCount++; + logger.error(MIGRATION_ERROR + "encountered exception for fqdn update for pserver with hostname :" + hostname + + " and source of truth : " + sourceOfTruth, e); + } + } + + logger.info("\n \n ******* Final Summary of Updated fqdn for pserver Migration ********* \n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Total Number of pservers with missing or empty fqdn : "+pserverCount + "\n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Number of pservers updated: " + pserverUpdatedCount + "\n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Number of pservers invalid: " + pserverSkippedCount + "\n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Number of pservers failed to update due to error : " + pserverErrorCount + "\n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Number of pservers with missing source of truth: " + pserverWithMissingSOTCount + "\n"); + + } catch (Exception e) { + logger.info("encountered exception", e); + success = false; + } + } + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } else { + return Status.FAILURE; + } + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[] { PSERVER_NODE_TYPE }); + } + + @Override + public String getMigrationName() { + return "MigrateMissingFqdnOnPserver"; + } + +}
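
In short, the migration above copies a hostname that already looks fully qualified into the missing or empty fqdn property, and skips bare hostnames. A worked example (hostnames hypothetical):

    hostname = "svr01.dc1.example.com"  ->  fqdn set to "svr01.dc1.example.com"
    hostname = "svr01"                  ->  logged as invalid format and skipped (no "." present)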
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateNetworkTechToCloudRegion.java b/src/main/java/org/onap/aai/migration/v14/MigrateNetworkTechToCloudRegion.java new file mode 100644 index 0000000..afdea57 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v14/MigrateNetworkTechToCloudRegion.java @@ -0,0 +1,172 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v14; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.Enabled; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Migrator; +import org.onap.aai.migration.Status; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; + +@MigrationPriority(20) +@MigrationDangerRating(2) +@Enabled +public class MigrateNetworkTechToCloudRegion extends Migrator{ + + protected static final String CLOUDREGION_NODETYPE = "cloud-region"; + protected static final String CLOUD_OWNER = "cloud-owner"; + protected static final String NETWORK_TECHNOLOGY_NODETYPE = "network-technology"; + protected static final String NETWORK_TECHNOLOGY_ID = "network-technology-id"; + protected static final String NETWORK_TECHNOLOGY_NAME = "network-technology-name"; + + + private boolean success = true; + + private static List<String> dmaapMsgList = new ArrayList<String>(); + + + public MigrateNetworkTechToCloudRegion(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + } + + @Override + public void run() { + + List<Vertex> cloudRegionVertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, CLOUDREGION_NODETYPE).has(CLOUD_OWNER,"att-aic").toList(); + logger.info("Number of cloud-region with cloud-owner att-aic : " + cloudRegionVertextList.size()); + createEdges(cloudRegionVertextList, "CONTRAIL"); + createEdges(cloudRegionVertextList, "AIC_SR_IOV"); + + cloudRegionVertextList = 
this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, CLOUDREGION_NODETYPE).has(CLOUD_OWNER,"att-nc").toList(); + logger.info("Number of cloud-region with cloud-owner att-nc : " + cloudRegionVertextList.size()); + createEdges(cloudRegionVertextList, "OVS"); + createEdges(cloudRegionVertextList, "STANDARD-SR-IOV"); + + } + + private void createEdges(List<Vertex> sourceVertexList, String networkTechName) + { + int networkTechEdgeCount = 0; + int networkTechEdgeErrorCount = 0; + + List<Vertex> networkTechVertexList = this.engine.asAdmin().getTraversalSource().V() + .has(AAIProperties.NODE_TYPE, NETWORK_TECHNOLOGY_NODETYPE).has(NETWORK_TECHNOLOGY_NAME, networkTechName) + .toList(); + + logger.info("---------- Start Creating an Edge from cloud-region to network-technology nodes with network-technology-name " + networkTechName + " ----------"); + + for (Vertex cloudRegionVertex : sourceVertexList) { + + try { + + for (Vertex networkVertex : networkTechVertexList) { + if (networkVertex != null) { + boolean edgePresent = false; + // Check if edge already exists for each of the source vertex + List<Vertex> outVertexList = this.engine.asAdmin().getTraversalSource().V(cloudRegionVertex) + .out().has(AAIProperties.NODE_TYPE, NETWORK_TECHNOLOGY_NODETYPE) + .has(NETWORK_TECHNOLOGY_NAME, networkTechName).has(NETWORK_TECHNOLOGY_ID, + networkVertex.property(NETWORK_TECHNOLOGY_ID).value().toString()) + .toList(); + Iterator<Vertex> vertexItr = outVertexList.iterator(); + if (outVertexList != null && !outVertexList.isEmpty() && vertexItr.hasNext()) { + logger.info("\t Edge already exists from " + CLOUDREGION_NODETYPE + " with " + CLOUD_OWNER + + " and cloud-region-id " + + cloudRegionVertex.property("cloud-region-id").value().toString() + " to " + + NETWORK_TECHNOLOGY_NODETYPE + " nodes with " + NETWORK_TECHNOLOGY_NAME + " " + + networkTechName); + edgePresent = true; + continue; + } + // Build edge from vertex to modelVerVertex + if (!edgePresent) { + this.createCousinEdge(cloudRegionVertex, networkVertex); + updateDmaapList(cloudRegionVertex); + networkTechEdgeCount++; + } + } else { + networkTechEdgeErrorCount++; + logger.info("\t" + MIGRATION_ERROR + "Unable to create edge from " + CLOUDREGION_NODETYPE + + " with " + CLOUD_OWNER + " to " + NETWORK_TECHNOLOGY_NODETYPE + " nodes with " + + NETWORK_TECHNOLOGY_NAME + " " + networkTechName); + + } + } + } catch (Exception e) { + success = false; + networkTechEdgeErrorCount++; + logger.error("\t" + MIGRATION_ERROR + "encountered exception from " + NETWORK_TECHNOLOGY_NODETYPE + + " node when trying to create edge to " + CLOUDREGION_NODETYPE, e); + } + } + + logger.info("\n \n ******* Summary " + NETWORK_TECHNOLOGY_NODETYPE + " Nodes: Finished creating an Edge from " + + CLOUDREGION_NODETYPE + " with " + CLOUD_OWNER + " to " + NETWORK_TECHNOLOGY_NODETYPE + " nodes with " + + NETWORK_TECHNOLOGY_NAME + " " + networkTechName + " ********* \n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Number of edges created from cloud-region to "+networkTechName +" network-technology : " + networkTechEdgeCount + "\n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Number of edges failed from cloud-region to "+networkTechName +" network-technology : " + networkTechEdgeErrorCount + "\n"); + + } + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } else { + return Status.FAILURE; + } + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{NETWORK_TECHNOLOGY_NODETYPE}); + } + + 
@Override + public String getMigrationName() { + return "MigrateNetworkTech"; + } + + private void updateDmaapList(Vertex v){ + String dmaapMsg = System.nanoTime() + "_" + v.id().toString() + "_" + v.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + logger.info("\tAdding Updated "+ CLOUDREGION_NODETYPE +" Vertex " + v.id().toString() + " to dmaapMsgList...."); + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + } + +} diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateSameSourcedRCTROPserverData.java b/src/main/java/org/onap/aai/migration/v14/MigrateSameSourcedRCTROPserverData.java new file mode 100644 index 0000000..d0c1e15 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v14/MigrateSameSourcedRCTROPserverData.java @@ -0,0 +1,576 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v14; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; +import org.apache.tinkerpop.gremlin.structure.*; +import org.javatuples.Pair; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.edges.enums.AAIDirection; +import org.onap.aai.edges.enums.EdgeProperty; +import org.onap.aai.exceptions.AAIException; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.*; +import org.onap.aai.introspection.Introspector; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.onap.aai.setup.SchemaVersions; +import org.springframework.web.util.UriUtils; + +import javax.ws.rs.core.UriBuilder; + +import java.io.UnsupportedEncodingException; +import java.util.*; +import java.util.stream.Collectors; + +@Enabled +@MigrationPriority(5) +@MigrationDangerRating(100) +public class MigrateSameSourcedRCTROPserverData extends EdgeSwingMigrator { + /** + * Instantiates a new migrator. 
+ * + * @param engine + */ + private final String PARENT_NODE_TYPE = "pserver"; + private boolean success = true; + protected Set<Object> seen = new HashSet<>(); + private Map<String, UriBuilder> nodeTypeToUri; + private Map<String, Set<String>> nodeTypeToKeys; + private static List<String> dmaapMsgList = new ArrayList<String>(); + private static List<Introspector> dmaapDeleteList = new ArrayList<Introspector>(); + Vertex complexFromOld; + private static int dupROCount = 0; + private static int roPserversUpdatedCount = 0; + private static int roPserversDeletedCount = 0; + private static int dupRctCount = 0; + private static int rctPserversUpdatedCount = 0; + private static int rctPserversDeletedCount = 0; + + public MigrateSameSourcedRCTROPserverData(TransactionalGraphEngine engine , LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + } + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } else { + return Status.FAILURE; + } + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + createDmaapFilesForDelete(dmaapDeleteList); + } + + @Override + public List<Pair<Vertex, Vertex>> getAffectedNodePairs() { + return null; + } + + @Override + public String getNodeTypeRestriction() { + return null; + } + + @Override + public String getEdgeLabelRestriction() { + return null; + } + + @Override + public String getEdgeDirRestriction() { + return null; + } + + @Override + public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) { + + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{"lag-interface", "l-interface", "l3-interface-ipv4-address", "l3-interface-ipv6-address", "sriov-vf", "vlan", "p-interface", "sriov-pf"}); + } + + @Override + public String getMigrationName() { + return "MigrateCorrectRCTSourcedPserverData"; + } + + @Override + public void run() { + + + nodeTypeToUri = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect( + Collectors.toMap( + e -> e.getKey(), + e -> UriBuilder.fromPath(e.getValue().getFullGenericURI().replaceAll("\\{"+ e.getKey() + "-", "{")) + )); + + nodeTypeToKeys = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect( + Collectors.toMap( + e -> e.getKey(), + e -> e.getValue().getKeys() + )); + + List<Vertex> pserverTraversalRCT = graphTraversalSource().V().has("aai-node-type", "pserver").has("source-of-truth", P.within("RCT", "AAIRctFeed")).toList(); + int rctCount = pserverTraversalRCT.size(); + + try { + logger.info("RCT pserver count: "+rctCount); + updateToLatestRCT(pserverTraversalRCT); + } catch (UnsupportedEncodingException e) { + e.printStackTrace(); + } catch (AAIException e) { + e.printStackTrace(); + } + + List<Vertex> pserverTraversalRO = graphTraversalSource().V().has("aai-node-type", "pserver").has("source-of-truth", P.within("RO", "AAI-EXTENSIONS")).toList(); + int roCount = pserverTraversalRO.size(); + try { + logger.info("RO pserver count: "+roCount); + updateToLatestRO(pserverTraversalRO); + } catch (UnsupportedEncodingException e) { + e.printStackTrace(); + } catch (AAIException e) { + e.printStackTrace(); + } + + logger.info ("\n \n ******* Migration Summary Counts for RCT and RO sourced pservers in A&AI ********* \n"); + 
logger.info(this.MIGRATION_SUMMARY_COUNT + "Total number of RCT pservers: " +rctCount); + logger.info(this.MIGRATION_SUMMARY_COUNT + "Duplicate RCT pserver count: "+ dupRctCount); + logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RCT updated: "+ rctPserversUpdatedCount); + logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RCT deleted: "+ rctPserversDeletedCount +"\n"); + + logger.info(this.MIGRATION_SUMMARY_COUNT + "Total number of RO pservers: " +roCount); + logger.info(this.MIGRATION_SUMMARY_COUNT + "Duplicate RO pserver count: "+ dupROCount); + logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RO updated: "+ roPserversUpdatedCount); + logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RO deleted: "+ roPserversDeletedCount +"\n"); + } + + public void updateToLatestRO(List<Vertex> list) throws UnsupportedEncodingException, AAIException { + List<Vertex> removeROList = new ArrayList<>(); + + Vertex latestV = null; + + for(int i=0;i<list.size();i++){ + Vertex currV = list.get(i); + + if (removeROList.contains(currV)){ + logger.info("RO Pserver: "+currV.property("hostname").value().toString() + "was already added to delete list. No further processing needed for this."); + continue; + } + logger.info("RO Pserver: "+currV.property("hostname").value().toString()); + + for(int j=i+1; j<list.size();j++) { + + Vertex temp = list.get(j); + + String[] currentVHostname = currV.property("hostname").value().toString().split("\\."); + String[] tempHostname = temp.property("hostname").value().toString().split("\\."); + + if (currentVHostname.length >0 && tempHostname.length > 0){ + if (!currentVHostname[0].isEmpty() && !tempHostname[0].isEmpty() && currentVHostname[0].equals(tempHostname[0])) { + dupROCount++; + logger.info("\tTemp RO Pserver: "+temp.property("hostname").value().toString()); + if (temp.property("hostname").value().toString().length() > currV.property("hostname").value().toString().length()) { + //temp is the latest vertex swing everything from currV to temp + latestV = temp; + movePlink(currV, latestV); + moveLagInterfaces(currV, latestV); + swingEdges(currV, latestV, null, null, "BOTH"); + modifyChildrenUri(latestV); + String dmaapMsg = System.nanoTime() + "_" + temp.id().toString() + "_" + temp.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + roPserversUpdatedCount++; + logger.info("\tAdding pserver "+latestV.property("hostname").value().toString() + " to updated list"); + if (!removeROList.contains(list.get(i))) { + removeROList.add(list.get(i)); + Introspector obj = serializer.getLatestVersionView(currV);//currV + logger.info("\tAdding pserver "+currV.property("hostname").value().toString() + " to delete list"); + dmaapDeleteList.add(obj); + roPserversDeletedCount++; + } + currV = latestV; + } else { + //currV is the latest temp is the old vertex swing everything from temp to currV + latestV = currV; + movePlink(temp, latestV); + moveLagInterfaces(temp, latestV); + swingEdges(temp, latestV, null, null, "BOTH"); + modifyChildrenUri(latestV); + String dmaapMsg = System.nanoTime() + "_" + currV.id().toString() + "_" + currV.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + logger.info("\tAdding pserver "+latestV.property("hostname").value().toString() + " to updated list"); + roPserversUpdatedCount++; + + if (!removeROList.contains(list.get(j))) { + removeROList.add(list.get(j)); + Introspector obj = serializer.getLatestVersionView(temp);//temp + logger.info("\tAdding pserver "+temp.property("hostname").value().toString() + " to 
delete list"); + dmaapDeleteList.add(obj); + roPserversDeletedCount++; + } + } + } + } + } + } + logger.info("\tCount of RO Pservers removed = "+removeROList.size()+"\n"); + removeROList.forEach(v ->v.remove()); + + } + +// public void addComplexEdge(Vertex Latest) throws AAIException { +// +// if(!(graphTraversalSource().V(Latest).has("aai-node-type", "pserver").out("org.onap.relationships.inventory.LocatedIn").has("aai-node-type","complex").hasNext())){ +// if (complexFromOld != null) +// createCousinEdge(Latest,complexFromOld); +// +// } +// } + + +// public void dropComplexEdge(Vertex old){ +// List<Vertex> locatedInEdgeVertexList = graphTraversalSource().V(old).has("aai-node-type", "pserver").out("org.onap.relationships.inventory.LocatedIn").has("aai-node-type","complex").toList(); +// if (locatedInEdgeVertexList != null && !locatedInEdgeVertexList.isEmpty()){ +// Iterator<Vertex> locatedInEdgeVertexListItr = locatedInEdgeVertexList.iterator(); +// while (locatedInEdgeVertexListItr.hasNext()){ +// complexFromOld = locatedInEdgeVertexListItr.next(); +// if ("complex".equalsIgnoreCase(complexFromOld.property("aai-node-type").value().toString())){ +// Edge pserverToComplexEdge = complexFromOld.edges(Direction.IN, "org.onap.relationships.inventory.LocatedIn").next(); +// pserverToComplexEdge.remove(); +// } +// } +// } +// } + + + private GraphTraversalSource graphTraversalSource() { + return this.engine.asAdmin().getTraversalSource(); + } + + public void updateToLatestRCT(List<Vertex> list) throws UnsupportedEncodingException, AAIException { + List<Vertex>removeRCTList = new ArrayList<>(); + + Vertex latestV = null; + for(int i=0;i<list.size();i++) { + Vertex currV = list.get(i); + if (!currV.property("fqdn").isPresent()){ + continue; + } + + if (removeRCTList.contains(currV)){ + logger.info("RCT Pserver: "+currV.property("hostname").value().toString() + "was already added to delete list. 
No further processing needed for this."); + continue; + } + logger.info("RCT Pserver: "+currV.property("hostname").value().toString()); + for(int j=i+1;j<list.size();j++) { + + Vertex temp = list.get(j); + if (temp.property("fqdn").isPresent()) { + String[] currentVFqdn = currV.property("fqdn").value().toString().split("\\."); + String[] tempFqdn = temp.property("fqdn").value().toString().split("\\."); + if (currentVFqdn.length >0 && tempFqdn.length > 0){ + String currentFqdnFirstToken = currentVFqdn[0]; + String tempFqdnFirstToken = tempFqdn[0]; + if (!currentFqdnFirstToken.isEmpty() && !tempFqdnFirstToken.isEmpty() && currentFqdnFirstToken.equals(tempFqdnFirstToken)) { + dupRctCount++; + logger.info("\tMatching Temp RCT Pserver: "+temp.property("hostname").value().toString()); + long tempRV = Long.parseLong(temp.value("resource-version")); + long currRV = Long.parseLong(currV.value("resource-version")); + logger.info("\tcurrRV: "+currRV+ ", tempRV: "+tempRV); + if (Long.parseLong(temp.value("resource-version")) > Long.parseLong(currV.value("resource-version"))) { + //currv is old, temp vertex found in traversal is the latest + latestV = temp; + movePlink(currV, latestV); + moveLagInterfaces(currV, latestV); + swingEdges(currV, latestV, null, null, "BOTH"); + modifyChildrenUri(latestV); + String dmaapMsg = System.nanoTime() + "_" + temp.id().toString() + "_" + temp.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + rctPserversUpdatedCount++; + logger.info("\tAdding pserver "+latestV.property("hostname").value().toString() + " to updated list"); + if (!removeRCTList.contains(list.get(i))) { + removeRCTList.add(list.get(i)); + Introspector obj = serializer.getLatestVersionView(currV); + logger.info("\tAdding pserver "+currV.property("hostname").value().toString() + " to delete list"); + dmaapDeleteList.add(obj); + rctPserversDeletedCount++; + } + currV = latestV; + } else { + //currv Is the latest, temp vertex found is an older version + latestV = currV; + movePlink(temp, latestV); + moveLagInterfaces(temp, latestV); + swingEdges(temp, latestV, null, null, "BOTH"); + modifyChildrenUri(latestV); + String dmaapMsg = System.nanoTime() + "_" + currV.id().toString() + "_" + currV.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + rctPserversUpdatedCount++; + logger.info("\tAdding pserver "+latestV.property("hostname").value().toString() + " to updated list"); + if (!removeRCTList.contains(list.get(j))) { + removeRCTList.add(list.get(j)); + Introspector obj = serializer.getLatestVersionView(temp); + logger.info("\tAdding pserver "+temp.property("hostname").value().toString() + " to delete list"); + dmaapDeleteList.add(obj); + rctPserversDeletedCount++; + } + } + + } + } + } + } + } + logger.info("\tCount of RCT Pservers removed = "+removeRCTList.size() +"\n"); + removeRCTList.forEach((r)-> r.remove()); + + } + + + public void movePlink(Vertex old, Vertex latest) throws AAIException { + + List<Vertex> pInterfacesOnOldPserver = graphTraversalSource().V(old).has("aai-node-type","pserver").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").toList(); + List<Vertex> pInterfacesOnLatestPserver = graphTraversalSource().V(latest).has("aai-node-type","pserver").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").toList(); + // SCENARIO 1 = no match found move everything from pserver old to new in swing edges call outside this fcn + + if(pInterfacesOnLatestPserver.size() == 0){ + logger.info("\tNo P-interfaces found on 
"+latest.property("hostname").value().toString()+ "..."); + if(pInterfacesOnOldPserver.size() != 0) { + logger.info("\tP-interfaces found on "+old.property("hostname").value().toString()+ ". Update plink name and move the p-interfaces to latest pserver."); + for (int i = 0; i < pInterfacesOnOldPserver.size(); i++) { + if (graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type", "p-interface").out("tosca.relationships.network.LinksTo").hasNext()) { + Vertex oldPlink = graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type", "p-interface").out("tosca.relationships.network.LinksTo").next(); + String linkName = oldPlink.property("link-name").value().toString(); + logger.info("\tPhysical-link "+linkName+ " found on "+graphTraversalSource().V(pInterfacesOnOldPserver.get(i).property("interface-name").value().toString())); + linkName = linkName.replaceAll(old.property("hostname").value().toString(), latest.property("hostname").value().toString()); + String[] PlinkBarSplit = linkName.split("\\|"); + if (PlinkBarSplit.length > 1) { + modifyPlinkName(oldPlink, linkName, old); + } + } + } + } + + return; + } + + for(int i=0; i<pInterfacesOnOldPserver.size();i++){ + for(int j=0; j<pInterfacesOnLatestPserver.size(); j++){ + Vertex oldPinterface = graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type","p-interface").next(); + //pinterfaces are the same + if(pInterfacesOnOldPserver.get(i).property("interface-name").value().toString().equals(pInterfacesOnLatestPserver.get(j).property("interface-name").value().toString())){ + Vertex newPinterface = graphTraversalSource().V(pInterfacesOnLatestPserver.get(j)).has("aai-node-type","p-interface").next(); + logger.info("\tMatching P-interface "+newPinterface.property("interface-name").value().toString()+ " found on pservers"); +// SCENARIO 3 there already exists a plink in the new pinterface need to move all other pinterfaces and nodes in swing edges after the fcn no need for plink name change + List<Vertex> oldPlinkList = graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type","p-interface").out("tosca.relationships.network.LinksTo").toList(); + if(graphTraversalSource().V(pInterfacesOnLatestPserver.get(j)).has("aai-node-type","p-interface").out("tosca.relationships.network.LinksTo").hasNext()){ + logger.info("\tPhysical-link exists on new pserver's p-interface also... So, don't move this p-interface to new pserver..."); + if (!oldPlinkList.isEmpty()) { + //drop edge b/w oldPInterface and oldPlink + String oldPlinkName = ""; + Edge oldPIntToPlinkEdge = oldPinterface.edges(Direction.OUT, "tosca.relationships.network.LinksTo").next(); + oldPIntToPlinkEdge.remove(); + + //remove physical link vertex also + Vertex oldPlink = null; + + oldPlink = oldPlinkList.get(0); + oldPlinkName = oldPlink.property("link-name").value().toString(); + oldPlink.remove(); + logger.info("\tDropped edge b/w old P-interface and Physical-link, and deleted old physical-link "+oldPlinkName); + } + moveChildrenOfMatchingPInterfaceToNewPserver(pInterfacesOnOldPserver, i, oldPinterface, newPinterface); + } +// SCENARIO 2 = there is no plink in new pinterface and move old plink to new + else{ + logger.info("\tNo Physical-link exists on new pserver's p-interface... 
Move old plink to new pserver's p-interface"); + Vertex oldPlink = null; + if (!oldPlinkList.isEmpty()) { + oldPlink = oldPlinkList.get(0); + String linkName = oldPlink.property("link-name").value().toString(); + createCousinEdge(newPinterface,oldPlink); + logger.info("\tCreated edge b/w new P-interface and old physical-link "+linkName); + //drop edge b/w oldPInterface and oldPlink + Edge oldPIntToPlinkEdge = oldPinterface.edges(Direction.OUT, "tosca.relationships.network.LinksTo").next(); + oldPIntToPlinkEdge.remove(); + logger.info("\tDropped edge b/w old P-interface and Physical-link "+linkName); + linkName = linkName.replaceAll(old.property("hostname").value().toString(),latest.property("hostname").value().toString()); + + String[] PlinkBarSplit = linkName.split("\\|"); + if(PlinkBarSplit.length>1) { + modifyPlinkName(oldPlink,linkName,old); + } + else{ + logger.info("\t" +oldPlink.property("link-name").value().toString()+ " does not comply with naming conventions related to pserver hostname:" + old.property("hostname").value().toString()); + } + moveChildrenOfMatchingPInterfaceToNewPserver(pInterfacesOnOldPserver, i, oldPinterface, newPinterface); + } else { + moveChildrenOfMatchingPInterfaceToNewPserver(pInterfacesOnOldPserver, i, oldPinterface, newPinterface); + } + } + //delete the oldPInterface + oldPinterface.remove(); + break; + } + } + } + } + + private void moveChildrenOfMatchingPInterfaceToNewPserver(List<Vertex> pInterfacesOnOldPserver, int i, Vertex oldPinterface, Vertex newPinterface) { + // Check if there are children under old pserver's p-int and move them to new pserver's matching p-int + List<Vertex> oldPIntChildren = graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type","p-interface").in().has("aai-node-type", P.within("l-interface","sriov-pf")).toList(); + if (oldPIntChildren != null && !oldPIntChildren.isEmpty()){ + oldPIntChildren.forEach((c)-> { swingEdges(oldPinterface, newPinterface, null, null, "IN"); +// c.remove(); + }); + logger.info("\t"+"Child vertices of p-interface on old pserver have been moved to p-interface on new pserver"); + + } + } + + public void modifyPlinkName(Vertex oldPlink,String linkName,Vertex old ){ + + String[] PlinkBarSplit = linkName.split("\\|"); + if(PlinkBarSplit.length>1) { + String[] pserv1Connection = PlinkBarSplit[0].split(":"); + String[] pserv2Connection = PlinkBarSplit[1].split(":"); + + HashMap<String, String> map = new HashMap<>(); + map.put(pserv1Connection[0], pserv1Connection[1]); + map.put(pserv2Connection[0], pserv2Connection[1]); + + String[] temp = new String[2]; + temp[0] = pserv1Connection[0]; + temp[1] = pserv2Connection[0]; + Arrays.sort(temp); + String linkNameNew = temp[0] + ":" + map.get(temp[0]).toString() + "|" + temp[1] + ":" + map.get(temp[1]).toString(); + oldPlink.property("link-name", linkNameNew); + logger.info("\tUpdate physical-link name from "+linkName+ " to "+linkNameNew); + } + else{ + logger.info("\t" +oldPlink.property("link-name").value().toString()+ "Does not comply with naming conventions related to pserver hostname:" + old.property("hostname").value().toString()); + + } + } + + public void moveLagInterfaces(Vertex old, Vertex latest) throws AAIException { + + List<Vertex> lagInterfacesOnOldPserver = graphTraversalSource().V(old).has("aai-node-type","pserver").in("tosca.relationships.network.BindsTo").has("aai-node-type","lag-interface").toList(); + List<Vertex> lagInterfacesOnLatestPserver = 
graphTraversalSource().V(latest).has("aai-node-type","pserver").in("tosca.relationships.network.BindsTo").has("aai-node-type","lag-interface").toList(); + // SCENARIO 1 = no match found move everything from pserver old to new in swing edges call outside this fcn + + if(lagInterfacesOnLatestPserver.size() == 0){ + return; + } + + for(int i=0; i<lagInterfacesOnOldPserver.size();i++){ + + for(int j=0; j<lagInterfacesOnLatestPserver.size(); j++){ + //lag interface-name matches on both + if(lagInterfacesOnOldPserver.get(i).property("interface-name").value().toString().equals(lagInterfacesOnLatestPserver.get(j).property("interface-name").value().toString())){ + Vertex oldLaginterface = graphTraversalSource().V(lagInterfacesOnOldPserver.get(i)).has("aai-node-type","lag-interface").next(); + Vertex newLaginterface = graphTraversalSource().V(lagInterfacesOnLatestPserver.get(j)).has("aai-node-type","lag-interface").next(); + //Check if there are any children on the old lag-interface and move them to new + // Check if there are children under old pserver's p-int and move them to new pserver's matching p-int + List<Vertex> oldPIntChildren = graphTraversalSource().V(lagInterfacesOnOldPserver.get(i)).has("aai-node-type","lag-interface").in().has("aai-node-type", P.within("l-interface")).toList(); + if (oldPIntChildren != null && !oldPIntChildren.isEmpty()){ + oldPIntChildren.forEach((c)-> swingEdges(oldLaginterface, newLaginterface, null, null, "BOTH")); + } + logger.info("\t"+"Child vertices of lag-interface on old pserver have been moved to lag-interface on new pserver"); + //delete the oldLagInterface + oldLaginterface.remove(); + break; + } + } + } + } + + + private void modifyChildrenUri(Vertex v) throws UnsupportedEncodingException, AAIException { + logger.info("\tModifying children uri for all levels....."); + Set<Vertex> parentSet = new HashSet<>(); + parentSet.add(v); + verifyOrAddUri("", parentSet); + } + + + protected void verifyOrAddUri(String parentUri, Set<Vertex> vertexSet) throws UnsupportedEncodingException, AAIException { + + + String correctUri; + for (Vertex v : vertexSet) { + seen.add(v.id()); + //if there is an issue generating the uri catch, log and move on; + try { + correctUri = parentUri + this.getUriForVertex(v); + } catch (Exception e) { + logger.error("\tVertex has issue generating uri " + e.getMessage() + "\n\t" + this.asString(v)); + continue; + } + try { + v.property(AAIProperties.AAI_URI, correctUri); + } catch (Exception e) { + logger.info(e.getMessage() + "\n\t" + this.asString(v)); + } + if (!v.property(AAIProperties.AAI_UUID).isPresent()) { + v.property(AAIProperties.AAI_UUID, UUID.randomUUID().toString()); + } + this.verifyOrAddUri(correctUri, getChildren(v)); + } + } + + protected Set<Vertex> getChildren(Vertex v) { + + Set<Vertex> children = graphTraversalSource().V(v).bothE().not(__.has(EdgeProperty.CONTAINS.toString(), AAIDirection.NONE.toString())).otherV().toSet(); + + return children.stream().filter(child -> !seen.contains(child.id())).collect(Collectors.toSet()); + } + + protected String getUriForVertex(Vertex v) { + String aaiNodeType = v.property(AAIProperties.NODE_TYPE).value().toString(); + + + Map<String, String> parameters = this.nodeTypeToKeys.get(aaiNodeType).stream().collect(Collectors.toMap( + key -> key, + key -> encodeProp(v.property(key).value().toString()) + )); + + return this.nodeTypeToUri.get(aaiNodeType).buildFromEncodedMap(parameters).toString(); + } + private static String encodeProp(String s) { + try { + return UriUtils.encode(s, 
"UTF-8"); + } catch (UnsupportedEncodingException e) { + return ""; + } + } + +}
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateSdnaIvlanData.java b/src/main/java/org/onap/aai/migration/v14/MigrateSdnaIvlanData.java new file mode 100644 index 0000000..6f759fb --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v14/MigrateSdnaIvlanData.java @@ -0,0 +1,443 @@ +/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+@MigrationPriority(100)
+@MigrationDangerRating(1)
+@Enabled
+public class MigrateSdnaIvlanData extends Migrator {
+
+ private final String CONFIGURATION_NODE_TYPE = "configuration";
+ private final String EVC_NODE_TYPE = "evc";
+ private final String FORWARDER_NODE_TYPE = "forwarder";
+ private final String FORWARDER_EVC_NODE_TYPE = "forwarder-evc";
+ private final String FORWARDING_PATH_NODE_TYPE = "forwarding-path";
+ private final String PNF_NODE_TYPE = "pnf";
+ private final String P_INTERFACE_NODE_TYPE = "p-interface";
+ private final String LAG_INTERFACE_NODE_TYPE = "lag-interface";
+ private final String SAREA_GLOBAL_CUSTOMER_ID = "8a00890a-e6ae-446b-9dbe-b828dbeb38bd";
+
+ GraphTraversal<Vertex, Vertex> serviceSubscriptionGt;
+
+ private static GraphTraversalSource g = null;
+ private static boolean success = true;
+ private static boolean checkLog = false;
+ private int headerLength;
+ private int migrationSuccess = 0;
+ private int migrationFailure = 0;
+ private int invalidPInterfaceCount = 0;
+ private int invalidLagInterfaceCount = 0;
+
+
+ private static List<String> dmaapMsgList = new ArrayList<String>();
+ private static final String homeDir = System.getProperty("AJSC_HOME");
+
+ private static List<String> validPnfList = new ArrayList<String>();
+ private static List<String> invalidPnfList = new ArrayList<String>();
+
+ private static Map<String, List<String>> validInterfaceMap = new HashMap<String, List<String>>();
+ private static Map<String, List<String>> invalidInterfaceMap = new HashMap<String, List<String>>();
+
+ protected class SdnaIvlanFileData{
+ String evcName;
+ String pnfName;
+ String interfaceAID;
+ int ivlanValue;
+
+ public String getEvcName() {
+ return evcName;
+ }
+ public void setEvcName(String evcName) {
+ this.evcName = evcName;
+ }
+
+ public String getPnfName() {
+ return pnfName;
+ }
+ public void setPnfName(String pnfName) {
+ this.pnfName = pnfName;
+ }
+ public String getInterfaceAID() {
+ return interfaceAID;
+ }
+ public void setInterfaceAID(String interfaceAID) {
+ this.interfaceAID = interfaceAID;
+ }
+
+ public int getIvlanValue() {
+ return ivlanValue;
+ }
+ public void setIvlanValue(int ivlanValue) {
+ this.ivlanValue = ivlanValue;
+ }
+
+ }
+
+ private static ArrayList<SdnaIvlanFileData> ivlanList = new ArrayList<SdnaIvlanFileData>();
+
+ public MigrateSdnaIvlanData(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+ super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+
+ this.g = this.engine.asAdmin().getTraversalSource();
+ this.serviceSubscriptionGt = g.V().has("global-customer-id", SAREA_GLOBAL_CUSTOMER_ID).in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA");
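+ // serviceSubscriptionGt starts at the SAREA customer vertex (matched on
+ // global-customer-id) and follows the incoming BelongsTo edge to its
+ // service-subscription vertices with service-type "SAREA".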
+ }
+
+ @Override
+ public void run() {
+ logger.info("---------- Start migration ----------");
+ String configDir = System.getProperty("BUNDLECONFIG_DIR");
+ if (homeDir == null) {
+ logger.info(this.MIGRATION_ERROR + "ERROR: Could not find sys prop AJSC_HOME");
+ success = false;
+ return;
+ }
+ if (configDir == null) {
+ logger.info(this.MIGRATION_ERROR + "ERROR: Could not find sys prop BUNDLECONFIG_DIR");
+ success = false;
+ return;
+ }
+
+ String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";
+
+ int fileLineCounter = 0;
+
+ String fileName = feedDir + "ivlanData.csv";
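+ // Expected file layout (sample values below are hypothetical): a header
+ // row, then EVC_Name,PNF_Name,Interface_AID,Ivlan_Value records, e.g.
+ //   evc-example-0001,pnf-example-01,ae1.100,2003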
+ logger.info(fileName);
+ logger.info("---------- Processing Entries from file ----------");
+
+
+ try {
+ List<String> lines = Files.readAllLines(Paths.get(fileName));
+ Iterator<String> lineItr = lines.iterator();
+ while (lineItr.hasNext()){
+ String line = lineItr.next().trim();
+ if (!line.isEmpty()) {
+ if (fileLineCounter != 0) {
+
+ try{
+ String[] colList = line.split(",", -1);
+ SdnaIvlanFileData lineData = new SdnaIvlanFileData();
+ lineData.setEvcName(colList[0].trim());
+ lineData.setPnfName(colList[1].trim());
+ lineData.setInterfaceAID(colList[2].trim());
+ lineData.setIvlanValue(Integer.valueOf(colList[3].trim()));
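+ // Integer.valueOf throws NumberFormatException for a non-numeric
+ // Ivlan_Value; the catch below logs the record as invalid and skips it.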
+ ivlanList.add(lineData);
+
+ } catch (Exception e){
+ logger.info(this.MIGRATION_ERROR + " ERROR: Record Format is invalid. Expecting Numeric value for Forwarder_Id and Ivlan_Value. Skipping Record: " + line);
+ this.migrationFailure++;
+ }
+
+ } else {
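+ // The first line of the file is the header row; only its column count is checked.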
+ this.headerLength = line.split(",", -1).length;
+ if (this.headerLength < 4){
+ logger.info(this.MIGRATION_ERROR + "ERROR: Input file should have atleast 4 columns");
+ this.success = false;
+ return;
+ }
+ }
+ }
+ fileLineCounter++;
+ }
+
+ processSdnaIvlan();
+
+ int invalidInterfacesCount = getInvalidInterfaceCount();
+
+ logger.info ("\n \n ******* Final Summary for SDN-A IVLAN Migration ********* \n");
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "SDN-A forward-evcs: IVLANs updated: "+ migrationSuccess);
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Total File Record Count: "+(fileLineCounter - 1));
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Unprocessed SDNA File Records : "+ migrationFailure);
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "PNFs from Input File not found : "+ Integer.toString(invalidPnfList.size()) + "\n");
+
+
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Total PNF + P-INTERFACEs from Input File not found : " + Integer.toString(invalidPInterfaceCount));
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Total PNF + LAG-INTERFACEs from Input File not found : " + Integer.toString(invalidLagInterfaceCount));
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Total PNF/INTERFACEs from Input File not found : " + Integer.toString(invalidInterfacesCount));
+
+ } catch (FileNotFoundException | NoSuchFileException e) {
+ logger.info(this.MIGRATION_ERROR + "ERROR: Could not find file " + fileName, e.getMessage());
+ success = false;
+ checkLog = true;
+ } catch (IOException e) {
+ logger.info(this.MIGRATION_ERROR + "ERROR: Issue reading file " + fileName, e);
+ success = false;
+ } catch (Exception e) {
+ logger.info(this.MIGRATION_ERROR + "encountered exception", e);
+ success = false;
+ }
+
+ }
+ private void processSdnaIvlan() {
+
+ for(int i = 0; i < ivlanList.size(); i ++) {
+ String evc = ivlanList.get(i).getEvcName();
+ String pnf = ivlanList.get(i).getPnfName();
+ String interfaceId = ivlanList.get(i).getInterfaceAID();
+ String ivlanValue = Integer.toString(ivlanList.get(i).getIvlanValue());
+
+ boolean pnfExists = pnfExists(pnf);
+ GraphTraversal<Vertex, Vertex> forwarderEvcGT;
+ Vertex forwarderEvcVtx = null;
+ String interfaceNodeType;
+ String forwarderEvcId = null;
+
+ if (!pnfExists){
+ migrationFailure++;
+ }else{
+
+ if (interfaceId.contains(".")){
+ interfaceNodeType = P_INTERFACE_NODE_TYPE;
+ }else{
+ interfaceNodeType = LAG_INTERFACE_NODE_TYPE;
+ }
+
+ validateInterface(pnf, interfaceNodeType, interfaceId);
+
+ forwarderEvcGT = g.V()
+ .has("pnf-name", pnf).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE)
+ .in("tosca.relationships.network.BindsTo")
+ .has(AAIProperties.NODE_TYPE, interfaceNodeType).has("interface-name", interfaceId)
+ .in("org.onap.relationships.inventory.ForwardsTo")
+ .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", evc))
+ .out("org.onap.relationships.inventory.Uses")
+ .in("org.onap.relationships.inventory.BelongsTo");
+
+ // fwd-evc not found for pnf + interface
+ if(!forwarderEvcGT.hasNext()){
+ forwarderEvcId = pnf + " " + evc;
+ migrationError(PNF_NODE_TYPE + "/" + EVC_NODE_TYPE, forwarderEvcId, "ivlan", ivlanValue);
+
+ }
+
+ while(forwarderEvcGT.hasNext()){
+ forwarderEvcVtx = forwarderEvcGT.next();
+
+ // fwd-evc vertex is null
+ if(forwarderEvcVtx == null){
+ forwarderEvcId = pnf + " " + evc;
+ migrationError(PNF_NODE_TYPE + "/" + EVC_NODE_TYPE, forwarderEvcId, "ivlan", ivlanValue);
+ }
+ // update fwd-evc with ivlan value
+ else{
+
+ forwarderEvcId = forwarderEvcVtx.property("forwarder-evc-id").value().toString();
+ try{
+ forwarderEvcVtx.property("ivlan", ivlanValue);
+ logger.info(String.format("Updating Node Type forwarder-evc Property ivlan value %s", ivlanValue.toString()));
+ this.touchVertexProperties(forwarderEvcVtx, false);
+ updateDmaapList(forwarderEvcVtx);
+ migrationSuccess++;
+
+ }catch (Exception e){
+ logger.info(e.toString());
+ migrationError(FORWARDER_EVC_NODE_TYPE, forwarderEvcId, "ivlan", ivlanValue);
+ }
+ }
+ }
+ }
+
+ }
+ }
+
+ /**
+ * Description: Validate if pnf node exists in Graph
+ * @param pnf
+ * @return boolean
+ */
+ private boolean pnfExists(String pnf){
+ if (invalidPnfList.contains(pnf)){
+ logger.info(this.MIGRATION_ERROR + "ERROR: PNF value " + pnf + " does not exist.");
+ return false;
+ }
+ if (validPnfList.contains(pnf)){
+ return true;
+ }
+
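+ // Cache miss: query the graph once and memoize the result so later file
+ // records for the same pnf skip the lookup.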
+ GraphTraversal<Vertex, Vertex> pnfGT = g.V()
+ .has("pnf-name", pnf).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE);
+
+ if(pnfGT.hasNext()){
+ validPnfList.add(pnf);
+ return true;
+ }
+ else{
+ logger.info(this.MIGRATION_ERROR + "ERROR: PNF value " + pnf + " does not exist.");
+ invalidPnfList.add(pnf);
+ return false;
+ }
+
+ }
+
+ /**
+ * Description: Validate if p-interface or lag-interface node exists in Graph
+ * @param pnf
+ * @param interfaceNodeType
+ * @param interfaceName
+ */
+ private void validateInterface(String pnf, String interfaceNodeType, String interfaceName){
+
+ List <String> validInterfaceList;
+ List <String> invalidInterfaceList;
+
+ if(!validInterfaceMap.containsKey(pnf) ){
+ validInterfaceList = new ArrayList<String>();
+ }else{
+ validInterfaceList = validInterfaceMap.get(pnf);
+ }
+
+ if(!invalidInterfaceMap.containsKey(pnf)){
+ invalidInterfaceList = new ArrayList<String>();
+ }else{
+ invalidInterfaceList = invalidInterfaceMap.get(pnf);
+ }
+
+ if(invalidInterfaceList.contains(interfaceName)){
+ logger.info(this.MIGRATION_ERROR + "ERROR PNF " + pnf + " with a " + interfaceNodeType + " of " + interfaceName + " does not exist.");
+ return;
+ }
+ if(validInterfaceList.contains(interfaceName)){
+ return;
+ }
+
+ GraphTraversal<Vertex, Vertex> interfaceGT = g.V()
+ .has("pnf-name", pnf).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE)
+ .in("tosca.relationships.network.BindsTo")
+ .has("interface-name", interfaceName).has(AAIProperties.NODE_TYPE, interfaceNodeType);
+
+ if(interfaceGT.hasNext()){
+ validInterfaceList.add(interfaceName);
+ validInterfaceMap.put(pnf, validInterfaceList);
+ }
+ else{
+ logger.info(this.MIGRATION_ERROR + "ERROR PNF " + pnf + " with a " + interfaceNodeType + " of " + interfaceName + " does not exist.");
+ invalidInterfaceList.add(interfaceName);
+ invalidInterfaceMap.put(pnf, invalidInterfaceList);
+ }
+ }
+
+
+ /**
+ * Description: Error Routine if graph is not updated by input file record
+ * @param nodeType
+ * @param nodeId
+ * @param property
+ * @param propertyValue
+ */
+ private void migrationError(String nodeType, String nodeId, String property, String propertyValue){
+ logger.info(this.MIGRATION_ERROR + "ERROR: Failure to update "
+ + nodeType + " ID " + nodeId + ", " + property + " to value " + propertyValue
+ + ". Node Not Found \n");
+ migrationFailure++;
+ }
+
+ private int getInvalidInterfaceCount(){
+ int interfaceCount = 0;
+
+ for (Map.Entry<String, List<String>> entry: invalidInterfaceMap.entrySet()){
+ List<String> invalidList = entry.getValue();
+
+ for (int i = 0; i < invalidList.size(); i++){
+ if(invalidList.get(i).contains(".")){
+ invalidPInterfaceCount++;
+ }else{
+ invalidLagInterfaceCount++;
+ }
+
+ }
+ interfaceCount = interfaceCount + invalidList.size();
+ }
+ return interfaceCount;
+ }
+
+ /**
+ * Description: Dmaap Routine
+ * @param v
+ */
+ private void updateDmaapList(Vertex v){
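+ // Event entries take the form "<nanoTime>_<vertexId>_<resourceVersion>";
+ // commit() later hands the accumulated list to createDmaapFiles().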
+ String dmaapMsg = System.nanoTime() + "_" + v.id().toString() + "_" + v.value("resource-version").toString();
+ dmaapMsgList.add(dmaapMsg);
+ logger.info("\tAdding Updated Vertex " + v.id().toString() + " to dmaapMsgList....");
+ }
+
+ @Override
+ public Status getStatus() {
+ if (checkLog) {
+ return Status.CHECK_LOGS;
+ }
+ else if (success) {
+ return Status.SUCCESS;
+ }
+ else {
+ return Status.FAILURE;
+ }
+ }
+
+ @Override
+ public void commit() {
+ engine.commit();
+ createDmaapFiles(dmaapMsgList);
+ }
+
+ @Override
+ public Optional<String[]> getAffectedNodeTypes() {
+ return Optional.of(new String[]{this.FORWARDER_EVC_NODE_TYPE});
+ }
+
+
+ @Override
+ public String getMigrationName() {
+ return "MigrateSdnaIvlanData";
+ }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v14/PserverDedupWithDifferentSourcesOfTruth.java b/src/main/java/org/onap/aai/migration/v14/PserverDedupWithDifferentSourcesOfTruth.java new file mode 100644 index 0000000..80944ff --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v14/PserverDedupWithDifferentSourcesOfTruth.java @@ -0,0 +1,358 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v14; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.apache.tinkerpop.gremlin.structure.*; +import org.janusgraph.core.attribute.Text; +import org.javatuples.Pair; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.edges.enums.AAIDirection; +import org.onap.aai.edges.enums.EdgeProperty; +import org.onap.aai.exceptions.AAIException; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.*; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.springframework.web.util.UriUtils; + +import javax.ws.rs.core.UriBuilder; + +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.util.*; +import java.util.stream.Collectors; + +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.introspection.Introspector; + +@Enabled +@MigrationPriority(10) +@MigrationDangerRating(100) +public class PserverDedupWithDifferentSourcesOfTruth extends EdgeSwingMigrator { + /** + * Instantiates a new migrator. 
+ * + * @param engine + */ + private final String PARENT_NODE_TYPE = "pserver"; + private boolean success = true; + protected Set<Object> seen = new HashSet<>(); + private Map<String, UriBuilder> nodeTypeToUri; + private Map<String, Set<String>> nodeTypeToKeys; + private static List<String> dmaapMsgList = new ArrayList<String>(); + private static List<Introspector> dmaapDeleteList = new ArrayList<Introspector>(); + private static int pserversUpdatedCount = 0; + private static int pserversDeletedCount = 0; + + + private static String[] rctSourceOfTruth = new String[]{"AAIRctFeed", "RCT"}; + private static String[] roSourceOfTruth = new String[]{"AAI-EXTENSIONS", "RO"}; + + List<Vertex> RemoveROList = new ArrayList<>(); + + public PserverDedupWithDifferentSourcesOfTruth(TransactionalGraphEngine engine , LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + } + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + createDmaapFilesForDelete(dmaapDeleteList); + + } + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } + else { + return Status.FAILURE; + } + } + + @Override + public List<Pair<Vertex, Vertex>> getAffectedNodePairs() { + return null; + } + + @Override + public String getNodeTypeRestriction() { + return null; + } + + @Override + public String getEdgeLabelRestriction() { + return null; + } + + @Override + public String getEdgeDirRestriction() { + return null; + } + + @Override + public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) { + + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return null; + } + + @Override + public String getMigrationName() { + return "PserverDedupWithDifferentSourcesOfTruth"; + } + + @Override + public void run() { + + int dupCount = 0; + nodeTypeToUri = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect( + Collectors.toMap( + e -> e.getKey(), + e -> UriBuilder.fromPath(e.getValue().getFullGenericURI().replaceAll("\\{"+ e.getKey() + "-", "{")) + )); + + nodeTypeToKeys = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect( + Collectors.toMap( + e -> e.getKey(), + e -> e.getValue().getKeys() + )); + + List<Vertex> rctList = graphTraversalSource().V().has("aai-node-type", "pserver").has("source-of-truth", P.within(rctSourceOfTruth)).toList(); + List<Vertex> roList = graphTraversalSource().V().has("aai-node-type", "pserver").has("source-of-truth", P.within(roSourceOfTruth)).toList(); + + logger.info("Total number of RCT sourced pservers in A&AI :" +rctList.size()); + logger.info("Total number of RO sourced pservers in A&AI :" +roList.size()); + + for(int i=0;i<rctList.size();i++){ + Vertex currRct = rctList.get(i); + Object currRctFqdn = null; + if (currRct.property("fqdn").isPresent() && (currRct.property("fqdn").value() != null)){ + currRctFqdn = currRct.property("fqdn").value(); + logger.info("\n"); + logger.info("Current RCT Pserver hostname: " + currRct.property("hostname").value().toString() + " fqdn: " +currRct.property("fqdn").value().toString()); + for(int j=0;j<roList.size();j++){ + Vertex currRo = roList.get(j); + Object currRoHostname = null; + if (currRo.property("hostname").isPresent()){ + currRoHostname = currRo.property("hostname").value(); + } + if (currRoHostname != null){ + 
String[] rctFqdnSplit = currRctFqdn.toString().split("\\.");
+ String[] roHostnameSplit = currRoHostname.toString().split("\\.");
+ if (rctFqdnSplit.length > 0 && roHostnameSplit.length > 0){
+ if(!rctFqdnSplit[0].isEmpty() && !roHostnameSplit[0].isEmpty() && rctFqdnSplit[0].equals(roHostnameSplit[0])){
+ logger.info("\tPserver match found - RO Pserver with hostname: "+currRo.property("hostname").value().toString());
+ dupCount++;
+ try {
+ mergePservers(currRct,currRo);
+ break;
+ } catch (UnsupportedEncodingException | AAIException e) {
+ success = false;
+ }
+ }
+ }
+ }
+ }
+ }
+ RemoveROList.forEach(v -> v.remove());
+ logger.info("\n \n ******* Migration Summary Counts for Dedup of RCT and RO sourced pservers ********* \n");
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Total number of RCT: " +rctList.size());
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Total number of RO: " +roList.size());
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Duplicate pserver count: "+ dupCount);
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RCT updated: "+pserversUpdatedCount);
+ logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RO deleted: "+ pserversDeletedCount +"\n");
+ }
+
+ private GraphTraversalSource graphTraversalSource() {
+ return this.engine.asAdmin().getTraversalSource();
+ }
+
+ // Merges an RO-sourced pserver into its RCT-sourced duplicate: the RCT vertex
+ // takes the RO hostname as fqdn, matching child interfaces on the RO side are
+ // dropped, the remaining RO edges are swung to the RCT vertex, and the RO
+ // vertex is queued for removal and for a DMaaP delete event.
+ public void mergePservers(Vertex rct, Vertex ro) throws UnsupportedEncodingException, AAIException {
+ Introspector obj = serializer.getLatestVersionView(ro);
+ dmaapDeleteList.add(obj);
+ rct.property("fqdn",ro.property("hostname").value().toString());
+ dropComplexEdge(ro);
+ dropMatchingROPInterfaces(ro, rct);
+ dropMatchingROLagInterfaces(ro, rct);
+ swingEdges(ro, rct, null, null, "BOTH");
+ modifyChildrenUri(rct);
+ if(!(rct.property("pserver-id").isPresent())){
+ rct.property("pserver-id",UUID.randomUUID().toString());
+ }
+ String dmaapMsg = System.nanoTime() + "_" + rct.id().toString() + "_" + rct.value("resource-version").toString();
+ dmaapMsgList.add(dmaapMsg);
+ pserversUpdatedCount++;
+ logger.info("\tAdding RO pserver to the delete list....");
+ RemoveROList.add(ro);
+ pserversDeletedCount++;
+ }
+
+ // Removes any p-interface under the RO pserver whose interface-name also
+ // exists under the RCT pserver, so swingEdges does not produce duplicate children.
+ private void dropMatchingROPInterfaces(Vertex ro, Vertex rct) {
+ Map<String, Vertex> removeROPIntMap = new HashMap<>();
+ List<Vertex> pIntList = graphTraversalSource().V(ro).in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").toList();
+ if (pIntList != null && !pIntList.isEmpty()) {
+ Iterator<Vertex> pIntListItr = pIntList.iterator();
+ while(pIntListItr.hasNext()){
+ Vertex pInt = pIntListItr.next();
+ removeROPIntMap.put(pInt.property("interface-name").value().toString(), pInt);
+ }
+ Set<String> interfaceNameSet = removeROPIntMap.keySet();
+ List<Vertex> rctPIntList = graphTraversalSource().V(rct).in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").toList();
+ if (rctPIntList != null && !rctPIntList.isEmpty()){
+ Iterator<Vertex> rctPIntListItr = rctPIntList.iterator();
+ while(rctPIntListItr.hasNext()){
+ Vertex rctPInt = rctPIntListItr.next();
+ String rctIntfName = rctPInt.property("interface-name").value().toString();
+ if (interfaceNameSet.contains(rctIntfName)){
+ Vertex pIntToRemoveFromROPserver = removeROPIntMap.get(rctIntfName);
+ String roPIntUri = "roPIntUri";
+ if (pIntToRemoveFromROPserver.property("aai-uri").isPresent()){
+ roPIntUri = pIntToRemoveFromROPserver.property("aai-uri").value().toString();
+ }
+ Edge roPIntToPserverEdge = pIntToRemoveFromROPserver.edges(Direction.OUT, "tosca.relationships.network.BindsTo").next();
+ roPIntToPserverEdge.remove();
+ pIntToRemoveFromROPserver.remove();
+ logger.info("\tRemoved p-interface "+roPIntUri + " and its edge to RO pserver, not swinging the p-interface to RCT pserver");
+ }
+ }
+ }
+ }
+ }
+
+ // Same as dropMatchingROPInterfaces, but for lag-interfaces.
+ private void dropMatchingROLagInterfaces(Vertex ro, Vertex rct) {
+ Map<String, Vertex> removeROLagIntMap = new HashMap<>();
+ List<Vertex> lagIntList = graphTraversalSource().V(ro).in("tosca.relationships.network.BindsTo").has("aai-node-type","lag-interface").toList();
+ if (lagIntList != null && !lagIntList.isEmpty()) {
+ Iterator<Vertex> lagIntListItr = lagIntList.iterator();
+ while(lagIntListItr.hasNext()){
+ Vertex lagInt = lagIntListItr.next();
+ removeROLagIntMap.put(lagInt.property("interface-name").value().toString(), lagInt);
+ }
+ Set<String> interfaceNameSet = removeROLagIntMap.keySet();
+ List<Vertex> rctLagIntList = graphTraversalSource().V(rct).in("tosca.relationships.network.BindsTo").has("aai-node-type","lag-interface").toList();
+ if (rctLagIntList != null && !rctLagIntList.isEmpty()){
+ Iterator<Vertex> rctLagIntListItr = rctLagIntList.iterator();
+ while(rctLagIntListItr.hasNext()){
+ Vertex rctLagInt = rctLagIntListItr.next();
+ String rctIntfName = rctLagInt.property("interface-name").value().toString();
+ if (interfaceNameSet.contains(rctIntfName)){
+ Vertex lagIntToRemoveFromROPserver = removeROLagIntMap.get(rctIntfName);
+ String roLagIntUri = "roLagIntUri";
+ if (lagIntToRemoveFromROPserver.property("aai-uri").isPresent()){
+ roLagIntUri = lagIntToRemoveFromROPserver.property("aai-uri").value().toString();
+ }
+ Edge roLagIntToPserverEdge = lagIntToRemoveFromROPserver.edges(Direction.OUT, "tosca.relationships.network.BindsTo").next();
+ roLagIntToPserverEdge.remove();
+ lagIntToRemoveFromROPserver.remove();
+ logger.info("\tRemoved lag-interface "+roLagIntUri + " and its edge to RO pserver, not swinging the lag-interface to RCT pserver");
+ }
+ }
+ }
+ }
+ }
+
+ public void dropComplexEdge(Vertex ro){
+ List<Vertex> locatedInEdgeVertexList = graphTraversalSource().V(ro).has("aai-node-type", "pserver").out("org.onap.relationships.inventory.LocatedIn").has("aai-node-type","complex").toList();
+ if (locatedInEdgeVertexList != null && !locatedInEdgeVertexList.isEmpty()){
+ Iterator<Vertex> locatedInEdgeVertexListItr = locatedInEdgeVertexList.iterator();
+ while (locatedInEdgeVertexListItr.hasNext()){
+ Vertex v = locatedInEdgeVertexListItr.next();
+ if ("complex".equalsIgnoreCase(v.property("aai-node-type").value().toString())){
+ Edge pserverToComplexEdge = v.edges(Direction.IN, "org.onap.relationships.inventory.LocatedIn").next();
+ pserverToComplexEdge.remove();
+ }
+ }
+ }
+ }
+
+ private void modifyChildrenUri(Vertex v) throws UnsupportedEncodingException, AAIException {
+ Set<Vertex> parentSet = new HashSet<>();
+ parentSet.add(v);
+ verifyOrAddUri("", parentSet);
+ }
+
+ // Walks the containment tree below each vertex, regenerating aai-uri from the
+ // node keys and stamping an aai-uuid where one is missing.
+ protected void verifyOrAddUri(String parentUri, Set<Vertex> vertexSet) throws UnsupportedEncodingException, AAIException {
+ String correctUri;
+ for (Vertex v : vertexSet) {
+ seen.add(v.id());
+ // if there is an issue generating the uri: catch, log and move on
+ try {
+ correctUri = parentUri + this.getUriForVertex(v);
+ } catch (Exception e) {
+ logger.error("Vertex has issue generating uri " + e.getMessage() + "\n\t" + this.asString(v));
+ continue;
+ }
+ try {
+ v.property(AAIProperties.AAI_URI, correctUri);
+ } catch (Exception e) {
+ logger.info("\t" + e.getMessage() + "\n\t" + this.asString(v));
+ }
+ if (!v.property(AAIProperties.AAI_UUID).isPresent()) {
+ v.property(AAIProperties.AAI_UUID, UUID.randomUUID().toString());
+ }
+ this.verifyOrAddUri(correctUri, getChildren(v));
+ }
+ }
+
+ protected Set<Vertex> getChildren(Vertex v) {
+ Set<Vertex> children = graphTraversalSource().V(v).bothE().not(__.has(EdgeProperty.CONTAINS.toString(), AAIDirection.NONE.toString())).otherV().toSet();
+ return children.stream().filter(child -> !seen.contains(child.id())).collect(Collectors.toSet());
+ }
+
+ protected String getUriForVertex(Vertex v) {
+ String aaiNodeType = v.property(AAIProperties.NODE_TYPE).value().toString();
+ Map<String, String> parameters = this.nodeTypeToKeys.get(aaiNodeType).stream().collect(Collectors.toMap(
+ key -> key,
+ key -> encodeProp(v.property(key).value().toString())
+ ));
+ return this.nodeTypeToUri.get(aaiNodeType).buildFromEncodedMap(parameters).toString();
+ }
+
+ private static String encodeProp(String s) {
+ try {
+ return UriUtils.encode(s, "UTF-8");
+ } catch (UnsupportedEncodingException e) {
+ // UTF-8 is always available, so this path should be unreachable.
+ return "";
+ }
+ }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v15/MigrateBooleanDefaultsToFalse.java b/src/main/java/org/onap/aai/migration/v15/MigrateBooleanDefaultsToFalse.java
new file mode 100644
index 0000000..3152436
--- /dev/null
+++ b/src/main/java/org/onap/aai/migration/v15/MigrateBooleanDefaultsToFalse.java
@@ -0,0 +1,115 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v15;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Status;
+import org.onap.aai.migration.ValueMigrator;
+import org.onap.aai.setup.SchemaVersions;
+
+
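+/**
+ * Defaults a set of boolean attributes to false across the node types listed
+ * in getAffectedNodeTypes(). The node-type/property map built below is handed
+ * to the ValueMigrator base class; the trailing "false" in the super() call is
+ * read here as "do not overwrite values that are already set" (an assumption
+ * based on how the flag is used elsewhere, not confirmed by this diff).
+ */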
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateBooleanDefaultsToFalse extends ValueMigrator {
+ protected static final String VNF_NODE_TYPE = "generic-vnf";
+ protected static final String VSERVER_NODE_TYPE = "vserver";
+ protected static final String VNFC_NODE_TYPE = "vnfc";
+ protected static final String L3NETWORK_NODE_TYPE = "l3-network";
+ protected static final String SUBNET_NODE_TYPE = "subnet";
+ protected static final String LINTERFACE_NODE_TYPE = "l-interface";
+ protected static final String VFMODULE_NODE_TYPE = "vf-module";
+ protected static final String VLAN_NODE_TYPE = "vlan";
+
+ private static Map<String, Map> map;
+ private static Map<String, Boolean> pair1;
+ private static Map<String, Boolean> pair2;
+ private static Map<String, Boolean> pair3;
+ private static Map<String, Boolean> pair4;
+ private static Map<String, Boolean> pair5;
+ private static Map<String, Boolean> pair6;
+
+ public MigrateBooleanDefaultsToFalse(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+ super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setBooleanDefaultsToFalse(), false);
+
+ }
+
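+ // Builds the node-type -> (boolean property -> default) map consumed by
+ // ValueMigrator. Note that pair1 is shared by reference between the
+ // generic-vnf, vnfc and vserver entries.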
+ private static Map<String, Map> setBooleanDefaultsToFalse(){
+ map = new HashMap<>();
+ pair1 = new HashMap<>();
+ pair2 = new HashMap<>();
+ pair3 = new HashMap<>();
+ pair4 = new HashMap<>();
+ pair5 = new HashMap<>();
+ pair6 = new HashMap<>();
+
+
+ pair1.put("is-closed-loop-disabled", false);
+ map.put("generic-vnf", pair1);
+ map.put("vnfc", pair1);
+ map.put("vserver", pair1);
+
+ pair2.put("is-bound-to-vpn", false);
+ pair2.put("is-provider-network", false);
+ pair2.put("is-shared-network", false);
+ pair2.put("is-external-network", false);
+ map.put("l3-network", pair2);
+
+ pair3.put("dhcp-enabled", false);
+ map.put("subnet", pair3);
+
+ pair4.put("is-port-mirrored", false);
+ pair4.put("is-ip-unnumbered", false);
+ map.put("l-interface", pair4);
+
+ pair5.put("is-base-vf-module", false);
+ map.put("vf-module", pair5);
+
+ pair6.put("is-ip-unnumbered", false);
+ map.put("vlan", pair6);
+
+ return map;
+ }
+
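+ // Reports SUCCESS unconditionally; any per-vertex failures are only
+ // visible in the migration logs.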
+ @Override
+ public Status getStatus() {
+ return Status.SUCCESS;
+ }
+
+ @Override
+ public Optional<String[]> getAffectedNodeTypes() {
+ return Optional.of(new String[]{VNF_NODE_TYPE,VSERVER_NODE_TYPE,VNFC_NODE_TYPE,L3NETWORK_NODE_TYPE,SUBNET_NODE_TYPE,LINTERFACE_NODE_TYPE,VFMODULE_NODE_TYPE,VLAN_NODE_TYPE});
+ }
+
+ @Override
+ public String getMigrationName() {
+ return "MigrateBooleanDefaultsToFalse";
+ }
+
+}
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/v15/MigrateCloudRegionUpgradeCycle.java b/src/main/java/org/onap/aai/migration/v15/MigrateCloudRegionUpgradeCycle.java new file mode 100644 index 0000000..6104d9d --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v15/MigrateCloudRegionUpgradeCycle.java @@ -0,0 +1,361 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v15; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.javatuples.Pair; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.Enabled; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Migrator; +import org.onap.aai.migration.Status; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; + + +@MigrationPriority(20) +@MigrationDangerRating(2) +@Enabled +public class MigrateCloudRegionUpgradeCycle extends Migrator { + + protected static final String CLOUD_REGION_NODE_TYPE = "cloud-region"; + protected static final String CLOUD_OWNER = "cloud-owner"; + protected static final String CLOUD_REGION_ID = "cloud-region-id"; + protected static final String UPGRADE_CYCLE = "upgrade-cycle"; + private static final String homeDir = System.getProperty("AJSC_HOME"); + + protected final AtomicInteger skippedRowsCount = new AtomicInteger(0); + protected final AtomicInteger processedRowsCount = new AtomicInteger(0); + + private boolean success = true; + private GraphTraversalSource g = null; + protected int headerLength; + + protected final AtomicInteger falloutRowsCount = new AtomicInteger(0); + + public MigrateCloudRegionUpgradeCycle(TransactionalGraphEngine engine, LoaderFactory loaderFactory, + EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, 
schemaVersions); + } + + @Override + public void run() { + logger.info("---------- Start Updating upgrade-cycle for cloud-region ----------"); + + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info("ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return; + } + if (configDir == null) { + success = false; + return; + } + + String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/CloudRegion-ART-migration-data/"; + String fileName = feedDir + "CloudRegion-ART-migration-data.csv"; + logger.info(fileName); + logger.info("---------- Processing Region Entries from file ----------"); + + Map cloudRegionVertexMap = new HashMap(); + + try { + int cloudRegionCount = 0; + int cloudRegionErrorCount = 0; + ArrayList data = loadFile(fileName); + + Map<String, String> cloudRegionMapFromART = (Map) data.get(0); + Map<String, String> cloudAliasMapFromART = (Map) data.get(1); + + List<Vertex> cloudRegionList = this.engine.asAdmin().getTraversalSource().V() + .has(AAIProperties.NODE_TYPE, CLOUD_REGION_NODE_TYPE).has(CLOUD_OWNER, "att-aic").toList(); + + for (Vertex vertex : cloudRegionList) { + String cloudRegionId = null; + cloudRegionId = getCloudRegionIdNodeValue(vertex); + cloudRegionVertexMap.put(cloudRegionId, vertex); + } + + for (Map.Entry<String, String> entry : cloudRegionMapFromART.entrySet()) { + boolean regionFound = false; + String regionFromART = ""; + String aliasFromART = ""; + String vertexKey = ""; + + regionFromART = (String) entry.getKey(); + + if (cloudRegionVertexMap.containsKey(regionFromART)) { + regionFound = true; + vertexKey = regionFromART; + } else { + aliasFromART = cloudAliasMapFromART.get(regionFromART).toString(); + if (aliasFromART != null && !"".equals(aliasFromART) + && cloudRegionVertexMap.containsKey(aliasFromART)) { + regionFound = true; + vertexKey = aliasFromART; + } + } + + if (regionFound) { + String upgradeCycle = ""; + try { + upgradeCycle = (String) entry.getValue(); + + if (upgradeCycle != null && !"".equals(upgradeCycle)) { + Vertex vertex = (Vertex) cloudRegionVertexMap.get(vertexKey); + vertex.property(UPGRADE_CYCLE, upgradeCycle); + this.touchVertexProperties(vertex, false); + logger.info("Updated cloud-region, upgrade-cycle to " + upgradeCycle + + " having cloud-region-id : " + vertexKey); + cloudRegionCount++; + } else { + logger.info("upgrade-cycle value from ART is null or empty for the cloud-region-id : " + + vertexKey); + } + } catch (Exception e) { + success = false; + cloudRegionErrorCount++; + logger.error(MIGRATION_ERROR + + "encountered exception for upgrade-cycle update having cloud-region-id :" + vertexKey, + e); + } + } else { + logger.info("Region "+regionFromART+" from ART is not found in A&AI"); + } + + } + + logger.info("\n \n ******* Final Summary of Updated upgrade-cycle for cloud-region Migration ********* \n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Number of cloud-region updated: " + cloudRegionCount + "\n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Number of cloud-region failed to update due to error : " + + cloudRegionErrorCount + "\n"); + + } catch (FileNotFoundException e) { + logger.info("ERROR: Could not file file " + fileName, e.getMessage()); + success = false; + } catch (IOException e) { + logger.info("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info("encountered exception", e); + e.printStackTrace(); + success = false; + } + } + + /** + * Load file to the map for processing + * + * @param 
fileName + * @return + * @throws Exception + */ + protected ArrayList loadFile(String fileName) throws Exception { + List<String> lines = Files.readAllLines(Paths.get(fileName)); + return this.getFileContents(lines); + } + + /** + * Get lines from file. + * + * @param lines + * @return + * @throws Exception + */ + protected ArrayList getFileContents(List<String> lines) throws Exception { + + final Map<String, String> regionMap = new ConcurrentHashMap<>(); + final Map<String, String> aliasMap = new ConcurrentHashMap<>(); + final ArrayList fileContent = new ArrayList(); + + processAndRemoveHeader(lines); + + logger.info("Total rows count excluding header: " + lines.size()); + + lines.stream().filter(line -> !line.isEmpty()).map(line -> Arrays.asList(line.split("\\s*,\\s*", -1))) + .map(this::processRegionUpgradeCycle).filter(Optional::isPresent).map(Optional::get).forEach(p -> { + processedRowsCount.getAndIncrement(); + String pnfName = p.getValue0(); + if (!regionMap.containsKey(pnfName)) { + regionMap.put(p.getValue0(), p.getValue1()); + } + }); + + fileContent.add(regionMap); + + lines.stream().filter(line -> !line.isEmpty()).map(line -> Arrays.asList(line.split("\\s*,\\s*", -1))) + .map(this::processRegionAlias).filter(Optional::isPresent).map(Optional::get).forEach(p -> { + processedRowsCount.getAndIncrement(); + String pnfName = p.getValue0(); + if (!aliasMap.containsKey(pnfName)) { + aliasMap.put(p.getValue0(), p.getValue1()); + } + }); + fileContent.add(aliasMap); + return fileContent; + + } + + /** + * Verify line has the necessary details. + * + * @param line + * @return + */ + protected boolean verifyLine(List<String> line) { + if (line.size() != headerLength) { + logger.info("ERROR: INV line should contain " + headerLength + " columns, contains " + line.size() + + " instead."); + this.skippedRowsCount.getAndIncrement(); + return false; + } + return true; + } + + /** + * * Get the pnf name and interface name from the line. + * + * @param line + * @return + */ + protected Optional<Pair<String, String>> processRegionAlias(List<String> line) { + //logger.info("Processing line... " + line.toString()); + int lineSize = line.size(); + if (lineSize < 4) { + logger.info("Skipping line, does not contain region and/or upgrade-cycle columns"); + skippedRowsCount.getAndIncrement(); + return Optional.empty(); + } + + String cloudRegion = line.get(0); + String upgradeCycle = line.get(1).replaceAll("^\"|\"$", "").replaceAll("\\s+", ""); + + if (cloudRegion.isEmpty()) { + logger.info("Line missing cloudRegion name" + line); + falloutRowsCount.getAndIncrement(); + return Optional.empty(); + } + + return Optional.of(Pair.with(cloudRegion, upgradeCycle)); + } + + /** + * * Get the pnf name and interface name from the line. + * + * @param line + * @return + */ + protected Optional<Pair<String, String>> processRegionUpgradeCycle(List<String> line) { + //logger.info("Processing line... 
" + line.toString()); + int lineSize = line.size(); + if (lineSize < 4) { + logger.info("Skipping line, does not contain region and/or upgrade-cycle columns"); + skippedRowsCount.getAndIncrement(); + return Optional.empty(); + } + + String cloudRegion = line.get(0); + String upgradeCycle = line.get(3).replaceAll("^\"|\"$", "").replaceAll("\\s+", ""); + + if (cloudRegion.isEmpty()) { + logger.info("Line missing cloudRegion name" + line); + falloutRowsCount.getAndIncrement(); + return Optional.empty(); + } + + return Optional.of(Pair.with(cloudRegion, upgradeCycle)); + } + + /** + * Verify header of the csv and remove it from the list. + * + * @param lines + * @throws Exception + */ + protected String processAndRemoveHeader(List<String> lines) throws Exception { + String firstLine; + if (lines.isEmpty()) { + String msg = "ERROR: Missing Header in file"; + success = false; + logger.error(msg); + throw new Exception(msg); + } else { + firstLine = lines.get(0); + } + + this.headerLength = firstLine.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength); + if (this.headerLength < 4) { + String msg = "ERROR: Input file should have 4 columns"; + success = false; + logger.error(msg); + throw new Exception(msg); + } + + return lines.remove(0); + } + + private String getCloudRegionIdNodeValue(Vertex vertex) { + String propertyValue = ""; + if (vertex != null && vertex.property(CLOUD_REGION_ID).isPresent()) { + propertyValue = vertex.property(CLOUD_REGION_ID).value().toString(); + } + return propertyValue; + } + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } else { + return Status.FAILURE; + } + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[] { CLOUD_REGION_NODE_TYPE }); + } + + @Override + public String getMigrationName() { + return "MigrateCloudRegionUpgradeCycle"; + } + +} diff --git a/src/main/java/org/onap/aai/migration/v15/MigrateInMaintDefaultToFalse.java b/src/main/java/org/onap/aai/migration/v15/MigrateInMaintDefaultToFalse.java new file mode 100644 index 0000000..d00d4f1 --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v15/MigrateInMaintDefaultToFalse.java @@ -0,0 +1,99 @@ +/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v15;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Status;
+import org.onap.aai.migration.ValueMigrator;
+import org.onap.aai.setup.SchemaVersions;
+
+
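+/**
+ * Defaults in-maint to false on the node types listed below. The property map
+ * is handed to the ValueMigrator base class with the same trailing "false"
+ * flag as MigrateBooleanDefaultsToFalse, so vertices that already carry an
+ * in-maint value are presumed to be left untouched.
+ */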
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateInMaintDefaultToFalse extends ValueMigrator {
+
+ protected static final String VNF_NODE_TYPE = "generic-vnf";
+ protected static final String LINTERFACE_NODE_TYPE = "l-interface";
+ protected static final String LAG_INTERFACE_NODE_TYPE = "lag-interface";
+ protected static final String LOGICAL_LINK_NODE_TYPE = "logical-link";
+ protected static final String PINTERFACE_NODE_TYPE = "p-interface";
+ protected static final String VLAN_NODE_TYPE = "vlan";
+ protected static final String VNFC_NODE_TYPE = "vnfc";
+ protected static final String VSERVER_NODE_TYPE = "vserver";
+ protected static final String PSERVER_NODE_TYPE = "pserver";
+ protected static final String PNF_NODE_TYPE = "pnf";
+ protected static final String NOS_SERVER_NODE_TYPE = "nos-server";
+
+ private static Map<String, Map> map;
+ private static Map<String, Boolean> pair;
+
+ public MigrateInMaintDefaultToFalse(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+ super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setInMaintToFalse(), false);
+ }
+
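+ // A single map instance is reused for every node type, since each one
+ // defaults the same in-maint property to false.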
+ private static Map<String, Map> setInMaintToFalse(){
+ map = new HashMap<>();
+ pair = new HashMap<>();
+
+ pair.put("in-maint", false);
+
+ map.put("generic-vnf", pair);
+ map.put("l-interface", pair);
+ map.put("lag-interface", pair);
+ map.put("logical-link", pair);
+ map.put("p-interface", pair);
+ map.put("vlan", pair);
+ map.put("vnfc", pair);
+ map.put("vserver", pair);
+ map.put("pserver", pair);
+ map.put("pnf", pair);
+ map.put("nos-server", pair);
+
+ return map;
+ }
+
+ @Override
+ public Status getStatus() {
+ return Status.SUCCESS;
+ }
+
+ @Override
+ public Optional<String[]> getAffectedNodeTypes() {
+ return Optional.of(new String[]{VNF_NODE_TYPE,LINTERFACE_NODE_TYPE,LAG_INTERFACE_NODE_TYPE,LOGICAL_LINK_NODE_TYPE,PINTERFACE_NODE_TYPE,VLAN_NODE_TYPE,VNFC_NODE_TYPE,VSERVER_NODE_TYPE,PSERVER_NODE_TYPE,PNF_NODE_TYPE,NOS_SERVER_NODE_TYPE});
+ }
+
+ @Override
+ public String getMigrationName() {
+ return "MigrateInMaintDefaultToFalse";
+ }
+
+}
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/v15/MigrateRadcomChanges.java b/src/main/java/org/onap/aai/migration/v15/MigrateRadcomChanges.java new file mode 100644 index 0000000..3ae17bd --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v15/MigrateRadcomChanges.java @@ -0,0 +1,733 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v15; +/*- + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.javatuples.Pair; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.edges.enums.EdgeType; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.EdgeSwingMigrator; +import org.onap.aai.migration.Enabled; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Status; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; + + +@MigrationPriority(26) +@MigrationDangerRating(100) +@Enabled +public class MigrateRadcomChanges extends EdgeSwingMigrator { + + private final String SERVICE_MODEL_TYPE = "Service"; + private final String RESOURCE_MODEL_TYPE = "VNF-Resource"; + private final String MODEL_INVARIANT_ID = "model-invariant-id"; + private final String MODEL_INVARIANT_ID_LOCAL = "model-invariant-id-local"; + private final String MODEL_VERSION_ID = "model-version-id"; + private final String MODEL_VERSION_ID_LOCAL = "model-version-id-local"; + private final String MODEL_CUSTOMIZATION_ID = "model-customization-id"; + private final String PERSONA_MODEL_VERSION = "persona-model-version"; + private final String GENERIC_VNF = "generic-vnf"; + private final String VNF_NAME = "vnf-name"; + private final String VNF_TYPE = "vnf-type"; + private final String SERVICE_INSTANCE = "service-instance"; + private final String SERVICE_INSTANCE_ID = "service-instance-id"; + private final String VF_MODULE = "vf-module"; + private final String VF_MODULE_ID = "vf-module-id"; + private final String MODEL = "model"; + private final String MODEL_VER = "model-ver"; + private final String MODEL_NAME = "model-name"; + private final String MODEL_VERSION = "model-version"; + private final String MODEL_ELEMENT = "model-element"; + private final String VSERVER = "vserver"; + private final String VSERVER_ID = "vserver-id"; + private final String IMAGE = "image"; + private final String IMAGE_NAME = "image-name"; + private final String TENANT = "tenant"; + private final String CLOUD_REGION = "cloud-region"; + + private static boolean success = true; + private static boolean checkLog = false; + private static GraphTraversalSource g = null; + private int headerLength; + private int genericVnfMigrationSuccess = 0; + private int genericVnfMigrationFailure = 0; + private int serviceInstanceMigrationSuccess = 0; + private int serviceInstanceMigrationFailure = 0; + private int vfModuleMigrationSuccess = 0; + private int vfModuleMigrationFailure = 0; + private int imageMigrationSuccess = 0; + private int imageMigrationFailure = 0; + + private static List<String> dmaapMsgList = new ArrayList<String>(); + private static final String homeDir = System.getProperty("AJSC_HOME"); + + protected class VfModuleFileData { + String vfModuleId; + String vfModuleModelName; + String imageName; + + public VfModuleFileData(String 
vfModuleId, String vfModuleModelName, String imageName) { + this.vfModuleId = vfModuleId; + this.vfModuleModelName = vfModuleModelName; + this.imageName = imageName; + } + + public String getVfModuleId() { + return vfModuleId; + } + public void setVfModuleId(String vfModuleId) { + this.vfModuleId = vfModuleId; + } + public String getVfModuleModelName() { + return vfModuleModelName; + } + public void setVfModuleModelName(String vfModuleModelName) { + this.vfModuleModelName = vfModuleModelName; + } + public String getImageName() { + return imageName; + } + public void setImageName(String imageName) { + this.imageName = imageName; + } + } + + + + public MigrateRadcomChanges(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + g = this.engine.asAdmin().getTraversalSource(); + } + + @Override + public void executeModifyOperation() { + logger.info("---------- Start migration ----------"); + String configDir = System.getProperty("BUNDLECONFIG_DIR"); + if (homeDir == null) { + logger.info(MIGRATION_ERROR + "ERROR: Could not find sys prop AJSC_HOME"); + success = false; + return; + } + if (configDir == null) { + success = false; + return; + } + + ArrayList<VfModuleFileData> vfModuleFileLineList = new ArrayList<VfModuleFileData>(); + + String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/radcom-changes/"; + String fileName = feedDir+ "INPUT-MODEL.csv"; + int genericVnfFileLineCounter = 0; + + logger.info(fileName); + logger.info("---------- Reading all file types and vf-modules ----------"); + ArrayList<String> fileTypeList = new ArrayList<String>(); + try { + List<String> lines = Files.readAllLines(Paths.get(fileName)); + Iterator<String> lineItr = lines.iterator(); + int typeFileLineCounter = 0; + while (lineItr.hasNext()){ + String line = lineItr.next().replace("\n", "").replace("\r", ""); + if (!line.isEmpty()) { + if (typeFileLineCounter != 0) { + String[] colList = line.split("\\s*,\\s*", -1); + if(!colList[0].equalsIgnoreCase(SERVICE_MODEL_TYPE) && !colList[0].equalsIgnoreCase(RESOURCE_MODEL_TYPE)) { + vfModuleFileLineList.add(new VfModuleFileData(colList[0], colList[5], colList[6])); + } + if(!colList[1].isEmpty() && !fileTypeList.contains(colList[1])) { + fileTypeList.add(colList[1]); + } + } else { + this.headerLength = line.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength + "\n"); + if (this.headerLength != 7 ){ + logger.info(MIGRATION_ERROR + "ERROR: Input file should have 7 columns"); + success = false; + return; + } + } + } + typeFileLineCounter++; + } + } catch (FileNotFoundException e) { + logger.info(MIGRATION_ERROR + "ERROR: Could not file file " + fileName, e.getMessage()); + success = false; + checkLog = true; + } catch (IOException e) { + logger.info(MIGRATION_ERROR + "ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info(MIGRATION_ERROR + "encountered exception", e); + e.printStackTrace(); + success = false; + } + + int numberOfFileTypes = fileTypeList.size(); + for(int i = 0; i < numberOfFileTypes; i++) { + String newServiceModelInvariantId = ""; + String newServiceModelVersionId = ""; + String newResourceModelInvariantId = ""; + String newResourceModelVersionId = ""; + String newResourceModelCustomizationId = ""; + ArrayList<String> genericVnfList = new ArrayList<String>(); + + + int 
modelFileLineCounter = 0; + genericVnfFileLineCounter = 0; + fileName = feedDir+ "INPUT-MODEL.csv"; + + logger.info(fileName); + logger.info("---------- Processing Entries from file ----------"); + try { + List<String> lines = Files.readAllLines(Paths.get(fileName)); + Iterator<String> lineItr = lines.iterator(); + while (lineItr.hasNext()){ + String line = lineItr.next().replace("\n", "").replace("\r", ""); + if (!line.isEmpty()) { + if (modelFileLineCounter != 0) { + String[] colList = line.split("\\s*,\\s*", -1); + if(colList[1].equals(fileTypeList.get(i)) && colList[0].equalsIgnoreCase(SERVICE_MODEL_TYPE)) { + newServiceModelInvariantId = colList[2]; + newServiceModelVersionId = colList[3]; + } + else if(colList[1].equals(fileTypeList.get(i)) && colList[0].equalsIgnoreCase(RESOURCE_MODEL_TYPE)) { + newResourceModelInvariantId = colList[2]; + newResourceModelVersionId = colList[3]; + newResourceModelCustomizationId = colList[4]; + } + } + } + modelFileLineCounter++; + } + fileName = feedDir+ "INPUT-VNF.csv"; + logger.info(fileName); + logger.info("---------- Processing Entries from file ----------"); + lines = Files.readAllLines(Paths.get(fileName)); + lineItr = lines.iterator(); + while (lineItr.hasNext()){ + String line = lineItr.next().replace("\n", "").replace("\r", ""); + if (!line.isEmpty()) { + if (genericVnfFileLineCounter != 0) { + String[] colList = line.split("\\s*,\\s*", -1); + if(colList[1].equals(fileTypeList.get(i))) { + genericVnfList.add(colList[0]); + } + } else { + this.headerLength = line.split("\\s*,\\s*", -1).length; + logger.info("headerLength: " + headerLength + "\n"); + if (this.headerLength != 2){ + logger.info(MIGRATION_ERROR + "ERROR: Input file should have 2 columns"); + success = false; + return; + } + } + } + genericVnfFileLineCounter++; + } + updateGenericVnfs(fileTypeList.get(i), genericVnfList, newServiceModelInvariantId, newServiceModelVersionId, + newResourceModelInvariantId, newResourceModelVersionId, newResourceModelCustomizationId, vfModuleFileLineList); + } catch (FileNotFoundException e) { + logger.info(MIGRATION_ERROR + "ERROR: Could not file file " + fileName, e.getMessage()); + success = false; + checkLog = true; + } catch (IOException e) { + logger.info(MIGRATION_ERROR + "ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info(MIGRATION_ERROR + "encountered exception", e); + e.printStackTrace(); + success = false; + } + } + logger.info ("\n \n ******* Final Summary for RADCOM Change Migration ********* \n"); + logger.info(MIGRATION_SUMMARY_COUNT + "Total generic-vnfs in File: "+(genericVnfFileLineCounter + 1)); + logger.info(MIGRATION_SUMMARY_COUNT + " generic-vnfs processed: "+ genericVnfMigrationSuccess); + logger.info(MIGRATION_SUMMARY_COUNT + " generic-vnfs failed to process: "+ genericVnfMigrationFailure); + logger.info(MIGRATION_SUMMARY_COUNT + " service-instances processed: "+ serviceInstanceMigrationSuccess); + logger.info(MIGRATION_SUMMARY_COUNT + " service-instances failed to process: "+ serviceInstanceMigrationFailure); + logger.info(MIGRATION_SUMMARY_COUNT + " vf-modules processed: "+ vfModuleMigrationSuccess); + logger.info(MIGRATION_SUMMARY_COUNT + " vf-modules failed to process: "+ vfModuleMigrationFailure); + logger.info(MIGRATION_SUMMARY_COUNT + " images processed: "+ imageMigrationSuccess); + logger.info(MIGRATION_SUMMARY_COUNT + " images failed to process: "+ imageMigrationFailure +"\n"); + } + + private void updateGenericVnfs(String vnfType, ArrayList<String> genericVnfList, 
String newServiceModelInvariantId, + String newServiceModelVersionId, String newResourceModelInvariantId, String newResourceModelVersionId, + String newResourceModelCustomizationId, ArrayList<VfModuleFileData> vfModuleFileLineList) { + int numberOfNames = genericVnfList.size(); + Vertex newModelVerNode = null; + GraphTraversal<Vertex, Vertex> modelVerNodeList = g.V().has(AAIProperties.NODE_TYPE, MODEL). + has(MODEL_INVARIANT_ID, newResourceModelInvariantId).in("org.onap.relationships.inventory.BelongsTo"). + has(AAIProperties.NODE_TYPE, MODEL_VER).has(MODEL_VERSION_ID, newResourceModelVersionId); + if(!modelVerNodeList.hasNext()) { + logger.info(MIGRATION_ERROR + "ERROR: Model " + newResourceModelInvariantId + " with model-ver " + + newResourceModelVersionId + " does not exist in database \n"); + for(int i = 0; i < numberOfNames; i++) { + genericVnfMigrationFailure++; + } + } + else { + newModelVerNode = modelVerNodeList.next(); + for(int i = 0; i < numberOfNames; i++) { + GraphTraversal<Vertex, Vertex> genericVnfNodeList = g.V().has(AAIProperties.NODE_TYPE, GENERIC_VNF). + has(VNF_NAME, genericVnfList.get(i)).has(VNF_TYPE, vnfType); + if(!genericVnfNodeList.hasNext()) { + logger.info(MIGRATION_ERROR + "ERROR: Failure to update generic-vnf " + genericVnfList.get(i) + + " Graph Traversal failed \n"); + genericVnfMigrationFailure++; + } + while (genericVnfNodeList.hasNext()) { + Vertex genericVnfVtx = genericVnfNodeList.next(); + boolean updateSuccess = false; + if (genericVnfVtx != null) { + logger.info("Updating generic-vnf " + genericVnfVtx.value(VNF_NAME) + " with " + + "current model-invariant-id " + + (genericVnfVtx.property(MODEL_INVARIANT_ID).isPresent() + ? genericVnfVtx.value(MODEL_INVARIANT_ID) : "null") + + ", current model-version-id " + + (genericVnfVtx.property(MODEL_VERSION_ID).isPresent() + ? genericVnfVtx.value(MODEL_VERSION_ID) : "null") + + ", and current model-customization-id " + + (genericVnfVtx.property(MODEL_CUSTOMIZATION_ID).isPresent() + ? 
genericVnfVtx.value(MODEL_CUSTOMIZATION_ID) : "null") + + " to use model-invariant-id " + newResourceModelInvariantId + "," + + " model-version-id " + newResourceModelVersionId + " and model-customization-id " + + newResourceModelCustomizationId); + try { + Vertex oldModelVerNode = null; + GraphTraversal<Vertex, Vertex> modelVerQuery= g.V(genericVnfVtx).out("org.onap.relationships.inventory.IsA") + .has(AAIProperties.NODE_TYPE, MODEL_VER); + if(modelVerQuery.hasNext()) { + oldModelVerNode = modelVerQuery.next(); + } + genericVnfVtx.property(MODEL_INVARIANT_ID_LOCAL, newResourceModelInvariantId); + genericVnfVtx.property(MODEL_VERSION_ID_LOCAL, newResourceModelVersionId); + genericVnfVtx.property(MODEL_CUSTOMIZATION_ID, newResourceModelCustomizationId); + if(newModelVerNode.property(MODEL_VERSION).isPresent()) { + genericVnfVtx.property(PERSONA_MODEL_VERSION, newModelVerNode.value(MODEL_VERSION)); + } + this.touchVertexProperties(genericVnfVtx, false); + if(oldModelVerNode != null) { + this.swingEdges(oldModelVerNode, newModelVerNode, GENERIC_VNF, "org.onap.relationships.inventory.IsA", "IN"); + } + else { + this.createPrivateEdge(newModelVerNode, genericVnfVtx); + } + updateSuccess = true; + } catch (Exception e) { + logger.info(e.toString()); + logger.info(MIGRATION_ERROR + "ERROR: Failure to update generic-vnf " + genericVnfList.get(i) + "\n"); + genericVnfMigrationFailure++; + } + if(updateSuccess) { + String dmaapMsg = System.nanoTime() + "_" + genericVnfVtx.id().toString() + "_" + + genericVnfVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + logger.info("Update of generic-vnf " + genericVnfList.get(i) + " successful \n"); + genericVnfMigrationSuccess++; + updateServiceInstances(vnfType, genericVnfList.get(i), newServiceModelInvariantId, + newServiceModelVersionId); + updateVfModules(vnfType, genericVnfList.get(i), newResourceModelInvariantId, newResourceModelVersionId, + vfModuleFileLineList); + } + } + else { + logger.info(MIGRATION_ERROR + "ERROR: Failure to update generic-vnf " + genericVnfList.get(i) + + " Graph Traversal returned an empty vertex \n"); + genericVnfMigrationFailure++; + } + } + } + } + } + + private void updateServiceInstances(String vnfType, String vnfName, String newServiceModelInvariantId, + String newServiceModelVersionId) { + GraphTraversal<Vertex, Vertex> serviceInstanceNodeList = g.V(). + has(AAIProperties.NODE_TYPE, GENERIC_VNF).has(VNF_NAME, vnfName).has(VNF_TYPE, vnfType). + in("org.onap.relationships.inventory.ComposedOf").has(AAIProperties.NODE_TYPE, SERVICE_INSTANCE); + Vertex newModelVerNode = null; + GraphTraversal<Vertex, Vertex> modelVerNodeList = g.V().has(AAIProperties.NODE_TYPE, MODEL). + has(MODEL_INVARIANT_ID, newServiceModelInvariantId).in("org.onap.relationships.inventory.BelongsTo"). 
+ has(AAIProperties.NODE_TYPE, MODEL_VER).has(MODEL_VERSION_ID, newServiceModelVersionId); + if(!modelVerNodeList.hasNext()) { + logger.info(MIGRATION_ERROR + "ERROR: Model " + newServiceModelInvariantId + " with model-ver " + + newServiceModelVersionId + " does not exist in database \n"); + while(serviceInstanceNodeList.hasNext()) { + serviceInstanceNodeList.next(); + serviceInstanceMigrationFailure++; + } + } + else { + newModelVerNode = modelVerNodeList.next(); + while (serviceInstanceNodeList.hasNext()) { + Vertex serviceInstanceVtx = serviceInstanceNodeList.next(); + boolean updateSuccess = false; + if (serviceInstanceVtx != null) { + logger.info("Updating service-instance " + serviceInstanceVtx.value(SERVICE_INSTANCE_ID) + + " with current model-invariant-id " + + (serviceInstanceVtx.property(MODEL_INVARIANT_ID).isPresent() + ? serviceInstanceVtx.value(MODEL_INVARIANT_ID) : "null") + + " and current model-version-id " + + (serviceInstanceVtx.property(MODEL_VERSION_ID).isPresent() + ? serviceInstanceVtx.value(MODEL_VERSION_ID) : "null") + + " to use model-invariant-id " + newServiceModelInvariantId + " and" + + " model-version-id " + newServiceModelVersionId); + try { + Vertex oldModelVerNode = null; + GraphTraversal<Vertex, Vertex> modelVerQuery= g.V(serviceInstanceVtx).out("org.onap.relationships.inventory.IsA") + .has(AAIProperties.NODE_TYPE, MODEL_VER); + if(modelVerQuery.hasNext()) { + oldModelVerNode = modelVerQuery.next(); + } + serviceInstanceVtx.property(MODEL_INVARIANT_ID_LOCAL, newServiceModelInvariantId); + serviceInstanceVtx.property(MODEL_VERSION_ID_LOCAL, newServiceModelVersionId); + if(newModelVerNode.property(MODEL_VERSION).isPresent()) { + serviceInstanceVtx.property(PERSONA_MODEL_VERSION, newModelVerNode.value(MODEL_VERSION)); + } + this.touchVertexProperties(serviceInstanceVtx, false); + if(oldModelVerNode != null) { + this.swingEdges(oldModelVerNode, newModelVerNode, SERVICE_INSTANCE, "org.onap.relationships.inventory.IsA", "IN"); + } + else { + this.createPrivateEdge(newModelVerNode, serviceInstanceVtx); + } + updateSuccess = true; + } catch (Exception e) { + logger.info(e.toString()); + logger.info(MIGRATION_ERROR + "ERROR: Failure to update service-instance " + + serviceInstanceVtx.value(SERVICE_INSTANCE_ID) + "\n"); + serviceInstanceMigrationFailure++; + } + if(updateSuccess) { + String dmaapMsg = System.nanoTime() + "_" + serviceInstanceVtx.id().toString() + "_" + + serviceInstanceVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + logger.info("Update of service-instance " + + serviceInstanceVtx.value(SERVICE_INSTANCE_ID) + " successful \n"); + serviceInstanceMigrationSuccess++; + } + } + } + } + } + + private void updateVfModules(String vnfType, String vnfName, String newResourceModelInvariantId, + String newResourceModelVersionId, ArrayList<VfModuleFileData> vfModuleFileLineList) { + int numberOfLines = vfModuleFileLineList.size(); + ArrayList<Integer> processedNodes = new ArrayList<Integer>(); + for(int i = 0; i < numberOfLines; i++) { + VfModuleFileData currentLine = vfModuleFileLineList.get(i); + String vfModuleId = currentLine.getVfModuleId(); + String vfModuleModelName = currentLine.getVfModuleModelName(); + String imageName = currentLine.getImageName(); + String vfModuleInvariantId = ""; + String vfModuleVersionId = ""; + GraphTraversal<Vertex, Vertex> vfModuleNodeList = g.V(). + has(AAIProperties.NODE_TYPE, GENERIC_VNF).has(VNF_NAME, vnfName).has(VNF_TYPE, vnfType). 
+ in("org.onap.relationships.inventory.BelongsTo").has(AAIProperties.NODE_TYPE, VF_MODULE). + has(VF_MODULE_ID, vfModuleId); + if(vfModuleNodeList.hasNext()) { + GraphTraversal<Vertex, Vertex> modelElementNodeList = g.V(). + has(AAIProperties.NODE_TYPE, MODEL).has(MODEL_INVARIANT_ID, newResourceModelInvariantId). + in("org.onap.relationships.inventory.BelongsTo").has(AAIProperties.NODE_TYPE, MODEL_VER). + has(MODEL_VERSION_ID, newResourceModelVersionId).in("org.onap.relationships.inventory.BelongsTo"). + has(AAIProperties.NODE_TYPE, MODEL_ELEMENT); + while(modelElementNodeList.hasNext()) { + Vertex modelElement = modelElementNodeList.next(); + GraphTraversal<Vertex, Vertex> modelVersionLookup = g.V(modelElement).out("org.onap.relationships.inventory.IsA"). + has(AAIProperties.NODE_TYPE, MODEL_VER); + while(modelVersionLookup.hasNext()) { + Vertex modelVersionVertex = modelVersionLookup.next(); + if(modelVersionVertex.value(MODEL_NAME).equals(vfModuleModelName)) { + vfModuleVersionId = modelVersionVertex.value(MODEL_VERSION_ID); + vfModuleInvariantId = g.V(modelVersionVertex).out("org.onap.relationships.inventory.BelongsTo"). + has(AAIProperties.NODE_TYPE, MODEL).next().value(MODEL_INVARIANT_ID); + break; + } + } + if(!vfModuleVersionId.isEmpty() && !vfModuleInvariantId.isEmpty()) { + break; + } + GraphTraversal<Vertex, Vertex> modelElementLookup = g.V(modelElement).in("org.onap.relationships.inventory.BelongsTo"). + has(AAIProperties.NODE_TYPE, MODEL_ELEMENT); + while(modelElementLookup.hasNext()) { + ArrayList<String> returnedValues = recursiveSearchForModelName(vfModuleModelName, modelElementLookup.next()); + if(!returnedValues.isEmpty()) { + vfModuleInvariantId = returnedValues.get(0); + vfModuleVersionId = returnedValues.get(1); + break; + } + } + if(!vfModuleVersionId.isEmpty() && !vfModuleInvariantId.isEmpty()) { + break; + } + } + while (vfModuleNodeList.hasNext()) { + Vertex vfModuleVtx = vfModuleNodeList.next(); + boolean updateSuccess = false; + if (vfModuleVtx != null) { + if(vfModuleInvariantId.isEmpty() && vfModuleVersionId.isEmpty()) { + logger.info(MIGRATION_ERROR + "ERROR: Failure to update vf-module " +vfModuleVtx.value(VF_MODULE_ID) + + ". model-invariant-id and model-version-id not found \n"); + vfModuleMigrationFailure++; + } + else if(vfModuleInvariantId.isEmpty()) { + logger.info(MIGRATION_ERROR + "ERROR: Failure to update vf-module " +vfModuleVtx.value(VF_MODULE_ID) + + ". model-invariant-id not found \n"); + vfModuleMigrationFailure++; + } + else if(vfModuleVersionId.isEmpty()) { + logger.info(MIGRATION_ERROR + "ERROR: Failure to update vf-module " +vfModuleVtx.value(VF_MODULE_ID) + + ". model-version-id not found \n"); + vfModuleMigrationFailure++; + } + else { + logger.info("Updating vf-module " + vfModuleVtx.value(VF_MODULE_ID) + + " with current model-invariant-id " + + (vfModuleVtx.property(MODEL_INVARIANT_ID).isPresent() + ? vfModuleVtx.value(MODEL_INVARIANT_ID) : "null") + + " and current model-version-id " + + (vfModuleVtx.property(MODEL_VERSION_ID).isPresent() + ? vfModuleVtx.value(MODEL_VERSION_ID) : "null") + + " to use model-invariant-id " + vfModuleInvariantId + " and" + + " model-version-id " + vfModuleVersionId); + Vertex newModelVerNode = null; + GraphTraversal<Vertex, Vertex> modelVerNodeList = g.V().has(AAIProperties.NODE_TYPE, MODEL). + has(MODEL_INVARIANT_ID, vfModuleInvariantId).in("org.onap.relationships.inventory.BelongsTo"). 
+ has(AAIProperties.NODE_TYPE, MODEL_VER).has(MODEL_VERSION_ID, vfModuleVersionId); + if(!modelVerNodeList.hasNext()) { + logger.info(MIGRATION_ERROR + "ERROR: Model " + vfModuleInvariantId + " with model-ver " + + vfModuleVersionId + " could not be found in traversal, error in finding vf-module model \n"); + vfModuleMigrationFailure++; + } + else { + newModelVerNode = modelVerNodeList.next(); + try { + Vertex oldModelVerNode = null; + GraphTraversal<Vertex, Vertex> modelVerQuery= g.V(vfModuleVtx).out("org.onap.relationships.inventory.IsA") + .has(AAIProperties.NODE_TYPE, MODEL_VER); + if(modelVerQuery.hasNext()) { + oldModelVerNode = modelVerQuery.next(); + } + vfModuleVtx.property(MODEL_INVARIANT_ID_LOCAL, vfModuleInvariantId); + vfModuleVtx.property(MODEL_VERSION_ID_LOCAL, vfModuleVersionId); + if(newModelVerNode.property(MODEL_VERSION).isPresent()) { + vfModuleVtx.property(PERSONA_MODEL_VERSION, newModelVerNode.value(MODEL_VERSION)); + } + this.touchVertexProperties(vfModuleVtx, false); + if(oldModelVerNode != null) { + this.swingEdges(oldModelVerNode, newModelVerNode, VF_MODULE, "org.onap.relationships.inventory.IsA", "IN"); + } + else { + this.createPrivateEdge(newModelVerNode, vfModuleVtx); + } + updateSuccess = true; + } catch (Exception e) { + logger.info(e.toString()); + logger.info(MIGRATION_ERROR + "ERROR: Failure to update vf-module " + + vfModuleVtx.value(VF_MODULE_ID) + "\n"); + vfModuleMigrationFailure++; + } + } + } + } + if(updateSuccess) { + String dmaapMsg = System.nanoTime() + "_" + vfModuleVtx.id().toString() + "_" + + vfModuleVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + logger.info("Update of vf-module " + + vfModuleVtx.value(VF_MODULE_ID) + " successful \n"); + vfModuleMigrationSuccess++; + if(!processedNodes.contains(i)) { + processedNodes.add(i); + } + updateVserverAndImage(vfModuleId, imageName); + } + } + } + } + int processedNodesNum = processedNodes.size(); + for (int i = 0; i < processedNodesNum; i++) { + vfModuleFileLineList.remove(i); + } + } + + private ArrayList<String> recursiveSearchForModelName(String vfModuleModelName, Vertex modelElement) { + ArrayList<String> returnedValues = new ArrayList<String>(); + GraphTraversal<Vertex, Vertex> modelVersionLookup = g.V(modelElement).out("org.onap.relationships.inventory.IsA"). + has(AAIProperties.NODE_TYPE, MODEL_VER); + while(modelVersionLookup.hasNext()) { + Vertex modelVersionVertex = modelVersionLookup.next(); + if(modelVersionVertex.value(MODEL_NAME).equals(vfModuleModelName)) { + returnedValues.add(modelVersionVertex.value(MODEL_VERSION_ID)); + returnedValues.add(0, g.V(modelVersionVertex).out("org.onap.relationships.inventory.BelongsTo") + .next().value(MODEL_INVARIANT_ID)); + return returnedValues; + } + } + GraphTraversal<Vertex, Vertex> modelElementLookup = g.V(modelElement).in("org.onap.relationships.inventory.BelongsTo"). + has(AAIProperties.NODE_TYPE, MODEL_ELEMENT); + while(modelElementLookup.hasNext()) { + returnedValues = recursiveSearchForModelName(vfModuleModelName, modelElementLookup.next()); + if(!returnedValues.isEmpty()) { + return returnedValues; + } + } + return returnedValues; + } + + private void updateVserverAndImage(String vfModuleId, String imageName) { + GraphTraversal<Vertex, Vertex> vserverNodeList = g.V(). + has(AAIProperties.NODE_TYPE, VF_MODULE).has(VF_MODULE_ID, vfModuleId). 
+ out("org.onap.relationships.inventory.Uses").has(AAIProperties.NODE_TYPE, VSERVER); + while (vserverNodeList.hasNext()) { + Vertex vserverVtx = vserverNodeList.next(); + boolean updateSuccess = false; + GraphTraversal<Vertex, Vertex> oldImageLookup = g.V(vserverVtx).out("org.onap.relationships.inventory.Uses"). + has(AAIProperties.NODE_TYPE, IMAGE); + Vertex oldImageVtx = null; + if(oldImageLookup.hasNext()) { + oldImageVtx = oldImageLookup.next(); + } + GraphTraversal<Vertex, Vertex> newImageLookup = g.V(vserverVtx).out("org.onap.relationships.inventory.BelongsTo"). + has(AAIProperties.NODE_TYPE, TENANT).out("org.onap.relationships.inventory.BelongsTo"). + has(AAIProperties.NODE_TYPE, CLOUD_REGION).in("org.onap.relationships.inventory.BelongsTo"). + has(AAIProperties.NODE_TYPE, IMAGE).has(IMAGE_NAME, imageName); + Vertex newImageVtx = null; + if(newImageLookup.hasNext()) { + newImageVtx = newImageLookup.next(); + } + if (vserverVtx != null && newImageVtx!= null) { + logger.info("Updating vserver " + vserverVtx.value(VSERVER_ID) + + " to replace all current image relationships with relationship to new image " + imageName); + try { + if(oldImageVtx != null) { + this.swingEdges(oldImageVtx, newImageVtx, VSERVER, "org.onap.relationships.inventory.Uses", "IN"); + } + else { + this.createEdgeIfPossible(EdgeType.COUSIN, vserverVtx, newImageVtx); + } + updateSuccess = true; + } catch (Exception e) { + logger.info(e.toString()); + logger.info(MIGRATION_ERROR + "ERROR: Failure to update vserver " + + vserverVtx.value(VSERVER_ID) + " with image " + imageName + "\n"); + imageMigrationFailure++; + } + if(updateSuccess) { + logger.info("Update of vserver " + + vserverVtx.value(VSERVER_ID) + " with image " + newImageVtx.value(IMAGE_NAME) + + " successful \n"); + imageMigrationSuccess++; + } + } + } + } + + @Override + public Status getStatus() { + if (checkLog) { + return Status.CHECK_LOGS; + } + else if (success) { + return Status.SUCCESS; + } + else { + return Status.FAILURE; + } + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{GENERIC_VNF, SERVICE_INSTANCE, VF_MODULE, VSERVER, IMAGE}); + } + + @Override + public String getMigrationName() { + return "MigrateRadcomChanges"; + } + + @Override + public List<Pair<Vertex, Vertex>> getAffectedNodePairs() { + return null; + } + + @Override + public String getNodeTypeRestriction() { + return VSERVER; + } + + @Override + public String getEdgeLabelRestriction() { + return "org.onap.relationships.inventory.Uses"; + } + + @Override + public String getEdgeDirRestriction() { + return "IN"; + } + + @Override + public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) { + } +} diff --git a/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java b/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java index 2431d11..9bcd843 100644 --- a/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java +++ b/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java @@ -27,7 +27,7 @@ import org.apache.tinkerpop.gremlin.structure.Vertex; import org.javatuples.Pair; import org.onap.aai.exceptions.AAIException; import org.onap.aai.rest.dsl.DslQueryProcessor; -import org.onap.aai.restcore.search.GroovyQueryBuilderSingleton; +import org.onap.aai.restcore.search.GroovyQueryBuilder; import org.onap.aai.restcore.util.URITools; import 
org.onap.aai.serialization.engines.TransactionalGraphEngine; import org.onap.aai.serialization.queryformats.SubGraphStyle; @@ -37,7 +37,6 @@ import javax.ws.rs.core.MultivaluedMap; import java.io.FileNotFoundException; import java.net.URI; import java.util.*; -import java.util.regex.Matcher; import java.util.regex.Pattern; public abstract class GenericQueryProcessor { @@ -48,7 +47,7 @@ public abstract class GenericQueryProcessor { protected static Pattern p = Pattern.compile("query/(.*+)"); protected Optional<String> gremlin; protected final TransactionalGraphEngine dbEngine; - protected static GroovyQueryBuilderSingleton queryBuilderSingleton = GroovyQueryBuilderSingleton.getInstance(); + protected static GroovyQueryBuilder groovyQueryBuilder = new GroovyQueryBuilder(); protected final boolean isGremlin; protected Optional<DslQueryProcessor> dslQueryProcessorOptional; /* dsl parameters to store dsl query and to check @@ -122,7 +121,7 @@ public abstract class GenericQueryProcessor { String dslUserQuery = dsl.get(); if(dslQueryProcessorOptional.isPresent()){ String dslQuery = dslQueryProcessorOptional.get().parseAaiQuery(dslUserQuery); - query = queryBuilderSingleton.executeTraversal(dbEngine, dslQuery, params); + query = groovyQueryBuilder.executeTraversal(dbEngine, dslQuery, params); String startPrefix = "g.V()"; query = startPrefix + query; } diff --git a/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java b/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java index 3db4301..8f83751 100644 --- a/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java +++ b/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java @@ -20,7 +20,7 @@ package org.onap.aai.rest.search; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; -import org.onap.aai.restcore.search.GremlinGroovyShellSingleton; +import org.onap.aai.restcore.search.GremlinGroovyShell; import java.util.Map; @@ -35,7 +35,7 @@ public class GroovyShellImpl extends GenericQueryProcessor { params.put("g", this.dbEngine.asAdmin().getTraversalSource()); - GremlinGroovyShellSingleton shell = GremlinGroovyShellSingleton.getInstance(); + GremlinGroovyShell shell = new GremlinGroovyShell(); return shell.executeTraversal(query, params); } diff --git a/src/main/java/org/onap/aai/schema/GenTester.java b/src/main/java/org/onap/aai/schema/GenTester.java index 924009d..f35cece 100644 --- a/src/main/java/org/onap/aai/schema/GenTester.java +++ b/src/main/java/org/onap/aai/schema/GenTester.java @@ -24,13 +24,14 @@ import com.att.eelf.configuration.EELFLogger; import com.att.eelf.configuration.EELFManager; import org.janusgraph.core.JanusGraph; import org.janusgraph.core.schema.JanusGraphManagement; +import org.onap.aai.config.PropertyPasswordConfiguration; import org.onap.aai.dbgen.SchemaGenerator; import org.onap.aai.dbmap.AAIGraph; +import org.onap.aai.exceptions.AAIException; import org.onap.aai.logging.ErrorLogHelper; import org.onap.aai.logging.LoggingContext; import org.onap.aai.logging.LoggingContext.StatusCode; -import org.onap.aai.util.AAIConfig; -import org.onap.aai.util.AAIConstants; +import org.onap.aai.util.*; import org.springframework.context.annotation.AnnotationConfigApplicationContext; import java.util.Properties; @@ -46,7 +47,7 @@ public class GenTester { * * @param args the arguments */ - public static void main(String[] args) { + public static void main(String[] args) throws AAIException{ JanusGraph graph = null; System.setProperty("aai.service.name", GenTester.class.getSimpleName()); @@ 
-67,11 +68,23 @@ public class GenTester { LoggingContext.statusCode(StatusCode.COMPLETE); LoggingContext.responseCode(LoggingContext.SUCCESS); - AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext( - "org.onap.aai.config", - "org.onap.aai.setup" - ); - + AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(); + PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration(); + initializer.initialize(ctx); + try { + ctx.scan( + "org.onap.aai.config", + "org.onap.aai.setup" + ); + ctx.refresh(); + } catch (Exception e) { + AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e); + LOGGER.error("Problems running the tool "+aai.getMessage()); + LoggingContext.statusCode(LoggingContext.StatusCode.ERROR); + LoggingContext.responseCode(LoggingContext.DATA_ERROR); + ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry"); + throw aai; + } try { LOGGER.info("GenTester uses either cql jar or Cassandra jar"); @@ -158,6 +171,5 @@ public class GenTester { LOGGER.auditEvent("-- all done, if program does not exit, please kill."); System.exit(0); } - } diff --git a/src/main/java/org/onap/aai/util/ExceptionTranslator.java b/src/main/java/org/onap/aai/util/ExceptionTranslator.java new file mode 100644 index 0000000..a395c23 --- /dev/null +++ b/src/main/java/org/onap/aai/util/ExceptionTranslator.java @@ -0,0 +1,47 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
diff --git a/src/main/java/org/onap/aai/util/ExceptionTranslator.java b/src/main/java/org/onap/aai/util/ExceptionTranslator.java
new file mode 100644
index 0000000..a395c23
--- /dev/null
+++ b/src/main/java/org/onap/aai/util/ExceptionTranslator.java
@@ -0,0 +1,47 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.onap.aai.GraphAdminApp;
+import org.onap.aai.exceptions.AAIException;
+
+public class ExceptionTranslator {
+    private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(ExceptionTranslator.class);
+
+    public static AAIException schemaServiceExceptionTranslator(Exception ex) {
+        AAIException aai = null;
+        LOGGER.info("Exception is " + ExceptionUtils.getRootCause(ex).getMessage()
+                + ". Root cause is " + ExceptionUtils.getRootCause(ex).toString());
+        if (ExceptionUtils.getRootCause(ex).getMessage().contains("NodeIngestor")) {
+            aai = new AAIException("AAI_3026", "Error reading OXM from SchemaService - Investigate");
+        } else if (ExceptionUtils.getRootCause(ex).getMessage().contains("EdgeIngestor")) {
+            aai = new AAIException("AAI_3027", "Error reading EdgeRules from SchemaService - Investigate");
+        } else if (ExceptionUtils.getRootCause(ex).getMessage().contains("Connection refused")) {
+            aai = new AAIException("AAI_3025", "Error connecting to SchemaService - Investigate");
+        } else {
+            aai = new AAIException("AAI_3025", "Error connecting to SchemaService - Please Investigate");
+        }
+
+        return aai;
+    }
+}
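The translator keys off substrings of the root-cause message, so callers can hand it any wrapped startup exception. A quick illustrative check (the exception messages here are hypothetical; the mapping is the one defined above):

    import org.onap.aai.exceptions.AAIException;
    import org.onap.aai.util.ExceptionTranslator;

    public class TranslatorDemo {
        public static void main(String[] args) {
            // A root cause mentioning "Connection refused" maps to AAI_3025.
            Exception boot = new RuntimeException(
                    new IllegalStateException("Connection refused: localhost:8452"));
            AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(boot);
            System.out.println(aai.getCode()); // expected: AAI_3025
        }
    }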
diff --git a/src/main/java/org/onap/aai/util/GraphAdminConstants.java b/src/main/java/org/onap/aai/util/GraphAdminConstants.java
new file mode 100644
index 0000000..017d92e
--- /dev/null
+++ b/src/main/java/org/onap/aai/util/GraphAdminConstants.java
@@ -0,0 +1,49 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+public final class GraphAdminConstants {
+
+    public static final int AAI_SNAPSHOT_DEFAULT_THREADS_FOR_CREATE = 15;
+    public static final int AAI_SNAPSHOT_DEFAULT_MAX_ERRORS_PER_THREAD = 25;
+    public static final Long AAI_SNAPSHOT_DEFAULT_VERTEX_ADD_DELAY_MS = 1L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_EDGE_ADD_DELAY_MS = 1L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_FAILURE_DELAY_MS = 50L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_RETRY_DELAY_MS = 1500L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_VERTEX_TO_EDGE_PROC_DELAY_MS = 9000L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_STAGGER_THREAD_DELAY_MS = 5000L;
+
+    public static final int AAI_GROOMING_DEFAULT_MAX_FIX = 150;
+    public static final int AAI_GROOMING_DEFAULT_SLEEP_MINUTES = 7;
+
+    public static final int AAI_DUPETOOL_DEFAULT_MAX_FIX = 25;
+    public static final int AAI_DUPETOOL_DEFAULT_SLEEP_MINUTES = 7;
+
+    /**
+     * Instantiates a new GraphAdmin constants.
+     */
+    private GraphAdminConstants() {
+        // prevent instantiation
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/util/GraphAdminDBUtils.java b/src/main/java/org/onap/aai/util/GraphAdminDBUtils.java
new file mode 100644
index 0000000..992223e
--- /dev/null
+++ b/src/main/java/org/onap/aai/util/GraphAdminDBUtils.java
@@ -0,0 +1,40 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import java.util.Iterator;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+public class GraphAdminDBUtils {
+
+    private static EELFLogger LOGGER = EELFManager.getInstance().getLogger(GraphAdminDBUtils.class);
+
+    public static void logConfigs(org.apache.commons.configuration.Configuration configuration) {
+
+        if (configuration != null && configuration.getKeys() != null) {
+            Iterator<String> keys = configuration.getKeys();
+            keys.forEachRemaining(
+                    key -> LOGGER.info("Key is " + key + ", value is " + configuration.getProperty(key).toString()));
+        }
+
+    }
+}
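logConfigs takes the Apache Commons Configuration that TinkerPop exposes on every Graph (graph.configuration()), so the admin tools can dump the effective JanusGraph settings at startup. An illustrative call, using a hand-built configuration in place of a live graph:

    import org.apache.commons.configuration.BaseConfiguration;
    import org.apache.commons.configuration.Configuration;
    import org.onap.aai.util.GraphAdminDBUtils;

    public class LogConfigsDemo {
        public static void main(String[] args) {
            // Any commons Configuration works; graph.configuration() from a
            // TinkerPop Graph (e.g., JanusGraph) is the intended source.
            Configuration conf = new BaseConfiguration();
            conf.setProperty("storage.backend", "inmemory");
            GraphAdminDBUtils.logConfigs(conf); // logs each key/value pair
        }
    }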
diff --git a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
index ad96efe..e559a78 100644
--- a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
+++ b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
@@ -21,8 +21,11 @@ package org.onap.aai.util;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.Parameter;
+import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.migration.EventAction;
 import org.onap.aai.setup.SchemaVersions;
@@ -32,7 +35,7 @@ import java.util.*;
 
 public class SendDeleteMigrationNotificationsMain {
 
-    public static void main(String[] args) {
+    public static void main(String[] args) throws AAIException {
 
 	Arrays.asList(args).stream().forEach(System.out::println);
 
@@ -47,11 +50,22 @@ public class SendDeleteMigrationNotificationsMain {
 	LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
 	LoggingContext.responseCode(LoggingContext.SUCCESS);
 
-	AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
-			"org.onap.aai.config",
-			"org.onap.aai.setup"
-	);
-
+	AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+	PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+	initializer.initialize(ctx);
+	try {
+		ctx.scan(
+				"org.onap.aai.config",
+				"org.onap.aai.setup"
+		);
+		ctx.refresh();
+	} catch (Exception e) {
+		AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+		LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+		LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+		ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+		throw aai;
+	}
 	LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
 	SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
 	String basePath = ctx.getEnvironment().getProperty("schema.uri.base.path");
diff --git a/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java b/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java
index 29eb1da..6ae2bb4 100644
--- a/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java
+++ b/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java
@@ -21,8 +21,11 @@ package org.onap.aai.util;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.Parameter;
+import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.migration.EventAction;
 import org.onap.aai.setup.SchemaVersions;
@@ -32,7 +35,7 @@ import java.util.*;
 
 public class SendMigrationNotificationsMain {
 
-    public static void main(String[] args) {
+    public static void main(String[] args) throws AAIException {
 
 	Arrays.asList(args).stream().forEach(System.out::println);
 
@@ -47,11 +50,23 @@ public class SendMigrationNotificationsMain {
 	LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
 	LoggingContext.responseCode(LoggingContext.SUCCESS);
 
-	AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
-			"org.onap.aai.config",
-			"org.onap.aai.setup"
-	);
-
+	AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+	PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+	initializer.initialize(ctx);
+	try {
+		ctx.scan(
+				"org.onap.aai.config",
+				"org.onap.aai.setup"
+		);
+		ctx.refresh();
+	} catch (Exception e) {
+		AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+		System.out.println("Problems running the tool " + aai.getMessage());
+		LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+		LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+		ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+		throw aai;
+	}
 	LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
 	SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
 	String basePath = ctx.getEnvironment().getProperty("schema.uri.base.path");
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index d636bb6..cfab945 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -48,7 +48,7 @@ schema.ingest.file=${server.local.startpath}/application.properties
 schema.uri.base.path=/aai
 # Lists all of the versions in the schema
-schema.version.list=v8,v9,v10,v11,v12,v13,v14
+schema.version.list=v8,v9,v10,v11,v12,v13,v14,v15
 # Specifies from which version should the depth parameter to default to zero
 schema.version.depth.start=v9
 # Specifies from which version should the related link be displayed in response payload
@@ -62,3 +62,14 @@ schema.version.namespace.change.start=v12
 schema.version.edge.label.start=v12
 # Specifies the version that the application should default to
 schema.version.api.default=v14
+schema.translator.list=schema-service
+#schema.service.client=no-auth
+schema.service.base.url=https://localhost:8452/aai/schema-service/v1/
+schema.service.nodes.endpoint=nodes?version=
+schema.service.edges.endpoint=edgerules?version=
+schema.service.versions.endpoint=versions
+
+schema.service.ssl.key-store=${server.local.startpath}etc/auth/aai_keystore
+schema.service.ssl.key-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
+schema.service.ssl.trust-store=${server.local.startpath}etc/auth/aai_keystore
+schema.service.ssl.trust-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
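The new schema.service.* keys point GraphAdmin at the central schema microservice instead of local OXM files; node, edge-rule, and version payloads are fetched from URLs composed as base.url + endpoint. A minimal sketch of that composition using plain HttpURLConnection, with the TLS/keystore wiring from the ssl properties omitted and the URL values taken from the configuration above:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class SchemaServiceProbe {
        public static void main(String[] args) throws Exception {
            String base = "https://localhost:8452/aai/schema-service/v1/";
            // versions endpoint; nodes would be base + "nodes?version=" + v
            URL url = new URL(base + "versions");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(conn.getInputStream()))) {
                in.lines().forEach(System.out::println);
            }
        }
    }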
diff --git a/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties b/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties
index a8f5e95..4b9371f 100644
--- a/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties
+++ b/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties
@@ -1,4 +1,4 @@
-topic=AAI-EVENT
-partition=AAI
-maxBatchSize=100
-maxAgeMs=250
+topic=AAI-EVENT
+partition=AAI
+maxBatchSize=100
+maxAgeMs=250
diff --git a/src/main/resources/etc/appprops/aaiconfig.properties b/src/main/resources/etc/appprops/aaiconfig.properties
index d2cf54b..c15c6f5 100644
--- a/src/main/resources/etc/appprops/aaiconfig.properties
+++ b/src/main/resources/etc/appprops/aaiconfig.properties
@@ -1,144 +1,149 @@
-#
-# ============LICENSE_START=======================================================
-# org.onap.aai
-# ================================================================================
-# Copyright © 2017-18 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-
-####################################################################
-# REMEMBER TO THINK ABOUT ENVIRONMENTAL DIFFERENCES AND CHANGE THE
-# TEMPLATE AND *ALL* DATAFILES
-####################################################################
-
-aai.config.checktime=1000
-
-# this could come from siteconfig.pl?
-aai.config.nodename=AutomaticallyOverwritten
-
-aai.transaction.logging=true
-aai.transaction.logging.get=true
-aai.transaction.logging.post=true
-
-aai.server.url.base=https://localhost:8443/aai/
-aai.server.url=https://localhost:8443/aai/v14/
-aai.oldserver.url.base=https://localhost:8443/aai/servers/
-aai.oldserver.url=https://localhost:8443/aai/servers/v2/
-aai.global.callback.url=https://localhost:8443/aai/
-
-# Start of INTERNAL Specific Properties
-
-aai.truststore.filename=aai_keystore
-aai.truststore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
-aai.keystore.filename=aai-client-cert.p12
-aai.keystore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
-
-aai.realtime.clients=RO,SDNC,MSO,SO
-
-# End of INTERNAL Specific Properties
-
-aai.notification.current.version=v14
-aai.notificationEvent.default.status=UNPROCESSED
-aai.notificationEvent.default.eventType=AAI-EVENT
-aai.notificationEvent.default.domain=devINT1
-aai.notificationEvent.default.sourceName=aai
-aai.notificationEvent.default.sequenceNumber=0
-aai.notificationEvent.default.severity=NORMAL
-aai.notificationEvent.default.version=v14
-# This one lets us enable/disable resource-version checking on updates/deletes
-aai.resourceversion.enableflag=true
-aai.logging.maxStackTraceEntries=10
-aai.default.api.version=v14
-
-# Used by Model-processing code
-aai.model.delete.sleep.per.vtx.msec=500
-aai.model.query.resultset.maxcount=50
-aai.model.query.timeout.sec=90
-
-# Used by Data Grooming
-aai.grooming.default.max.fix=150
-aai.grooming.default.sleep.minutes=7
-
-# Used by DupeTool
-aai.dupeTool.default.max.fix=25
-aai.dupeTool.default.sleep.minutes=7
-
-aai.model.proc.max.levels=50
-aai.edgeTag.proc.max.levels=50
-
-# Used by the ForceDelete tool
-aai.forceDel.protected.nt.list=cloud-region
-aai.forceDel.protected.edge.count=10
-aai.forceDel.protected.descendant.count=10
-
-# Used for CTAG-Pool generation
-aai.ctagPool.rangeString.vplsPe1=2001-2500
-aai.ctagPool.rangeString.vplsPe2=2501-3000
-
-aai.jms.enable=false
-
-#used by the dataGrooming and dataSnapshot cleanup tasks
-aai.cron.enable.datagroomingcleanup=true
-aai.cron.enable.datasnapshotcleanup=true
-aai.datagrooming.agezip=5
-aai.datagrooming.agedelete=30
-aai.datasnapshot.agezip=5
-aai.datasnapshot.agedelete=30
-
-#used by the dataSnapshot and dataGrooming tasks
-aai.cron.enable.dataSnapshot=true
-aai.cron.enable.dataGrooming=true
-
-#used by the dataGrooming tasks
-aai.datagrooming.enableautofix=true
-aai.datagrooming.enabledupefixon=true
-aai.datagrooming.enabledontfixorphans=true
-aai.datagrooming.enabletimewindowminutes=true
-aai.datagrooming.enableskiphostcheck=false
-aai.datagrooming.enablesleepminutes=false
-aai.datagrooming.enableedgesonly=false
-aai.datagrooming.enableskipedgechecks=false
-aai.datagrooming.enablemaxfix=false
-aai.datagrooming.enablesinglecommits=false
-aai.datagrooming.enabledupecheckoff=false
-aai.datagrooming.enableghost2checkoff=false
-aai.datagrooming.enableghost2fixon=false
-aai.datagrooming.enablef=false
-
-# used by the dataGrooming to set values
-aai.datagrooming.timewindowminutesvalue=10500
-aai.datagrooming.sleepminutesvalue=100
-aai.datagrooming.maxfixvalue=10
-aai.datagrooming.fvalue=10
-
-#timeout for traversal enabled flag
-aai.graphadmin.timeoutenabled=true
-
-#timeout app specific -1 to bypass for that app id, a whole number to override the timeout with that value (in ms)
-aai.graphadmin.timeout.appspecific=JUNITTESTAPP1,1|JUNITTESTAPP2,-1
-
-#default timeout limit added for graphadmin if not overridden (in ms)
-aai.graphadmin.timeoutlimit=180000
-
-# Disable the process check which are oriented towards linux OS
-# These props should only be true for local on windows
-aai.disable.check.snapshot.running=true
-aai.disable.check.grooming.running=true
-
-# Specify the params listed right here that you would have send to the dataSnapshot shell script
-# JUST_TAKE_SNAPSHOT
-# THREADED_SNAPSHOT 2 DEBUG
-# THREADED_SNAPSHOT 2
-aai.datasnapshot.params=JUST_TAKE_SNAPSHOT
-
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017-18 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+####################################################################
+# REMEMBER TO THINK ABOUT ENVIRONMENTAL DIFFERENCES AND CHANGE THE
+# TEMPLATE AND *ALL* DATAFILES
+####################################################################
+
+aai.config.checktime=1000
+
+# this could come from siteconfig.pl?
+aai.config.nodename=AutomaticallyOverwritten
+
+aai.transaction.logging=true
+aai.transaction.logging.get=true
+aai.transaction.logging.post=true
+
+aai.server.url.base=https://localhost:8443/aai/
+aai.server.url=https://localhost:8443/aai/v14/
+aai.oldserver.url.base=https://localhost:8443/aai/servers/
+aai.oldserver.url=https://localhost:8443/aai/servers/v2/
+aai.global.callback.url=https://localhost:8443/aai/
+
+# Start of INTERNAL Specific Properties
+
+aai.truststore.filename=aai_keystore
+aai.truststore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
+aai.keystore.filename=aai-client-cert.p12
+aai.keystore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
+
+aai.realtime.clients=RO,SDNC,MSO,SO
+
+# End of INTERNAL Specific Properties
+
+aai.notification.current.version=v14
+aai.notificationEvent.default.status=UNPROCESSED
+aai.notificationEvent.default.eventType=AAI-EVENT
+aai.notificationEvent.default.domain=devINT1
+aai.notificationEvent.default.sourceName=aai
+aai.notificationEvent.default.sequenceNumber=0
+aai.notificationEvent.default.severity=NORMAL
+aai.notificationEvent.default.version=v14
+# This one lets us enable/disable resource-version checking on updates/deletes
+aai.resourceversion.enableflag=true
+aai.logging.maxStackTraceEntries=10
+aai.default.api.version=v14
+
+# Used by Model-processing code
+aai.model.delete.sleep.per.vtx.msec=500
+aai.model.query.resultset.maxcount=50
+aai.model.query.timeout.sec=90
+
+# Used by Data Grooming
+aai.grooming.default.max.fix=150
+aai.grooming.default.sleep.minutes=7
+
+# Used by Data Snapshot
+aai.datasnapshot.default.threads.for.create=16
+
+# Used by DupeTool
+aai.dupeTool.default.max.fix=25
+aai.dupeTool.default.sleep.minutes=7
+
+aai.model.proc.max.levels=50
+aai.edgeTag.proc.max.levels=50
+
+# Used by the ForceDelete tool
+aai.forceDel.protected.nt.list=cloud-region
+aai.forceDel.protected.edge.count=10
+aai.forceDel.protected.descendant.count=10
+
+# Used for CTAG-Pool generation
+aai.ctagPool.rangeString.vplsPe1=2001-2500
+aai.ctagPool.rangeString.vplsPe2=2501-3000
+
+aai.jms.enable=false
+
+#used by the dataGrooming and dataSnapshot cleanup tasks
+aai.cron.enable.datagroomingcleanup=true
+aai.cron.enable.datasnapshotcleanup=true
+aai.datagrooming.agezip=5
+aai.datagrooming.agedelete=30
+aai.datasnapshot.agezip=5
+aai.datasnapshot.agedelete=30
+
+#used by the dataSnapshot and dataGrooming tasks
+aai.cron.enable.dataSnapshot=true
+aai.cron.enable.dataGrooming=true
+
+#used by the dataGrooming tasks
+aai.datagrooming.enableautofix=true
+aai.datagrooming.enabledupefixon=true
+aai.datagrooming.enabledontfixorphans=true
+aai.datagrooming.enabletimewindowminutes=true
+aai.datagrooming.enableskiphostcheck=false
+aai.datagrooming.enablesleepminutes=false
+aai.datagrooming.enableedgesonly=false
+aai.datagrooming.enableskipedgechecks=false
+aai.datagrooming.enablemaxfix=false
+aai.datagrooming.enablesinglecommits=false
+aai.datagrooming.enabledupecheckoff=false
+aai.datagrooming.enableghost2checkoff=false
+aai.datagrooming.enableghost2fixon=false
+aai.datagrooming.enablef=false
+
+# used by the dataGrooming to set values
+aai.datagrooming.timewindowminutesvalue=10500
+aai.datagrooming.sleepminutesvalue=100
+aai.datagrooming.maxfixvalue=10
+aai.datagrooming.fvalue=10
+
+#timeout for traversal enabled flag
+aai.graphadmin.timeoutenabled=true
+
+#timeout app specific -1 to bypass for that app id, a whole number to override the timeout with that value (in ms)
+aai.graphadmin.timeout.appspecific=JUNITTESTAPP1,1|JUNITTESTAPP2,-1
+
+#default timeout limit added for graphadmin if not overridden (in ms)
+aai.graphadmin.timeoutlimit=180000
+
+# Disable the process checks which are oriented towards linux OS
+# These props should only be true for local on windows
+aai.disable.check.snapshot.running=true
+aai.disable.check.grooming.running=true
+
+# Specify the params listed right here that you would have sent to the dataSnapshot shell script
+# JUST_TAKE_SNAPSHOT
+# THREADED_SNAPSHOT 2 DEBUG
+# THREADED_SNAPSHOT 2
+aai.datasnapshot.params=JUST_TAKE_SNAPSHOT
+
+# Threshold for margin of error (in ms) for resources_with_sot format to derive the most recent http method performed
+aai.resource.formatter.threshold=10
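aai.graphadmin.timeout.appspecific encodes per-app overrides as pipe-separated app,timeout pairs: -1 bypasses the timeout for that app id, any other number overrides the default limit in milliseconds. A small parsing sketch of that convention (class and method names are illustrative, not from this codebase):

    import java.util.HashMap;
    import java.util.Map;

    public class TimeoutConfig {
        // Parses "APP1,1|APP2,-1" into app -> timeout-in-ms (-1 = bypass).
        public static Map<String, Long> parse(String appSpecific) {
            Map<String, Long> overrides = new HashMap<>();
            for (String pair : appSpecific.split("\\|")) {
                String[] parts = pair.split(",");
                overrides.put(parts[0].trim(), Long.parseLong(parts[1].trim()));
            }
            return overrides;
        }

        public static void main(String[] args) {
            System.out.println(parse("JUNITTESTAPP1,1|JUNITTESTAPP2,-1"));
            // e.g. {JUNITTESTAPP1=1, JUNITTESTAPP2=-1}
        }
    }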
diff --git a/src/main/resources/etc/appprops/error.properties b/src/main/resources/etc/appprops/error.properties
index 708fb1f..1550d6c 100644
--- a/src/main/resources/etc/appprops/error.properties
+++ b/src/main/resources/etc/appprops/error.properties
@@ -32,6 +32,10 @@ AAI_3011=5:6:WARN:3011:400:3000:Unknown XML namespace used in payload
 AAI_3012=5:6:WARN:3012:400:3012:Unrecognized AAI function
 AAI_3013=5:6:WARN:3013:400:3013:Query payload missing required parameters %1
 AAI_3014=5:6:WARN:3014:400:3014:Query payload is invalid %1
+AAI_3025=5:4:FATAL:3025:500:3025:Error connecting to Schema Service - Investigate
+AAI_3026=5:4:FATAL:3026:500:3026:Error reading OXM from Schema Service - Investigate
+AAI_3027=5:4:FATAL:3027:500:3027:Error reading EdgeRules from Schema Service - Investigate
+
 # pol errors
 AAI_3100=5:1:WARN:3100:400:3100:Unsupported operation %1
 AAI_3101=5:1:WARN:3101:403:3101:Attempt by client %1 to execute API %2
diff --git a/src/main/resources/etc/appprops/janusgraph-migration.properties b/src/main/resources/etc/appprops/janusgraph-migration.properties
new file mode 100644
index 0000000..6090c84
--- /dev/null
+++ b/src/main/resources/etc/appprops/janusgraph-migration.properties
@@ -0,0 +1,52 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+query.fast-property=true
+# the following parameters are not reloaded automatically and require a manual bounce
+storage.backend=inmemory
+storage.hostname=localhost
+
+#schema.default=none
+storage.lock.wait-time=300
+storage.hbase.table=aaigraph-dev1.dev
+storage.hbase.ext.zookeeper.znode.parent=/hbase-unsecure
+# Setting db-cache to false ensures the fastest propagation of changes across servers
+cache.db-cache = false
+
+#load graphson file on startup
+load.snapshot.file=false
+query.smart-limit=false
+
+
+#storage.backend=cql
+#storage.hostname=host1,host2,host3
+#storage.cql.replication-strategy-class=NetworkTopologyStrategy
+#storage.cql.replication-strategy-options=options
+# for single datacenter cluster
+#storage.cql.replication-factor=3
+
+#storage.cql.keyspace=aaigraph_single_dc
+#storage.cql.only-use-local-consistency-for-system-operations=true
+#storage.cql.cluster-name=clusterName
+#storage.cql.local-datacenter=dataCenter
+#storage.cql.read-consistency-level=ONE
+#storage.cql.write-consistency-level=QUORUM
+#storage.connection-timeout=100000
+#cache.tx-cache-size = 1000000
+#metrics.enabled=true
\ No newline at end of file
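run_Migrations (below) now points at this dedicated migration profile rather than the realtime one; with storage.backend=inmemory the migration framework can be exercised without touching a live cluster. A sketch of how such a properties file is consumed, using the standard JanusGraph factory API (the path here is illustrative):

    import org.janusgraph.core.JanusGraph;
    import org.janusgraph.core.JanusGraphFactory;

    public class MigrationGraphDemo {
        public static void main(String[] args) {
            // JanusGraphFactory reads the same storage.* keys shown above.
            JanusGraph graph = JanusGraphFactory.open(
                    "resources/etc/appprops/janusgraph-migration.properties");
            System.out.println("open: " + graph.isOpen());
            graph.close();
        }
    }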
diff --git a/src/main/scripts/dataRestoreFromSnapshot.sh b/src/main/scripts/dataRestoreFromSnapshot.sh
index 405a667..20bd0a8 100644
--- a/src/main/scripts/dataRestoreFromSnapshot.sh
+++ b/src/main/scripts/dataRestoreFromSnapshot.sh
@@ -21,10 +21,10 @@ if [ "$#" -lt 1 ]; then
 fi
 
 source_profile;
-export PRE_JAVA_OPTS=${PRE_JAVA_OPTS:--Xms6g -Xmx8g};
+export JAVA_PRE_OPTS=${JAVA_PRE_OPTS:--Xms6g -Xmx8g};
 
 #### Step 1) clear out the database
-execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "CLEAR_ENTIRE_DATABASE" "$1" "$2"
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "-c" "CLEAR_ENTIRE_DATABASE" "-f" "$1"
 if [ "$?" -ne "0" ]; then
     echo "Problem clearing out database."
     exit 1
@@ -39,7 +39,7 @@ fi
 
 #### Step 3) reload the data from a snapshot file
 
-execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "RELOAD_DATA" "$1"
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "-c" "RELOAD_DATA" "-f" "$1"
 if [ "$?" -ne "0" ]; then
     echo "Problem reloading data into the database."
     end_date;
diff --git a/src/main/scripts/dataRestoreFromSnapshotMulti.sh b/src/main/scripts/dataRestoreFromSnapshotMulti.sh
new file mode 100644
index 0000000..1e322dc
--- /dev/null
+++ b/src/main/scripts/dataRestoreFromSnapshotMulti.sh
@@ -0,0 +1,52 @@
+#!/bin/ksh
+#
+# NOTE - this is the updated version of this script which uses multi-threaded reload code
+#
+# This script uses the dataSnapshot and SchemaGenerator (via GenTester) java classes to restore
+# data to a database by doing three things:
+#   1) clear out whatever data and schema are currently in the db
+#   2) rebuild the schema (using the SchemaGenerator)
+#   3) reload data from the passed-in datafile (which must be found in the dataSnapShots directory and
+#      contain an xml view of the db data).
+#
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+
+if [ "$#" -lt 1 ]; then
+    echo "Illegal number of parameters"
+    echo "usage: $0 previous_snapshot_filename"
+    exit 1
+fi
+
+source_profile;
+export JAVA_PRE_OPTS=${JAVA_PRE_OPTS:--Xms6g -Xmx8g};
+
+#### Step 1) clear out the database
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "-c" "CLEAR_ENTIRE_DATABASE" "-f" "$1"
+if [ "$?" -ne "0" ]; then
+    echo "Problem clearing out database."
+    exit 1
+fi
+
+#### Step 2) rebuild the db-schema
+execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "GEN_DB_WITH_NO_DEFAULT_CR"
+if [ "$?" -ne "0" ]; then
+    echo "Problem rebuilding the schema (SchemaGenerator)."
+    exit 1
+fi
+
+#### Step 3) reload the data from a snapshot file
+
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "-c" "MULTITHREAD_RELOAD" "-f" "$1"
+if [ "$?" -ne "0" ]; then
+    echo "Problem reloading data into the database."
+    end_date;
+    exit 1
+fi
+
+end_date;
+exit 0
diff --git a/src/main/scripts/dataSnapshot.sh b/src/main/scripts/dataSnapshot.sh
index f380e85..ca3b033 100644
--- a/src/main/scripts/dataSnapshot.sh
+++ b/src/main/scripts/dataSnapshot.sh
@@ -23,6 +23,15 @@ fi
 start_date;
 check_user;
 source_profile;
+
+# Only source the file aai-graphadmin-tools-vars.sh for dataSnapshot.
+# Do not source it for dataRestore or any other tool.
+# Taking a snapshot and restoring one do not need the same settings:
+# restoring uses far more memory than taking a snapshot, so the restore
+# scripts set their own (larger) memory options instead.
+if [ -f "$PROJECT_HOME/resources/aai-graphadmin-tools-vars.sh" ]; then
+    source $PROJECT_HOME/resources/aai-graphadmin-tools-vars.sh
+fi;
 execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot $PROJECT_HOME/resources/logback.xml "$@"
 end_date;
 exit 0
diff --git a/src/main/scripts/dynamicPayloadGenerator.sh b/src/main/scripts/dynamicPayloadGenerator.sh
index 3d30790..2140354 100644
--- a/src/main/scripts/dynamicPayloadGenerator.sh
+++ b/src/main/scripts/dynamicPayloadGenerator.sh
@@ -136,15 +136,15 @@ while getopts ":f:s:d:n:c:i:m:o:p:" opt; do
 echo 'Done'
 
-set -A nodes pserver cloud-region availability-zone tenant zone complex
-
+set -A nodes customer service-subscription service pserver cloud-region availability-zone tenant zone complex
 
 #Create empty partial file
 > $INPUT_DATASNAPSHOT_FILE".partial"
 
 for nodeType in ${nodes[@]}
-   do
-     grep "aai-node-type.*\"value\":\"$nodeType\"" $INPUT_DATASNAPSHOT_FILE >>$INPUT_DATASNAPSHOT_FILE'.partial'
-   done
+   do
+     grep "aai-node-type.*\"value\":\"$nodeType\"" $INPUT_DATASNAPSHOT_FILE'.P'* >>$INPUT_DATASNAPSHOT_FILE'.out'
+     cat $INPUT_DATASNAPSHOT_FILE'.out' | cut -d':' -f2- > $INPUT_DATASNAPSHOT_FILE'.partial'
+   done
 
 execute_spring_jar org.onap.aai.dbgen.DynamicPayloadGenerator ${PROJECT_HOME}/resources/dynamicPayloadGenerator-logback.xml -s ${VALIDATE_SCHEMA} \
diff --git a/src/main/scripts/dynamicPayloadPartial.sh b/src/main/scripts/dynamicPayloadPartial.sh
index 8021aa6..77b2919 100644
--- a/src/main/scripts/dynamicPayloadPartial.sh
+++ b/src/main/scripts/dynamicPayloadPartial.sh
@@ -3,7 +3,7 @@
 #Create empty partial snapshot file
 INPUT_DATASNAPSHOT_FILE=$1
 
-set -A nodes pserver cloud-region availability-zone tenant zone complex
+set -A nodes customer service-subscription service pserver cloud-region availability-zone tenant zone complex
 
 > $INPUT_DATASNAPSHOT_FILE".partial"
 
 for nodeType in ${nodes[@]}
diff --git a/src/main/scripts/preDataRestore.sh b/src/main/scripts/preDataRestore.sh
new file mode 100644
index 0000000..c176a1a
--- /dev/null
+++ b/src/main/scripts/preDataRestore.sh
@@ -0,0 +1,44 @@
+#!/bin/ksh
+#
+# This script does just the first two steps of our normal dataRestoreFromSnapshot script.
+# This should only be needed if we are trouble-shooting and need to run step 3 (the
+# actual call to dataSnapshot) separately with different input params.
+#
+# This script does these two steps:
+#   1) clear out whatever data and schema are currently in the db
+#   2) rebuild the schema (using the SchemaGenerator)
+#
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+
+if [ "$#" -lt 1 ]; then
+    echo "Illegal number of parameters"
+    echo "usage: $0 previous_snapshot_filename"
+    exit 1
+fi
+
+source_profile;
+export JAVA_PRE_OPTS=${JAVA_PRE_OPTS:--Xms6g -Xmx8g};
+
+#### Step 1) clear out the database
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "CLEAR_ENTIRE_DATABASE" "$1" "$2"
+if [ "$?" -ne "0" ]; then
+    echo "Problem clearing out database."
+    exit 1
+fi
+
+#### Step 2) rebuild the db-schema
+execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "GEN_DB_WITH_NO_DEFAULT_CR"
+if [ "$?" -ne "0" ]; then
+    echo "Problem rebuilding the schema (SchemaGenerator)."
+    exit 1
+fi
+
+
+
+end_date;
+exit 0
diff --git a/src/main/scripts/run_Migrations.sh b/src/main/scripts/run_Migrations.sh
index 2b0f5c5..cbfe335 100644
--- a/src/main/scripts/run_Migrations.sh
+++ b/src/main/scripts/run_Migrations.sh
@@ -30,7 +30,7 @@ start_date;
 check_user;
 source_profile;
 
-ARGS="-c ${PROJECT_HOME}/resources/etc/appprops/janusgraph-realtime.properties";
+ARGS="-c ${PROJECT_HOME}/resources/etc/appprops/janusgraph-migration.properties";
 
 if [ -f "$PROJECT_HOME/resources/application.properties" ]; then
     # Get the application properties file and look for all lines