author    David Brilla <xbrilla@fi.muni.cz>  2020-03-30 18:50:36 +0200
committer David Brilla <david.brilla@tieto.com>  2020-04-16 14:51:55 +0200
commit    6b93c1c671ac4c87bcd1914f9514962f9d90887a (patch)
tree      5182f848247b8109c126b1b8b65a30463af1a8e5 /src/main/java/org
parent    e5bf790117a4ad5aa6961fefcb39bc7892bd1632 (diff)
sonar fixes
Issue-ID: AAI-2848
Change-Id: I45a408c00a450144dec723694eddd60eba883c47
Signed-off-by: David Brilla <david.brilla@tieto.com>
Diffstat (limited to 'src/main/java/org')
-rw-r--r--  src/main/java/org/onap/aai/GraphAdminApp.java                          |  12
-rw-r--r--  src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java   |  62
-rw-r--r--  src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java           |  55
-rw-r--r--  src/main/java/org/onap/aai/dataexport/DataExportTasks.java             |  68
-rw-r--r--  src/main/java/org/onap/aai/datagrooming/DataGrooming.java              | 218
-rw-r--r--  src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java       | 200
-rw-r--r--  src/main/java/org/onap/aai/dbgen/DupeTool.java                         | 152
-rw-r--r--  src/main/java/org/onap/aai/util/ExceptionTranslator.java               |  10
-rw-r--r--  src/main/java/org/onap/aai/util/GraphAdminDBUtils.java                 |   8
-rw-r--r--  src/main/java/org/onap/aai/util/PositiveNumValidator.java              |   2
-rw-r--r--  src/main/java/org/onap/aai/util/SendDeleteMigrationNotifications.java  |  11
-rw-r--r--  src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java |   2
-rw-r--r--  src/main/java/org/onap/aai/util/SendMigrationNotifications.java        |  29
-rw-r--r--  src/main/java/org/onap/aai/util/UniquePropertyCheck.java               |  20
14 files changed, 341 insertions, 508 deletions
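
Most of the hunks below repeat a small set of Sonar-driven cleanups: unused imports and fields are removed, fields and parameters are declared against collection interfaces, and the diamond operator replaces repeated type arguments. A minimal, self-contained sketch of the collection style follows; the class and the String value type are illustrative, the real DataGrooming fields hold TinkerPop Vertex/Edge values.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative only: the "program to the interface" + diamond-operator style
    // that the DataGrooming and DupeTool hunks below switch to.
    public class CollectionStyleSketch {

        // before: HashMap<String, String> misMatchedHash = new HashMap<String, String>();
        // after:  declare against Map/List and let the compiler infer type arguments
        private Map<String, String> misMatchedHash = new HashMap<>();
        private List<String> dupeGroups = new ArrayList<>();

        public Map<String, String> getMisMatchedHash() {
            return misMatchedHash;
        }

        public List<String> getDupeGroups() {
            return dupeGroups;
        }
    }
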
diff --git a/src/main/java/org/onap/aai/GraphAdminApp.java b/src/main/java/org/onap/aai/GraphAdminApp.java
index 1030f3a..3f7abf3 100644
--- a/src/main/java/org/onap/aai/GraphAdminApp.java
+++ b/src/main/java/org/onap/aai/GraphAdminApp.java
@@ -19,21 +19,16 @@
*/
package org.onap.aai;
-import com.att.eelf.configuration.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.commons.lang3.exception.ExceptionUtils;
import org.onap.aai.aailog.logs.AaiDebugLog;
import org.onap.aai.config.PropertyPasswordConfiguration;
import org.onap.aai.dbmap.AAIGraph;
-import java.util.Properties;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.logging.ErrorLogHelper;
-import org.onap.aai.logging.LogFormatTools;
import org.onap.aai.nodes.NodeIngestor;
import org.onap.aai.util.AAIConfig;
-import org.onap.aai.util.AAIConstants;
import org.onap.aai.util.ExceptionTranslator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
@@ -75,9 +70,6 @@ public class GraphAdminApp {
public static final String APP_NAME = "GraphAdmin";
private static final Logger LOGGER = LoggerFactory.getLogger(GraphAdminApp.class);
-
- private static final String FROMAPPID = "AAI-GA";
- private static final String TRANSID = UUID.randomUUID().toString();
private static AaiDebugLog debugLog = new AaiDebugLog();
static {
diff --git a/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java b/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java
index 352e96f..306ffa0 100644
--- a/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java
+++ b/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java
@@ -49,64 +49,38 @@ public class PropertyPasswordConfiguration implements ApplicationContextInitiali
String certPath = environment.getProperty("server.certs.location");
File passwordFile = null;
File passphrasesFile = null;
- InputStream passwordStream = null;
- InputStream passphrasesStream = null;
Map<String, Object> sslProps = new LinkedHashMap<>();
// Override the passwords from application.properties if we find AAF certman files
if (certPath != null) {
- try {
- passwordFile = new File(certPath + ".password");
- passwordStream = new FileInputStream(passwordFile);
+ passwordFile = new File(certPath + ".password");
+ try (InputStream passwordStream = new FileInputStream(passwordFile)) {
- if (passwordStream != null) {
- String keystorePassword = null;
+ String keystorePassword = null;
- keystorePassword = IOUtils.toString(passwordStream);
- if (keystorePassword != null) {
- keystorePassword = keystorePassword.trim();
- }
- sslProps.put("server.ssl.key-store-password", keystorePassword);
- sslProps.put("schema.service.ssl.key-store-password", keystorePassword);
- } else {
- logger.info("Not using AAF Certman password file");
+ keystorePassword = IOUtils.toString(passwordStream);
+ if (keystorePassword != null) {
+ keystorePassword = keystorePassword.trim();
}
+ sslProps.put("server.ssl.key-store-password", keystorePassword);
+ sslProps.put("schema.service.ssl.key-store-password", keystorePassword);
} catch (IOException e) {
logger.warn("Not using AAF Certman password file, e=" + e.getMessage());
- } finally {
- if (passwordStream != null) {
- try {
- passwordStream.close();
- } catch (Exception e) {
- }
- }
}
- try {
- passphrasesFile = new File(certPath + ".passphrases");
- passphrasesStream = new FileInputStream(passphrasesFile);
+ passphrasesFile = new File(certPath + ".passphrases");
+ try (InputStream passphrasesStream = new FileInputStream(passphrasesFile)) {
- if (passphrasesStream != null) {
- String truststorePassword = null;
- Properties passphrasesProps = new Properties();
- passphrasesProps.load(passphrasesStream);
- truststorePassword = passphrasesProps.getProperty("cadi_truststore_password");
- if (truststorePassword != null) {
- truststorePassword = truststorePassword.trim();
- }
- sslProps.put("server.ssl.trust-store-password", truststorePassword);
- sslProps.put("schema.service.ssl.trust-store-password", truststorePassword);
- } else {
- logger.info("Not using AAF Certman passphrases file");
+ String truststorePassword = null;
+ Properties passphrasesProps = new Properties();
+ passphrasesProps.load(passphrasesStream);
+ truststorePassword = passphrasesProps.getProperty("cadi_truststore_password");
+ if (truststorePassword != null) {
+ truststorePassword = truststorePassword.trim();
}
+ sslProps.put("server.ssl.trust-store-password", truststorePassword);
+ sslProps.put("schema.service.ssl.trust-store-password", truststorePassword);
} catch (IOException e) {
logger.warn("Not using AAF Certman passphrases file, e=" + e.getMessage());
- } finally {
- if (passphrasesStream != null) {
- try {
- passphrasesStream.close();
- } catch (Exception e) {
- }
- }
}
}
for (PropertySource<?> propertySource : environment.getPropertySources()) {
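
The PropertyPasswordConfiguration hunk above replaces manual stream handling, a null check on a stream that can never be null, and an empty catch-on-close with try-with-resources. A minimal sketch of that pattern, assuming only commons-io on the classpath; the method and its caller are illustrative, and the charset-taking IOUtils.toString overload is used because the single-argument one is deprecated.

    import org.apache.commons.io.IOUtils;

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PasswordFileSketch {

        // Illustrative only: the stream is closed automatically when the try block
        // exits, so the old finally/close boilerplate is no longer needed.
        static Map<String, Object> readKeystorePassword(String certPath) {
            Map<String, Object> sslProps = new LinkedHashMap<>();
            try (InputStream passwordStream = new FileInputStream(certPath + ".password")) {
                String keystorePassword =
                        IOUtils.toString(passwordStream, StandardCharsets.UTF_8).trim();
                sslProps.put("server.ssl.key-store-password", keystorePassword);
            } catch (IOException e) {
                // same behaviour as the hunk above: log and fall back to application.properties
                System.out.println("Not using AAF Certman password file, e=" + e.getMessage());
            }
            return sslProps;
        }
    }
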
diff --git a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
index dff22a4..8fc6295 100644
--- a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
+++ b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
@@ -18,36 +18,32 @@
* ============LICENSE_END=========================================================
*/
package org.onap.aai.datacleanup;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.nio.file.attribute.FileTime;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.Map;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
import org.onap.aai.aailog.logs.AaiScheduledTaskAuditLog;
-import org.onap.aai.exceptions.AAIException;
import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.logging.LogFormatTools;
import org.onap.aai.util.AAIConfig;
import org.onap.aai.util.AAIConstants;
import org.onap.logging.filter.base.ONAPComponents;
import org.onap.logging.ref.slf4j.ONAPLogConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.PropertySource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.nio.file.Files;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.nio.file.attribute.FileTime;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
@Component
@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
@@ -66,7 +62,7 @@ public class DataCleanupTasks {
ageDelete is the number of days after which the data files will be deleted i.e after 30 days.
*/
@Scheduled(cron = "${datagroomingcleanup.cron}" )
- public void dataGroomingCleanup() throws AAIException, Exception {
+ public void dataGroomingCleanup() {
auditLog.logBefore("dataGroomingCleanup", ONAPComponents.AAI.toString() );
logger.debug("Started cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));
@@ -77,7 +73,6 @@ public class DataCleanupTasks {
String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";
File path = new File(dataGroomingDir);
- File archivepath = new File(archiveDir);
File dataGroomingPath = new File(dataGroomingArcDir);
logger.debug("The logDir is " + logDir);
@@ -139,20 +134,18 @@ public class DataCleanupTasks {
/**
* This method checks if the directory exists
- * @param DIR
+ * @param dir the Directory
*
*/
public boolean directoryExists(String dir) {
- File path = new File(dir);
- boolean exists = path.exists();
- return exists;
+ return new File(dir).exists();
}
- public Date getZipDate(Integer days) throws Exception {
+ public Date getZipDate(Integer days) {
return getZipDate(days, new Date());
}
- public Date getZipDate(Integer days, Date date) throws Exception{
+ public Date getZipDate(Integer days, Date date) {
Calendar cal = Calendar.getInstance();
logger.debug("The current date is " + date );
@@ -170,24 +163,21 @@ public class DataCleanupTasks {
BasicFileAttributes.class);
FileTime time = attr.creationTime();
String formatted = simpleDateFormat.format( new Date( time.toMillis() ) );
- Date d = simpleDateFormat.parse(formatted);
- return d;
+ return simpleDateFormat.parse(formatted);
}
/**
* This method will zip the files and add it to the archive folder
* Checks if the archive folder exists, if not then creates one
* After adding the file to archive folder it deletes the file from the filepath
- * @throws AAIException
* @throws Exception
*/
- public void archive(File file, String archiveDir, String afterArchiveDir) throws AAIException, Exception {
+ public void archive(File file, String archiveDir, String afterArchiveDir) throws Exception {
logger.debug("Inside the archive folder");
String filename = file.getName();
logger.debug("file name is " +filename);
- File archivepath = new File(archiveDir);
-
+
String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename;
File dataGroomingPath = new File(afterArchiveDir);
@@ -239,7 +229,7 @@ public class DataCleanupTasks {
ageDelete is the number of days after which the data files will be deleted i.e after 30 days.
*/
@Scheduled(cron = "${datasnapshotcleanup.cron}" )
- public void dataSnapshotCleanup() throws AAIException, Exception {
+ public void dataSnapshotCleanup() {
logger.info(ONAPLogConstants.Markers.ENTRY, "Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
@@ -249,7 +239,6 @@ public class DataCleanupTasks {
String archiveDir = dataSnapshotDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";
File path = new File(dataSnapshotDir);
- File archivepath = new File(archiveDir);
File dataSnapshotPath = new File(dataSnapshotArcDir);
logger.debug("The logDir is " + logDir);
diff --git a/src/main/java/org/onap/aai/dataexport/DataExportTasks.java b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
index 0131650..2d0625c 100644
--- a/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
+++ b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
@@ -19,29 +19,15 @@
*/
package org.onap.aai.dataexport;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.Properties;
-import java.util.TreeMap;
-import java.util.UUID;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
+import com.att.eelf.configuration.Configuration;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.comparator.LastModifiedFileComparator;
+import org.apache.commons.io.filefilter.DirectoryFileFilter;
+import org.apache.commons.io.filefilter.FileFileFilter;
+import org.apache.commons.io.filefilter.RegexFileFilter;
import org.onap.aai.aailog.logs.AaiScheduledTaskAuditLog;
import org.onap.aai.dbgen.DynamicPayloadGenerator;
import org.onap.aai.edges.EdgeIngestor;
-import org.onap.aai.exceptions.AAIException;
import org.onap.aai.introspection.LoaderFactory;
import org.onap.aai.logging.ErrorLogHelper;
import org.onap.aai.logging.LogFormatTools;
@@ -49,21 +35,18 @@ import org.onap.aai.setup.SchemaVersions;
import org.onap.aai.util.AAIConfig;
import org.onap.aai.util.AAIConstants;
import org.onap.logging.filter.base.ONAPComponents;
-import org.onap.logging.ref.slf4j.ONAPLogConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.PropertySource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
-import com.att.eelf.configuration.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
-import org.apache.commons.io.comparator.LastModifiedFileComparator;
-import org.apache.commons.io.filefilter.DirectoryFileFilter;
-import org.apache.commons.io.filefilter.FileFileFilter;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.filefilter.RegexFileFilter;
+import java.io.*;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
/**
* DataExportTasks obtains a graph snapshot and invokes DynamicPayloadGenerator
@@ -73,12 +56,9 @@ import org.apache.commons.io.filefilter.RegexFileFilter;
@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
public class DataExportTasks {
- private AaiScheduledTaskAuditLog auditLog;
-
private static final Logger LOGGER;
private static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
- private static final String GA_MS = "aai-graphadmin";
-
+
static {
System.setProperty("aai.service.name", DataExportTasks.class.getSimpleName());
Properties props = System.getProperties();
@@ -114,10 +94,9 @@ public class DataExportTasks {
/**
* The exportTask method.
*
- * @throws AAIException, Exception
*/
- public void exportTask() throws AAIException, Exception {
- auditLog = new AaiScheduledTaskAuditLog();
+ public void exportTask() throws Exception {
+ AaiScheduledTaskAuditLog auditLog = new AaiScheduledTaskAuditLog();
auditLog.logBefore("dataExportTask", ONAPComponents.AAI.toString());
LOGGER.info("Started exportTask: " + dateFormat.format(new Date()));
if (AAIConfig.get("aai.dataexport.enable").equalsIgnoreCase("false")) {
@@ -166,7 +145,7 @@ public class DataExportTasks {
snapshotFilePath = findMultipleSnapshots();
}
- List<String> paramsList = new ArrayList<String>();
+ List<String> paramsList = new ArrayList<>();
paramsList.add("-s");
paramsList.add(enableSchemaValidation);
paramsList.add("-o");
@@ -229,11 +208,7 @@ public class DataExportTasks {
LOGGER.debug("Exception while running the check to see if dataExport is running "+ LogFormatTools.getStackTop(e));
}
- if(count > 0){
- return true;
- } else {
- return false;
- }
+ return count > 0;
}
/**
@@ -272,7 +247,7 @@ public class DataExportTasks {
AAIConstants.AAI_FILESEP + "dataSnapshots";
String snapshotName = null;
File targetDirFile = new File(targetDir);
- TreeMap<String,List<File>> fileMap = new TreeMap<String,List<File>>(String.CASE_INSENSITIVE_ORDER);
+ TreeMap<String,List<File>> fileMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
/*dataSnapshot.graphSON.201804022009.P0
dataSnapshot.graphSON.201804022009.P1
@@ -300,7 +275,7 @@ public class DataExportTasks {
String g1 = m.group(1);
LOGGER.debug ("Found group " + g1);
if ( !fileMap.containsKey(g1) ) {
- ArrayList<File> l = new ArrayList<File>();
+ ArrayList<File> l = new ArrayList<>();
l.add(f);
fileMap.put(g1, l);
}
@@ -342,9 +317,8 @@ public class DataExportTasks {
/**
* The deletePayload method deletes all the payload files that it finds at targetDirectory
* @param targetDirFile the directory that contains payload files
- * @throws AAIException
*/
- private static void deletePayload(File targetDirFile) throws AAIException {
+ private static void deletePayload(File targetDirFile) {
File[] allFilesArr = targetDirFile.listFiles((FileFilter)DirectoryFileFilter.DIRECTORY);
if ( allFilesArr == null || allFilesArr.length == 0 ) {
diff --git a/src/main/java/org/onap/aai/datagrooming/DataGrooming.java b/src/main/java/org/onap/aai/datagrooming/DataGrooming.java
index 29a588b..88bfebb 100644
--- a/src/main/java/org/onap/aai/datagrooming/DataGrooming.java
+++ b/src/main/java/org/onap/aai/datagrooming/DataGrooming.java
@@ -88,14 +88,14 @@ public class DataGrooming {
private CommandLineArgs cArgs;
- HashMap<String, Vertex> orphanNodeHash ;
- HashMap<String, Vertex> missingAaiNtNodeHash ;
- HashMap<String, Vertex> badUriNodeHash ;
- HashMap<String, Vertex> badIndexNodeHash ;
- HashMap<String, Edge> oneArmedEdgeHash ;
- HashMap<String, Vertex> ghostNodeHash ;
- ArrayList<String> dupeGroups;
- Set<String> deleteCandidateList;
+ private Map<String, Vertex> orphanNodeHash ;
+ private Map<String, Vertex> missingAaiNtNodeHash ;
+ private Map<String, Vertex> badUriNodeHash ;
+ private Map<String, Vertex> badIndexNodeHash ;
+ private Map<String, Edge> oneArmedEdgeHash ;
+ private Map<String, Vertex> ghostNodeHash ;
+ private List<String> dupeGroups;
+ private Set<String> deleteCandidateList;
private int deleteCount = 0;
public DataGrooming(LoaderFactory loaderFactory, SchemaVersions schemaVersions){
@@ -438,14 +438,14 @@ public class DataGrooming {
ArrayList<String> errArr = new ArrayList<>();
int totalNodeCount = 0;
- HashMap<String, String> misMatchedHash = new HashMap<String, String>();
- orphanNodeHash = new HashMap<String, Vertex>();
- missingAaiNtNodeHash = new HashMap<String, Vertex>();
- badUriNodeHash = new HashMap<String, Vertex>();
- badIndexNodeHash = new HashMap<String, Vertex>();
- oneArmedEdgeHash = new HashMap<String, Edge>();
- HashMap<String, String> emptyVertexHash = new HashMap<String, String>();
- ghostNodeHash = new HashMap<String, Vertex>();
+ HashMap<String, String> misMatchedHash = new HashMap<>();
+ orphanNodeHash = new HashMap<>();
+ missingAaiNtNodeHash = new HashMap<>();
+ badUriNodeHash = new HashMap<>();
+ badIndexNodeHash = new HashMap<>();
+ oneArmedEdgeHash = new HashMap<>();
+ HashMap<String, String> emptyVertexHash = new HashMap<>();
+ ghostNodeHash = new HashMap<>();
dupeGroups = new ArrayList<>();
LOGGER.debug(" Using default schemaVersion = [" + schemaVersions.getDefaultVersion().toString() + "]" );
@@ -494,15 +494,13 @@ public class DataGrooming {
// Determine what the key fields are for this nodeType - use an arrayList so they
// can be gotten out in a consistent order.
Set <String> keyPropsSet = entry.getValue().getKeys();
- ArrayList <String> keyProps = new ArrayList <String> ();
- keyProps.addAll(keyPropsSet);
+ ArrayList<String> keyProps = new ArrayList<>(keyPropsSet);
Set <String> indexedPropsSet = entry.getValue().getIndexedProperties();
- ArrayList <String> indexedProps = new ArrayList <String> ();
- indexedProps.addAll(indexedPropsSet);
+ ArrayList<String> indexedProps = new ArrayList<>(indexedPropsSet);
Iterator<String> indPropItr = indexedProps.iterator();
- HashMap <String,String> propTypeHash = new HashMap <String, String> ();
+ HashMap <String,String> propTypeHash = new HashMap<>();
while( indPropItr.hasNext() ){
String propName = indPropItr.next();
String propType = entry.getValue().getType(propName);
@@ -569,7 +567,7 @@ public class DataGrooming {
while (keyPropI.hasNext()) {
String propName = keyPropI.next();
String propVal = "";
- Object obj = thisVtx.<Object>property(propName).orElse(null);
+ Object obj = thisVtx.property(propName).orElse(null);
if (obj != null) {
propVal = obj.toString();
}
@@ -608,7 +606,7 @@ public class DataGrooming {
boolean updateOnlyFlag = false;
try {
processedVertices.add(thisVtx.id().toString());
- Object ob = thisVtx.<Object>property("aai-node-type").orElse(null);
+ Object ob = thisVtx.property("aai-node-type").orElse(null);
if( ob == null && !skipIndexUpdateFix ){
updateIndexedPropsForMissingNT(thisVtx, thisVid, nType, propTypeHash, indexedProps);
updateOnlyFlag = true;
@@ -640,7 +638,7 @@ public class DataGrooming {
// That is, you could have a node with no edges... which sounds like an orphan, but not all
// nodes require edges. For example, you could have a newly created "image" node which does not have
// any edges connected to it (using it) yet.
- Object ob = thisVtx.<Object>property("aai-node-type").orElse(null);
+ Object ob = thisVtx.property("aai-node-type").orElse(null);
if( ob == null ){
// Group this with missing-node-type guys - which
// we will delete more readily than orphans.
@@ -648,7 +646,7 @@ public class DataGrooming {
missingAaiNtNodeHash.put(thisVid, thisVtx);
}
else {
- Object ob2 = thisVtx.<Object>property("aai-uuid").orElse(null);
+ Object ob2 = thisVtx.property("aai-uuid").orElse(null);
String auid = "";
if( ob2 != null ){
auid = ob2.toString();
@@ -716,7 +714,7 @@ public class DataGrooming {
boolean okFlag = true;
boolean updateOnlyFlag = false;
try {
- Object ob = thisVtx.<Object>property("aai-node-type").orElse(null);
+ Object ob = thisVtx.property("aai-node-type").orElse(null);
if( ob == null && !skipIndexUpdateFix ){
updateIndexedPropsForMissingNT(thisVtx, thisVid, nType, propTypeHash, indexedProps);
dummyUpdCount++;
@@ -782,7 +780,7 @@ public class DataGrooming {
if( depNodeTypes.isEmpty() && !dupeCheckOff ){
// For this nodeType, we haven't looked at the possibility of a
// non-dependent node where two verts have same key info
- ArrayList<ArrayList<Vertex>> nonDependentDupeSets = new ArrayList<ArrayList<Vertex>>();
+ ArrayList<ArrayList<Vertex>> nonDependentDupeSets = new ArrayList<>();
nonDependentDupeSets = getDupeSets4NonDepNodes(
TRANSID, FROMAPPID, g,
version, nType, tmpList,
@@ -927,7 +925,7 @@ public class DataGrooming {
Boolean cantGetUsingVid = false;
if (vIn != null) {
try {
- Object ob = vIn.<Object>property("aai-node-type").orElse(null);
+ Object ob = vIn.property("aai-node-type").orElse(null);
if (ob != null) {
vNtI = ob.toString();
keysMissing = anyKeyFieldsMissing(vNtI, vIn, loader);
@@ -1033,7 +1031,7 @@ public class DataGrooming {
cantGetUsingVid = false;
if (vOut != null) {
try {
- Object ob = vOut.<Object>property("aai-node-type").orElse(null);
+ Object ob = vOut.property("aai-node-type").orElse(null);
if (ob != null) {
vNtO = ob.toString();
keysMissing = anyKeyFieldsMissing(vNtO,
@@ -1342,7 +1340,7 @@ public class DataGrooming {
int dupeSetCounter = 0;
while (dupeIter.hasNext()) {
dupeSetCounter++;
- String dset = (String) dupeIter.next();
+ String dset = dupeIter.next();
bw.write("\n --- Duplicate Group # " + dupeSetCounter
+ " Detail -----------\n");
@@ -1426,7 +1424,7 @@ public class DataGrooming {
bw.write("\n ------------- Got these errors while processing: \n");
Iterator<String> errIter = errArr.iterator();
while (errIter.hasNext()) {
- String line = (String) errIter.next();
+ String line = errIter.next();
bw.write(line + "\n");
}
@@ -1540,7 +1538,7 @@ public class DataGrooming {
}// end of doTheGrooming()
- public void tryToReSetIndexedProps(Vertex thisVtx, String thisVidStr, ArrayList <String> indexedProps) {
+ public void tryToReSetIndexedProps(Vertex thisVtx, String thisVidStr, List <String> indexedProps) {
// Note - This is for when a node looks to be a phantom (ie. an index/pointer problem)
// We will only deal with properties that are indexed and have a value - and for those,
// we will re-set them to the same value they already have, so that hopefully if their
@@ -1551,7 +1549,7 @@ public class DataGrooming {
LOGGER.debug(" We will try to re-set the indexed properties for this node without changing any property values. VID = " + thisVidStr );
// These reserved-prop-names are all indexed for all nodes
- ArrayList <String> propList = new ArrayList <String> ();
+ ArrayList <String> propList = new ArrayList <> ();
propList.addAll(indexedProps);
// Add in the global props that we'd also like to reset
propList.add("aai-node-type");
@@ -1561,7 +1559,7 @@ public class DataGrooming {
while( propNameItr.hasNext() ){
String propName = propNameItr.next();
try {
- Object valObj = thisVtx.<Object>property(propName).orElse(null);
+ Object valObj = thisVtx.property(propName).orElse(null);
if( valObj != null ){
LOGGER.debug(" We will try resetting prop [" + propName
+ "], to val = [" + valObj.toString() + "] for VID = " + thisVidStr);
@@ -1577,7 +1575,7 @@ public class DataGrooming {
public void updateIndexedPropsForMissingNT(Vertex thisVtx, String thisVidStr, String nType,
- HashMap <String,String>propTypeHash, ArrayList <String> indexedProps) {
+ Map <String,String>propTypeHash, List <String> indexedProps) {
// This is for the very specific "missing-aai-node-type" scenario.
// That is: a node that does not have the "aai-node-type" property, but still has
// an aai-node-type Index pointing to it and is an orphan node. Nodes like this
@@ -1641,7 +1639,7 @@ public class DataGrooming {
if( propValObj != null ){
propVal = propValObj.toString();
}
- Object checkValObj = tmpV.<Object>property(propName).orElse(null);
+ Object checkValObj = tmpV.property(propName).orElse(null);
if( checkValObj == null ) {
return false;
}
@@ -1682,7 +1680,7 @@ public class DataGrooming {
Iterator<String> keyPropI = keyPropNamesColl.iterator();
while (keyPropI.hasNext()) {
String propName = keyPropI.next();
- Object ob = v.<Object>property(propName).orElse(null);
+ Object ob = v.property(propName).orElse(null);
if (ob == null || ob.toString().equals("")) {
// It is missing a key property
String thisVertId = v.id().toString();
@@ -1691,7 +1689,7 @@ public class DataGrooming {
return true;
}
}
- Object ob = v.<Object>property("aai-uri").orElse(null);
+ Object ob = v.property("aai-uri").orElse(null);
if (ob == null || ob.toString().equals("")) {
// It is missing a key property
String thisVertId = v.id().toString();
@@ -1770,7 +1768,7 @@ public class DataGrooming {
*/
public Vertex getPreferredDupe(String transId,
String fromAppId, GraphTraversalSource g,
- ArrayList<Vertex> dupeVertexList, String ver, Loader loader)
+ List<Vertex> dupeVertexList, String ver, Loader loader)
throws AAIException {
// This method assumes that it is being passed a List of
@@ -1793,14 +1791,14 @@ public class DataGrooming {
// If they don't all have the same aai-uri, then we will not
// choose between them - we'll need someone to manually
// check to pick which one makes sense to keep.
- Object uriOb = dupeVertexList.get(0).<Object>property("aai-uri").orElse(null);
+ Object uriOb = dupeVertexList.get(0).property("aai-uri").orElse(null);
if( uriOb == null || uriOb.toString().equals("") ){
// this is a bad node - hopefully will be picked up by phantom checker
return nullVtx;
}
String thisUri = uriOb.toString();
for (int i = 1; i < listSize; i++) {
- uriOb = dupeVertexList.get(i).<Object>property("aai-uri").orElse(null);
+ uriOb = dupeVertexList.get(i).property("aai-uri").orElse(null);
if( uriOb == null || uriOb.toString().equals("") ){
// this is a bad node - hopefully will be picked up by phantom checker
return nullVtx;
@@ -1864,11 +1862,11 @@ public class DataGrooming {
String vtxANodeType = "";
String vtxBNodeType = "";
- Object objType = vtxA.<Object>property("aai-node-type").orElse(null);
+ Object objType = vtxA.property("aai-node-type").orElse(null);
if (objType != null) {
vtxANodeType = objType.toString();
}
- objType = vtxB.<Object>property("aai-node-type").orElse(null);
+ objType = vtxB.property("aai-node-type").orElse(null);
if (objType != null) {
vtxBNodeType = objType.toString();
}
@@ -1884,7 +1882,7 @@ public class DataGrooming {
// (We'll check dep-node later)
// Determine what the key fields are for this nodeType
Collection <String> keyProps = new ArrayList <>();
- HashMap <String,Object> keyPropValsHash = new HashMap <String,Object>();
+ HashMap <String,Object> keyPropValsHash = new HashMap <>();
try {
keyProps = loader.introspectorFromName(vtxANodeType).getKeys();
} catch (AAIUnknownObjectException e) {
@@ -1896,12 +1894,12 @@ public class DataGrooming {
while (keyPropI.hasNext()) {
String propName = keyPropI.next();
String vtxAKeyPropVal = "";
- objType = vtxA.<Object>property(propName).orElse(null);
+ objType = vtxA.property(propName).orElse(null);
if (objType != null) {
vtxAKeyPropVal = objType.toString();
}
String vtxBKeyPropVal = "";
- objType = vtxB.<Object>property(propName).orElse(null);
+ objType = vtxB.property(propName).orElse(null);
if (objType != null) {
vtxBKeyPropVal = objType.toString();
}
@@ -1933,7 +1931,7 @@ public class DataGrooming {
Vertex tvCon = iter.next();
String conVid = tvCon.id().toString();
String nt = "";
- objType = tvCon.<Object>property("aai-node-type").orElse(null);
+ objType = tvCon.property("aai-node-type").orElse(null);
if (objType != null) {
nt = objType.toString();
}
@@ -1949,7 +1947,7 @@ public class DataGrooming {
Vertex tvCon = iter.next();
String conVid = tvCon.id().toString();
String nt = "";
- objType = tvCon.<Object>property("aai-node-type").orElse(null);
+ objType = tvCon.property("aai-node-type").orElse(null);
if (objType != null) {
nt = objType.toString();
}
@@ -2097,7 +2095,7 @@ public class DataGrooming {
String fromAppId, Graph g, GraphTraversalSource source, String version, String nType,
List<Vertex> passedVertList, Boolean dupeFixOn,
Set<String> deleteCandidateList,
- ArrayList<String> alreadyFoundDupeGroups, Loader loader ) {
+ List<String> alreadyFoundDupeGroups, Loader loader ) {
ArrayList<String> returnList = new ArrayList<>();
ArrayList<Vertex> checkVertList = new ArrayList<>();
@@ -2277,7 +2275,7 @@ public class DataGrooming {
// we're trying to find duplicates - so we
// allow for the case where more than one is under the same parent node.
- HashMap<String, ArrayList<Vertex>> retHash = new HashMap<String, ArrayList<Vertex>>();
+ HashMap<String, ArrayList<Vertex>> retHash = new HashMap<>();
if (loader.introspectorFromName(nType).isTopLevel()) {
// This method really should not have been called if this is not the
// kind of node
@@ -2302,7 +2300,7 @@ public class DataGrooming {
Vertex tmpParentVtx = getConnectedParent( g, thisVert );
if( tmpParentVtx != null ) {
String parentNt = null;
- Object obj = tmpParentVtx.<Object>property("aai-node-type").orElse(null);
+ Object obj = tmpParentVtx.property("aai-node-type").orElse(null);
if (obj != null) {
parentNt = obj.toString();
}
@@ -2427,13 +2425,11 @@ public class DataGrooming {
* @param graph the graph
* @param vtx
* @return true if aai-uri is populated and the aai-uri-index points to this vtx
- * @throws AAIException the AAI exception
*/
- public Boolean checkAaiUriOk( GraphTraversalSource graph, Vertex origVtx )
- throws AAIException{
+ public Boolean checkAaiUriOk( GraphTraversalSource graph, Vertex origVtx ) {
String aaiUriStr = "";
try {
- Object ob = origVtx.<Object>property("aai-uri").orElse(null);
+ Object ob = origVtx.property("aai-uri").orElse(null);
String origVid = origVtx.id().toString();
LOGGER.debug("DEBUG --- do checkAaiUriOk() for origVid = " + origVid);
if (ob == null || ob.toString().equals("")) {
@@ -2641,7 +2637,7 @@ public class DataGrooming {
}
else {
String nodeType = "";
- Object ob = tVert.<Object>property("aai-node-type").orElse(null);
+ Object ob = tVert.property("aai-node-type").orElse(null);
if( ob == null ){
nodeType = "null";
}
@@ -2662,8 +2658,7 @@ public class DataGrooming {
}
- private ArrayList <Vertex> getConnectedNodes(GraphTraversalSource g, Vertex startVtx )
- throws AAIException {
+ private ArrayList <Vertex> getConnectedNodes(GraphTraversalSource g, Vertex startVtx ) {
ArrayList <Vertex> retArr = new ArrayList <> ();
if( startVtx == null ){
@@ -2685,7 +2680,7 @@ public class DataGrooming {
private ArrayList <Vertex> getConnectedChildrenOfOneType( GraphTraversalSource g,
- Vertex startVtx, String childNType ) throws AAIException{
+ Vertex startVtx, String childNType ) {
ArrayList <Vertex> childList = new ArrayList <> ();
Iterator <Vertex> vertI = g.V(startVtx).union(__.outE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.OUT.toString()).inV(), __.inE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.IN.toString()).outV());
@@ -2693,7 +2688,7 @@ public class DataGrooming {
Vertex tmpVtx = null;
while( vertI != null && vertI.hasNext() ){
tmpVtx = vertI.next();
- Object ob = tmpVtx.<Object>property("aai-node-type").orElse(null);
+ Object ob = tmpVtx.property("aai-node-type").orElse(null);
if (ob != null) {
String tmpNt = ob.toString();
if( tmpNt.equals(childNType)){
@@ -2708,7 +2703,7 @@ public class DataGrooming {
private Vertex getConnectedParent( GraphTraversalSource g,
- Vertex startVtx ) throws AAIException{
+ Vertex startVtx ) {
Vertex parentVtx = null;
Iterator <Vertex> vertI = g.V(startVtx).union(__.inE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.OUT.toString()).outV(), __.outE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.IN.toString()).inV());
@@ -2789,7 +2784,7 @@ public class DataGrooming {
}
if( keyVals2VidHash.containsKey(hKey) ){
// We've already seen this key
- ArrayList <String> tmpVL = (ArrayList <String>)keyVals2VidHash.get(hKey);
+ ArrayList <String> tmpVL = keyVals2VidHash.get(hKey);
tmpVL.add(thisVid);
keyVals2VidHash.put(hKey, tmpVL);
}
@@ -2951,102 +2946,111 @@ class CommandLineArgs {
}
- public HashMap<String, Vertex> getGhostNodeHash() {
- return ghostNodeHash;
- }
-
- public void setGhostNodeHash(HashMap<String, Vertex> ghostNodeHash) {
- this.ghostNodeHash = ghostNodeHash;
- }
-
- public int getGhostNodeCount(){
- return getGhostNodeHash().size();
- }
-
- public HashMap<String, Vertex> getOrphanNodeHash() {
+ public Map<String, Vertex> getOrphanNodeHash() {
return orphanNodeHash;
}
- public void setOrphanNodeHash(HashMap<String, Vertex> orphanNodeHash) {
+ public DataGrooming setOrphanNodeHash(Map<String, Vertex> orphanNodeHash) {
this.orphanNodeHash = orphanNodeHash;
+ return this;
}
-
+
public int getOrphanNodeCount(){
return getOrphanNodeHash().size();
}
-
- public HashMap<String, Vertex> getMissingAaiNtNodeHash() {
+
+ public Map<String, Vertex> getMissingAaiNtNodeHash() {
return missingAaiNtNodeHash;
}
- public void setMissingAaiNtNodeHash(HashMap<String, Vertex> missingAaiNtNodeHash) {
+ public DataGrooming setMissingAaiNtNodeHash(Map<String, Vertex> missingAaiNtNodeHash) {
this.missingAaiNtNodeHash = missingAaiNtNodeHash;
+ return this;
}
-
+
public int getMissingAaiNtNodeCount(){
return getMissingAaiNtNodeHash().size();
}
-
- public HashMap<String, Vertex> getBadUriNodeHash() {
+
+ public Map<String, Vertex> getBadUriNodeHash() {
return badUriNodeHash;
}
- public void setBadUriNodeHash(HashMap<String, Vertex> badUriNodeHash) {
+ public DataGrooming setBadUriNodeHash(Map<String, Vertex> badUriNodeHash) {
this.badUriNodeHash = badUriNodeHash;
+ return this;
}
-
+
public int getBadUriNodeCount(){
return getBadUriNodeHash().size();
}
- public HashMap<String, Vertex> getBadIndexNodeHash() {
+ public Map<String, Vertex> getBadIndexNodeHash() {
return badIndexNodeHash;
}
- public void setBadIndexNodeHash(HashMap<String, Vertex> badIndexNodeHash) {
+ public DataGrooming setBadIndexNodeHash(Map<String, Vertex> badIndexNodeHash) {
this.badIndexNodeHash = badIndexNodeHash;
+ return this;
}
-
+
public int getBadIndexNodeCount(){
return getBadIndexNodeHash().size();
}
-
- public HashMap<String, Edge> getOneArmedEdgeHash() {
+
+ public Map<String, Edge> getOneArmedEdgeHash() {
return oneArmedEdgeHash;
}
- public void setOneArmedEdgeHash(HashMap<String, Edge> oneArmedEdgeHash) {
+ public int getOneArmedEdgeHashCount() {
+ return getOneArmedEdgeHash().size();
+ }
+
+
+
+ public DataGrooming setOneArmedEdgeHash(Map<String, Edge> oneArmedEdgeHash) {
this.oneArmedEdgeHash = oneArmedEdgeHash;
+ return this;
}
-
- public int getOneArmedEdgeHashCount(){
- return getOneArmedEdgeHash().size();
+
+ public Map<String, Vertex> getGhostNodeHash() {
+ return ghostNodeHash;
}
-
+
+ public DataGrooming setGhostNodeHash(Map<String, Vertex> ghostNodeHash) {
+ this.ghostNodeHash = ghostNodeHash;
+ return this;
+ }
+
+ public int getGhostNodeCount(){
+ return getGhostNodeHash().size();
+ }
+
+ public List<String> getDupeGroups() {
+ return dupeGroups;
+ }
+
+ public DataGrooming setDupeGroups(List<String> dupeGroups) {
+ this.dupeGroups = dupeGroups;
+ return this;
+ }
+
public Set<String> getDeleteCandidateList() {
return deleteCandidateList;
}
- public void setDeleteCandidateList(Set<String> deleteCandidateList) {
+ public DataGrooming setDeleteCandidateList(Set<String> deleteCandidateList) {
this.deleteCandidateList = deleteCandidateList;
+ return this;
}
public int getDeleteCount() {
return deleteCount;
}
- public void setDeleteCount(int deleteCount) {
+ public DataGrooming setDeleteCount(int deleteCount) {
this.deleteCount = deleteCount;
+ return this;
}
-
- public ArrayList<String> getDupeGroups() {
- return dupeGroups;
- }
-
- public void setDupeGroups(ArrayList<String> dupeGroups) {
- this.dupeGroups = dupeGroups;
- }
-
-
}
\ No newline at end of file
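
The accessor block rewritten above turns the DataGrooming setters into fluent setters that return `this`, so callers can chain configuration. An illustrative, self-contained sketch of that style (String values stand in for the Vertex/Edge maps of the real class):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative only: mirrors the fluent setters the hunk above introduces.
    public class FluentSetterSketch {

        private Map<String, String> orphanNodeHash = new HashMap<>();
        private List<String> dupeGroups = new ArrayList<>();
        private int deleteCount = 0;

        public FluentSetterSketch setOrphanNodeHash(Map<String, String> orphanNodeHash) {
            this.orphanNodeHash = orphanNodeHash;
            return this;
        }

        public FluentSetterSketch setDupeGroups(List<String> dupeGroups) {
            this.dupeGroups = dupeGroups;
            return this;
        }

        public FluentSetterSketch setDeleteCount(int deleteCount) {
            this.deleteCount = deleteCount;
            return this;
        }

        public static void main(String[] args) {
            // chained configuration, as the new return type allows
            FluentSetterSketch sketch = new FluentSetterSketch()
                    .setDeleteCount(0)
                    .setDupeGroups(new ArrayList<>())
                    .setOrphanNodeHash(new HashMap<>());
            System.out.println(sketch.deleteCount);
        }
    }
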
diff --git a/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java b/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java
index 24aac9f..f469ef4 100644
--- a/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java
+++ b/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java
@@ -22,21 +22,19 @@ package org.onap.aai.db.schema;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.janusgraph.core.JanusGraph;
import org.janusgraph.core.PropertyKey;
-import org.janusgraph.core.schema.JanusGraphIndex;
import org.janusgraph.core.schema.JanusGraphManagement;
import org.janusgraph.core.schema.JanusGraphManagement.IndexBuilder;
-import org.janusgraph.core.schema.SchemaStatus;
import org.onap.aai.edges.EdgeIngestor;
import org.onap.aai.setup.SchemaVersions;
-import org.onap.aai.setup.SchemaVersion;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
public class ManageJanusGraphSchema {
-
+ protected Logger logger = LoggerFactory.getLogger(ManageJanusGraphSchema.class.getSimpleName());
+
private JanusGraphManagement graphMgmt;
private JanusGraph graph;
private List<DBProperty> aaiProperties;
@@ -45,6 +43,7 @@ public class ManageJanusGraphSchema {
private Auditor oxmInfo = null;
private Auditor graphInfo = null;
+
/**
* Instantiates a new manage JanusGraph schema.
*
@@ -70,81 +69,60 @@ public class ManageJanusGraphSchema {
aaiIndexes.addAll(oxmInfo.getAuditDoc().getIndexes());
aaiEdgeProperties.addAll(oxmInfo.getAuditDoc().getEdgeLabels());
try {
- createPropertyKeys();
+ for (DBProperty prop : aaiProperties) {
+ createProperty(graphMgmt, prop);
+ }
createIndexes();
createEdgeLabels();
} catch (Exception e) {
- e.printStackTrace();
+ logger.info("exception during schema build, executing rollback", e);
graphMgmt.rollback();
}
graphMgmt.commit();
}
/**
- * Creates the property keys.
- */
- private void createPropertyKeys() {
-
-
- for (DBProperty prop : aaiProperties) {
-
- if (graphMgmt.containsPropertyKey(prop.getName())) {
- PropertyKey key = graphMgmt.getPropertyKey(prop.getName());
- boolean isChanged = false;
- if (!prop.getCardinality().equals(key.cardinality())) {
- isChanged = true;
- }
- if (!prop.getTypeClass().equals(key.dataType())) {
- isChanged = true;
- }
- if (isChanged) {
- //must modify!
- this.replaceProperty(prop);
- }
- } else {
- //create a new property key
- System.out.println("Key: " + prop.getName() + " not found - adding");
- graphMgmt.makePropertyKey(prop.getName()).dataType(prop.getTypeClass()).cardinality(prop.getCardinality()).make();
- }
- }
-
- }
-
- /**
* Creates the indexes.
*/
private void createIndexes() {
+ final String IS_NEW = "isNew";
+ final String IS_CHANGED = "isChanged";
for (DBIndex index : aaiIndexes) {
Set<DBProperty> props = index.getProperties();
- boolean isChanged = false;
- boolean isNew = false;
List<PropertyKey> keyList = new ArrayList<>();
for (DBProperty prop : props) {
keyList.add(graphMgmt.getPropertyKey(prop.getName()));
}
- if (graphMgmt.containsGraphIndex(index.getName())) {
- JanusGraphIndex JanusGraphIndex = graphMgmt.getGraphIndex(index.getName());
- PropertyKey[] dbKeys = JanusGraphIndex.getFieldKeys();
- if (dbKeys.length != keyList.size()) {
- isChanged = true;
- } else {
- int i = 0;
- for (PropertyKey key : keyList) {
- if (!dbKeys[i].equals(key)) {
- isChanged = true;
- break;
- }
- i++;
+ Map<String, Boolean> isNewIsChanged = isIndexNewOrChanged(index, keyList, IS_NEW, IS_CHANGED);
+ if (!keyList.isEmpty()) {
+ this.createIndex(graphMgmt, index.getName(), keyList, index.isUnique(), isNewIsChanged.get("isNew"), isNewIsChanged.get("isChanged"));
+ }
+ }
+ }
+
+ private Map<String, Boolean> isIndexNewOrChanged(DBIndex index, List<PropertyKey> keyList, final String IS_NEW, final String IS_CHANGED) {
+ Map<String, Boolean> result = new HashMap<>();
+ result.put(IS_NEW, false);
+ result.put(IS_CHANGED, false);
+ if (graphMgmt.containsGraphIndex(index.getName())) {
+ PropertyKey[] dbKeys = graphMgmt.getGraphIndex(index.getName()).getFieldKeys();
+ if (dbKeys.length != keyList.size()) {
+ result.put(IS_CHANGED, true);
+ } else {
+ int i = 0;
+ for (PropertyKey key : keyList) {
+ if (!dbKeys[i].equals(key)) {
+ result.put(IS_CHANGED, true);
+ break;
}
+ i++;
}
- } else {
- isNew = true;
- }
- if (keyList.size() > 0) {
- this.createIndex(graphMgmt, index.getName(), keyList, index.isUnique(), isNew, isChanged);
}
+ } else {
+ result.put(IS_NEW, true);
}
+ return result;
}
// Use EdgeRules to make sure edgeLabels are defined in the db. NOTE: the multiplicty used here is
@@ -188,7 +166,7 @@ public class ManageJanusGraphSchema {
}
if (isChanged) {
//must modify!
- this.replaceProperty(prop);
+ this.replaceProperty();
}
} else {
//create a new property key
@@ -198,6 +176,13 @@ public class ManageJanusGraphSchema {
}
/**
+ * Replace property.
+ */
+ private void replaceProperty() {
+ //must modify!
+ }
+
+ /**
* Creates the index.
*
* @param mgmt the mgmt
@@ -209,88 +194,27 @@ public class ManageJanusGraphSchema {
*/
private void createIndex(JanusGraphManagement mgmt, String indexName, List<PropertyKey> keys, boolean isUnique, boolean isNew, boolean isChanged) {
- /*if (isChanged) {
- System.out.println("Changing index: " + indexName);
- JanusGraphIndex oldIndex = mgmt.getGraphIndex(indexName);
- mgmt.updateIndex(oldIndex, SchemaAction.DISABLE_INDEX);
- mgmt.commit();
- //cannot remove indexes
- //graphMgmt.updateIndex(oldIndex, SchemaAction.REMOVE_INDEX);
- }*/
- if (isNew || isChanged) {
-
- if (isNew) {
- IndexBuilder builder = mgmt.buildIndex(indexName,Vertex.class);
- for (PropertyKey k : keys) {
- builder.addKey(k);
- }
- if (isUnique) {
- builder.unique();
- }
- builder.buildCompositeIndex();
- System.out.println("Built index for " + indexName + " with keys: " + keys);
-
- //mgmt.commit();
+ if (isNew) {
+ IndexBuilder builder = mgmt.buildIndex(indexName,Vertex.class);
+ for (PropertyKey k : keys) {
+ builder.addKey(k);
}
-
- //mgmt = graph.asAdmin().getManagementSystem();
- //mgmt.updateIndex(mgmt.getGraphIndex(indexName), SchemaAction.REGISTER_INDEX);
- //mgmt.commit();
-
- try {
- //waitForCompletion(indexName);
- //JanusGraphIndexRepair.hbaseRepair(AAIConstants.AAI_CONFIG_FILENAME, indexName, "");
- } catch (Exception e) {
- // TODO Auto-generated catch block
- graph.tx().rollback();
- graph.close();
- e.printStackTrace();
+ if (isUnique) {
+ builder.unique();
}
-
- //mgmt = graph.asAdmin().getManagementSystem();
- //mgmt.updateIndex(mgmt.getGraphIndex(indexName), SchemaAction.REINDEX);
-
- //mgmt.updateIndex(mgmt.getGraphIndex(indexName), SchemaAction.ENABLE_INDEX);
-
- //mgmt.commit();
-
+ builder.buildCompositeIndex();
+ System.out.println("Built index for " + indexName + " with keys: " + keys);
}
- }
+ if (isChanged) {
+ //System.out.println("Changing index: " + indexName);
+ //JanusGraphIndex oldIndex = mgmt.getGraphIndex(indexName);
+ //mgmt.updateIndex(oldIndex, SchemaAction.DISABLE_INDEX);
+ //mgmt.commit();
+ //cannot remove indexes
+ //graphMgmt.updateIndex(oldIndex, SchemaAction.REMOVE_INDEX);
- /**
- * Wait for completion.
- *
- * @param name the name
- * @throws InterruptedException the interrupted exception
- */
- private void waitForCompletion(String name) throws InterruptedException {
+ }
- boolean registered = false;
- long before = System.currentTimeMillis();
- while (!registered) {
- Thread.sleep(500L);
- JanusGraphManagement mgmt = graph.openManagement();
- JanusGraphIndex idx = mgmt.getGraphIndex(name);
- registered = true;
- for (PropertyKey k : idx.getFieldKeys()) {
- SchemaStatus s = idx.getIndexStatus(k);
- registered &= s.equals(SchemaStatus.REGISTERED);
- }
- mgmt.rollback();
- }
- System.out.println("Index REGISTERED in " + (System.currentTimeMillis() - before) + " ms");
- }
-
- /**
- * Replace property.
- *
- * @param key the key
- */
- private void replaceProperty(DBProperty key) {
-
-
-
-
}
/**
@@ -310,11 +234,9 @@ public class ManageJanusGraphSchema {
}
if (mgmt.containsGraphIndex(index.getName())) {
System.out.println("index already exists");
- isNew = false;
isChanged = true;
} else {
isNew = true;
- isChanged = false;
}
this.createIndex(mgmt, index.getName(), keys, index.isUnique(), isNew, isChanged);
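
In ManageJanusGraphSchema the commit swaps `e.printStackTrace()` for an SLF4J logger call that carries the exception, then rolls the management transaction back. A sketch of that log-and-rollback shape, using a hypothetical SchemaTx interface in place of JanusGraphManagement:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Illustrative only: log with the stack trace attached, then roll back,
    // matching the buildSchema() change above.
    public class RollbackSketch {

        private static final Logger logger = LoggerFactory.getLogger(RollbackSketch.class);

        interface SchemaTx {           // hypothetical stand-in for JanusGraphManagement
            void apply() throws Exception;
            void rollback();
            void commit();
        }

        static void buildSchema(SchemaTx mgmt) {
            try {
                mgmt.apply();          // create properties, indexes, edge labels ...
            } catch (Exception e) {
                logger.info("exception during schema build, executing rollback", e);
                mgmt.rollback();
                return;                // sketch-only simplification; the hunk above falls through to commit()
            }
            mgmt.commit();
        }
    }
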
diff --git a/src/main/java/org/onap/aai/dbgen/DupeTool.java b/src/main/java/org/onap/aai/dbgen/DupeTool.java
index 4164ee8..d8d3ce0 100644
--- a/src/main/java/org/onap/aai/dbgen/DupeTool.java
+++ b/src/main/java/org/onap/aai/dbgen/DupeTool.java
@@ -260,10 +260,10 @@ public class DupeTool {
logger.debug(msg);
// Determine what the key fields are for this nodeType (and we want them ordered)
- ArrayList<String> keyPropNamesArr = new ArrayList<String>(obj.getKeys());
+ ArrayList<String> keyPropNamesArr = new ArrayList<>(obj.getKeys());
// Determine what kinds of nodes (if any) this nodeType is dependent on for uniqueness
- ArrayList<String> depNodeTypeList = new ArrayList<String>();
+ ArrayList<String> depNodeTypeList = new ArrayList<>();
Collection<String> depNTColl = obj.getDependentOn();
Iterator<String> ntItr = depNTColl.iterator();
while (ntItr.hasNext()) {
@@ -274,7 +274,7 @@ public class DupeTool {
System.out.println(" ---- NOTE --- about to open graph (takes a little while)--------\n");
graph1 = setupGraph(logger);
gt1 = getGraphTransaction(graph1, logger);
- ArrayList<Vertex> verts2Check = new ArrayList<Vertex>();
+ ArrayList<Vertex> verts2Check = new ArrayList<>();
try {
verts2Check = figureOutNodes2Check(TRANSID, FROMAPPID, gt1,
nodeTypeVal, windowStartTime, filterParams, logger);
@@ -299,8 +299,8 @@ public class DupeTool {
System.out.println(msg);
}
- ArrayList<String> firstPassDupeSets = new ArrayList<String>();
- ArrayList<String> secondPassDupeSets = new ArrayList<String>();
+ ArrayList<String> firstPassDupeSets = new ArrayList<>();
+ ArrayList<String> secondPassDupeSets = new ArrayList<>();
Boolean isDependentOnParent = false;
if (!obj.getDependentOn().isEmpty()) {
isDependentOnParent = true;
@@ -332,7 +332,7 @@ public class DupeTool {
}
dupeGroupCount = firstPassDupeSets.size();
boolean didSomeDeletesFlag = false;
- ArrayList<String> dupeSetsToFix = new ArrayList<String>();
+ ArrayList<String> dupeSetsToFix = new ArrayList<>();
if (autoFix && firstPassDupeSets.size() == 0) {
msg = "AutoFix option is on, but no dupes were found on the first pass. Nothing to fix.";
logger.debug(msg);
@@ -500,7 +500,8 @@ public class DupeTool {
* @param version the version
* @param nType the n type
* @param passedVertList the passed vert list
- * @param dbMaps the db maps
+ * @param loader the loader
+ * @param logger the logger
* @return the array list
*/
private ArrayList<String> getDupeSets4NonDepNodes(String transId,
@@ -509,7 +510,7 @@ public class DupeTool {
ArrayList<String> keyPropNamesArr,
Boolean specialTenantRule, Loader loader, Logger logger) {
- ArrayList<String> returnList = new ArrayList<String>();
+ ArrayList<String> returnList = new ArrayList<>();
// We've been passed a set of nodes that we want to check.
// They are all NON-DEPENDENT nodes meaning that they should be
@@ -525,8 +526,8 @@ public class DupeTool {
// or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
// thought the third one was the one that should survive)
- HashMap<String, ArrayList<String>> keyVals2VidHash = new HashMap<String, ArrayList<String>>();
- HashMap<String, Vertex> vtxHash = new HashMap<String, Vertex>();
+ HashMap<String, ArrayList<String>> keyVals2VidHash = new HashMap<>();
+ HashMap<String, Vertex> vtxHash = new HashMap<>();
Iterator<Vertex> pItr = passedVertList.iterator();
while (pItr.hasNext()) {
try {
@@ -538,12 +539,12 @@ public class DupeTool {
String hKey = getNodeKeyValString(tvx, keyPropNamesArr, logger);
if (keyVals2VidHash.containsKey(hKey)) {
// We've already seen this key
- ArrayList<String> tmpVL = (ArrayList<String>) keyVals2VidHash.get(hKey);
+ ArrayList<String> tmpVL = keyVals2VidHash.get(hKey);
tmpVL.add(thisVid);
keyVals2VidHash.put(hKey, tmpVL);
} else {
// First time for this key
- ArrayList<String> tmpVL = new ArrayList<String>();
+ ArrayList<String> tmpVL = new ArrayList<>();
tmpVL.add(thisVid);
keyVals2VidHash.put(hKey, tmpVL);
}
@@ -558,7 +559,7 @@ public class DupeTool {
if (!vidList.isEmpty() && vidList.size() > 1) {
// There are more than one vertex id's using the same key info
String dupesStr = "";
- ArrayList<Vertex> vertList = new ArrayList<Vertex>();
+ ArrayList<Vertex> vertList = new ArrayList<>();
for (int i = 0; i < vidList.size(); i++) {
String tmpVid = vidList.get(i);
dupesStr = dupesStr + tmpVid + "|";
@@ -597,10 +598,9 @@ public class DupeTool {
* @param version the version
* @param nType the n type
* @param passedVertList the passed vert list
- * @param dbMaps the db maps
* @param keyPropNamesArr Array (ordered) of keyProperty names
* @param specialTenantRule flag
- * @param Logger the logger
+ * @param logger the logger
* @return the array list
*/
private ArrayList<String> getDupeSets4DependentNodes(String transId,
@@ -611,8 +611,8 @@ public class DupeTool {
// This is for nodeTypes that DEPEND ON A PARENT NODE FOR UNIQUNESS
- ArrayList<String> returnList = new ArrayList<String>();
- ArrayList<String> alreadyFoundDupeVidArr = new ArrayList<String>();
+ ArrayList<String> returnList = new ArrayList<>();
+ ArrayList<String> alreadyFoundDupeVidArr = new ArrayList<>();
// We've been passed a set of nodes that we want to check. These are
// all nodes that ARE DEPENDENT on a PARENT Node for uniqueness.
@@ -628,7 +628,7 @@ public class DupeTool {
// couldn't figure out which one to keep)
// or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
// thought the third one was the one that should survive)
- HashMap<String, Object> checkVertHash = new HashMap<String, Object>();
+ HashMap<String, Object> checkVertHash = new HashMap<>();
try {
Iterator<Vertex> pItr = passedVertList.iterator();
while (pItr.hasNext()) {
@@ -785,7 +785,7 @@ public class DupeTool {
public ArrayList<String> collectEdgeInfoForNode(Logger logger, Vertex tVert, boolean displayAllVidsFlag) {
- ArrayList<String> retArr = new ArrayList<String>();
+ ArrayList<String> retArr = new ArrayList<>();
Direction dir = Direction.OUT;
for (int i = 0; i <= 1; i++) {
if (i == 1) {
@@ -808,7 +808,7 @@ public class DupeTool {
vtx = ed.outVertex();
}
if (vtx == null) {
- retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+ retArr.add(String.format(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = %s <<< ", ed.id()));
} else {
String nType = vtx.<String>property("aai-node-type").orElse(null);
if (displayAllVidsFlag) {
@@ -858,13 +858,13 @@ public class DupeTool {
public ArrayList<Vertex> getNodeJustUsingKeyParams(String transId, String fromAppId, Graph graph, String nodeType,
HashMap<String, Object> keyPropsHash, String apiVersion, Logger logger) throws AAIException {
- ArrayList<Vertex> retVertList = new ArrayList<Vertex>();
+ ArrayList<Vertex> retVertList = new ArrayList<>();
// We assume that all NodeTypes have at least one key-property defined.
// Note - instead of key-properties (the primary key properties), a user could pass
// alternate-key values if they are defined for the nodeType.
- ArrayList<String> kName = new ArrayList<String>();
- ArrayList<Object> kVal = new ArrayList<Object>();
+ ArrayList<String> kName = new ArrayList<>();
+ ArrayList<Object> kVal = new ArrayList<>();
if (keyPropsHash == null || keyPropsHash.isEmpty()) {
throw new AAIException("AAI_6120", " NO key properties passed for this getNodeJustUsingKeyParams() request. NodeType = [" + nodeType + "]. ");
}
@@ -914,8 +914,7 @@ public class DupeTool {
}
if (retVertList.size() == 0) {
- logger.debug("DEBUG No node found for nodeType = [" + nodeType +
- "], propsAndVal = " + propsAndValuesForMsg);
+ logger.debug(String.format("DEBUG No node found for nodeType = [%s], propsAndVal = %s", nodeType, propsAndValuesForMsg));
}
return retVertList;
@@ -931,8 +930,8 @@ public class DupeTool {
* @param graph the graph
* @param nodeType the node type
* @param windowStartTime the window start time
- * @param propsHash the props hash
- * @param apiVersion the api version
+ * @param propsString the props hash
+ * @param logger the logger
* @return the nodes
* @throws AAIException the AAI exception
*/
@@ -940,7 +939,7 @@ public class DupeTool {
Graph graph, String nodeType, long windowStartTime,
String propsString, Logger logger) throws AAIException {
- ArrayList<Vertex> retVertList = new ArrayList<Vertex>();
+ ArrayList<Vertex> retVertList = new ArrayList<>();
String msg = "";
GraphTraversal<Vertex, Vertex> tgQ = graph.traversal().V().has("aai-node-type", nodeType);
String qStringForMsg = "graph.traversal().V().has(\"aai-node-type\"," + nodeType + ")";
@@ -1002,7 +1001,7 @@ public class DupeTool {
}
if (retVertList.size() == 0) {
- logger.debug("DEBUG No node found for: [" + qStringForMsg + ", with aai-created-ts > " + windowStartTime);
+ logger.debug(String.format("DEBUG No node found for: [%s, with aai-created-ts > %d", qStringForMsg, windowStartTime));
}
return retVertList;
@@ -1018,7 +1017,8 @@ public class DupeTool {
* @param g the g
* @param dupeVertexList the dupe vertex list
* @param ver the ver
- * @param Logger the logger
+ * @param loader the loader
+ * @param logger the logger
* @return Vertex
* @throws AAIException the AAI exception
*/
@@ -1098,14 +1098,15 @@ public class DupeTool {
/**
* Pick one of two dupes.
*
- * @param transId the trans id
- * @param fromAppId the from app id
- * @param g the graphTraversalSource
- * @param vtxA the vtx A
- * @param vtxB the vtx B
- * @param ver the ver
- * @param boolean specialTenantRuleFlag flag
- * @param Logger the logger
+ * @param transId the trans id
+ * @param fromAppId the from app id
+ * @param gts the graphTraversalSource
+ * @param vtxA the vtx A
+ * @param vtxB the vtx B
+ * @param ver the ver
+ * @param specialTenantRule specialTenantRuleFlag flag
+ * @param loader the loader
+ * @param logger the logger
* @return Vertex
* @throws AAIException the AAI exception
*/
@@ -1164,12 +1165,12 @@ public class DupeTool {
// Collect the vid's and aai-node-types of the vertices that each vertex
// (A and B) is connected to.
- ArrayList<String> vtxIdsConn2A = new ArrayList<String>();
- ArrayList<String> vtxIdsConn2B = new ArrayList<String>();
- HashMap<String, String> nodeTypesConn2A = new HashMap<String, String>();
- HashMap<String, String> nodeTypesConn2B = new HashMap<String, String>();
+ ArrayList<String> vtxIdsConn2A = new ArrayList<>();
+ ArrayList<String> vtxIdsConn2B = new ArrayList<>();
+ HashMap<String, String> nodeTypesConn2A = new HashMap<>();
+ HashMap<String, String> nodeTypesConn2B = new HashMap<>();
- ArrayList<String> retArr = new ArrayList<String>();
+ ArrayList<String> retArr = new ArrayList<>();
Iterator<Edge> eAI = vtxA.edges(Direction.BOTH);
while (eAI.hasNext()) {
Edge ed = eAI.next();
@@ -1348,7 +1349,7 @@ public class DupeTool {
* @param version the version
* @param nType the n type
* @param passedVertList the passed vert list
- * @param dbMaps the db maps
+ * @param loader the loader
* @return the hash map
* @throws AAIException the AAI exception
*/
@@ -1391,8 +1392,7 @@ public class DupeTool {
}// end of groupVertsByDepNodes()
- private Vertex getConnectedParent(GraphTraversalSource g,
- Vertex startVtx) throws AAIException {
+ private Vertex getConnectedParent(GraphTraversalSource g, Vertex startVtx) {
Vertex parentVtx = null;
// This traversal does not assume a parent/child edge direction
@@ -1436,9 +1436,9 @@ public class DupeTool {
/**
* Delete non keepers if appropriate.
*
- * @param g the g
- * @param dupeSetStr the dupe string
- * @param logger the Logger
+ * @param g the g
+ * @param dupeInfoString the dupe string
+ * @param logger the Logger
* @return the boolean
*/
private Boolean deleteNonKeeperForOneSet(Graph g,
@@ -1451,7 +1451,7 @@ public class DupeTool {
String[] dupeArr = dupeInfoString.split("\\|");
- ArrayList<String> idArr = new ArrayList<String>();
+ ArrayList<String> idArr = new ArrayList<>();
int lastIndex = dupeArr.length - 1;
for (int i = 0; i <= lastIndex; i++) {
if (i < lastIndex) {
@@ -1525,14 +1525,14 @@ public class DupeTool {
* Get values of the key properties for a node.
*
* @param tvx the vertex to pull the properties from
- * @param keyPropertyNames ArrayList (ordered) of key prop names
+ * @param keyPropNamesArr ArrayList (ordered) of key prop names
* @param logger the Logger
* @return a hashMap of the propertyNames/values
*/
private HashMap<String, Object> getNodeKeyVals(Vertex tvx,
ArrayList<String> keyPropNamesArr, Logger logger) {
- HashMap<String, Object> retHash = new HashMap<String, Object>();
+ HashMap<String, Object> retHash = new HashMap<>();
Iterator<String> propItr = keyPropNamesArr.iterator();
while (propItr.hasNext()) {
String propName = propItr.next();
@@ -1549,25 +1549,21 @@ public class DupeTool {
/**
* makes sure aai-uri exists and can be used to get this node back
- *
- * @param transId the trans id
- * @param fromAppId the from app id
+ *
* @param graph the graph
- * @param vtx
- * @param Logger
+ * @param origVtx the original vertex
+ * @param eLogger the logger
* @return true if aai-uri is populated and the aai-uri-index points to this vtx
* @throws AAIException the AAI exception
*/
- private Boolean checkAaiUriOk( GraphTraversalSource graph, Vertex origVtx, Logger eLogger )
- throws AAIException{
+ private Boolean checkAaiUriOk( GraphTraversalSource graph, Vertex origVtx, Logger eLogger ) {
String aaiUriStr = "";
try {
Object ob = origVtx.<Object>property("aai-uri").orElse(null);
String origVid = origVtx.id().toString();
if (ob == null || ob.toString().equals("")) {
// It is missing its aai-uri
- eLogger.debug("DEBUG No [aai-uri] property found for vid = ["
- + origVid + "] " );
+ eLogger.debug(String.format("DEBUG No [aai-uri] property found for vid = [%s] ", origVid));
return false;
}
else {
@@ -1579,24 +1575,16 @@ public class DupeTool {
Vertex foundV = verts.next();
String foundVid = foundV.id().toString();
if( !origVid.equals(foundVid) ){
- eLogger.debug("DEBUG aai-uri key property ["
- + aaiUriStr + "] for vid = ["
- + origVid + "] brought back different vertex with vid = ["
- + foundVid + "]." );
+ eLogger.debug(String.format("DEBUG aai-uri key property [%s] for vid = [%s] brought back different vertex with vid = [%s].", aaiUriStr, origVid, foundVid));
return false;
}
}
if( count == 0 ){
- eLogger.debug("DEBUG aai-uri key property ["
- + aaiUriStr + "] for vid = ["
- + origVid + "] could not be used to query for that vertex. ");
+ eLogger.debug(String.format("DEBUG aai-uri key property [%s] for vid = [%s] could not be used to query for that vertex. ", aaiUriStr, origVid));
return false;
}
else if( count > 1 ){
- eLogger.debug("DEBUG aai-uri key property ["
- + aaiUriStr + "] for vid = ["
- + origVid + "] brought back multiple ("
- + count + ") vertices instead of just one. ");
+ eLogger.debug(String.format("DEBUG aai-uri key property [%s] for vid = [%s] brought back multiple (%d) vertices instead of just one. ", aaiUriStr, origVid, count));
return false;
}
}
@@ -1613,7 +1601,7 @@ public class DupeTool {
* Get values of the key properties for a node as a single string
*
* @param tvx the vertex to pull the properties from
- * @param keyPropertyNames collection of key prop names
+ * @param keyPropNamesArr collection of key prop names
* @param logger the Logger
* @return a String of concatenated values
*/
@@ -1642,13 +1630,13 @@ public class DupeTool {
*
* @param firstPassDupeSets from the first pass
* @param secondPassDupeSets from the second pass
- * @param Logger logger
+ * @param logger logger
* @return commonDupeSets that are common to both passes and have a determined keeper
*/
private ArrayList<String> figureWhichDupesStillNeedFixing(ArrayList<String> firstPassDupeSets,
ArrayList<String> secondPassDupeSets, Logger logger) {
- ArrayList<String> common2BothSet = new ArrayList<String>();
+ ArrayList<String> common2BothSet = new ArrayList<>();
// We just want to look for entries from the first set which have identical (almost)
// entries in the secondary set. I say "almost" because the order of the
@@ -1740,7 +1728,7 @@ public class DupeTool {
private HashMap<String, ArrayList<String>> makeKeeperHashOfDupeStrings(ArrayList<String> dupeSets,
ArrayList<String> excludeSets, Logger logger) {
- HashMap<String, ArrayList<String>> keeperHash = new HashMap<String, ArrayList<String>>();
+ HashMap<String, ArrayList<String>> keeperHash = new HashMap<>();
for (int x = 0; x < dupeSets.size(); x++) {
String tmpSetStr = dupeSets.get(x);
@@ -1750,7 +1738,7 @@ public class DupeTool {
}
String[] dupeArr = tmpSetStr.split("\\|");
- ArrayList<String> delIdArr = new ArrayList<String>();
+ ArrayList<String> delIdArr = new ArrayList<>();
int lastIndex = dupeArr.length - 1;
for (int i = 0; i <= lastIndex; i++) {
if (i < lastIndex) {
@@ -1842,7 +1830,7 @@ public class DupeTool {
public JanusGraph setupGraph(Logger logger) {
- JanusGraph JanusGraph = null;
+ JanusGraph janusGraph = null;
try (InputStream inputStream = new FileInputStream(AAIConstants.REALTIME_DB_CONFIG);) {
@@ -1851,17 +1839,17 @@ public class DupeTool {
properties.load(inputStream);
if ("inmemory".equals(properties.get("storage.backend"))) {
- JanusGraph = AAIGraph.getInstance().getGraph();
+ janusGraph = AAIGraph.getInstance().getGraph();
graphType = "inmemory";
} else {
- JanusGraph = JanusGraphFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(DupeTool.class.getSimpleName()).withGraphType("realtime" + graphIndex).buildConfiguration());
+ janusGraph = JanusGraphFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(DupeTool.class.getSimpleName()).withGraphType("realtime" + graphIndex).buildConfiguration());
graphIndex++;
}
} catch (Exception e) {
logger.error("Unable to open the graph", e);
}
- return JanusGraph;
+ return janusGraph;
}
public void closeGraph(JanusGraph graph, Logger logger) {
@@ -1879,7 +1867,7 @@ public class DupeTool {
logger.warn("WARNING from final graph.shutdown()", ex);
}
}
-
+
public int getDupeGroupCount() {
return dupeGroupCount;
}
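
Most of the DupeTool edits above fall into three mechanical patterns: the diamond operator for generic instantiations, a single String.format template instead of chained concatenation in debug messages, and lowerCamelCase locals such as janusGraph that no longer shadow the type name. The following is a hypothetical sketch combining the three; the class, method, and variable names are illustrative, not part of the patch.

    import java.util.ArrayList;
    import java.util.List;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical illustration of the refactoring patterns applied across DupeTool.
    public class DupeToolStyleExample {

        private static final Logger LOGGER = LoggerFactory.getLogger(DupeToolStyleExample.class);

        public List<String> findNodes(String nodeType, long windowStartTime) {
            // Diamond operator: the element type is inferred from the declaration.
            List<String> retVertList = new ArrayList<>();

            // Locals stay lowerCamelCase so they do not collide with type names
            // (compare the JanusGraph -> janusGraph rename in setupGraph()).
            String qStringForMsg = "graph.traversal().V().has(\"aai-node-type\"," + nodeType + ")";

            if (retVertList.isEmpty()) {
                // One format template instead of a chain of string concatenations.
                LOGGER.debug(String.format("DEBUG No node found for: [%s], with aai-created-ts > %d",
                        qStringForMsg, windowStartTime));
            }
            return retVertList;
        }
    }
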
diff --git a/src/main/java/org/onap/aai/util/ExceptionTranslator.java b/src/main/java/org/onap/aai/util/ExceptionTranslator.java
index 17c5667..e8c9ede 100644
--- a/src/main/java/org/onap/aai/util/ExceptionTranslator.java
+++ b/src/main/java/org/onap/aai/util/ExceptionTranslator.java
@@ -22,19 +22,23 @@ package org.onap.aai.util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.onap.aai.GraphAdminApp;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.logging.LogFormatTools;
public class ExceptionTranslator {
private static final Logger LOGGER = LoggerFactory.getLogger(ExceptionTranslator.class);
+
+ private ExceptionTranslator() {
+
+ }
+
public static AAIException schemaServiceExceptionTranslator(Exception ex) {
- AAIException aai = null;
+ AAIException aai;
if ( ExceptionUtils.getRootCause(ex) == null || ExceptionUtils.getRootCause(ex).getMessage() == null ) {
aai = new AAIException("AAI_3025","Error parsing exception - Please Investigate" +
LogFormatTools.getStackTop(ex));
} else {
- LOGGER.info("Exception is " + ExceptionUtils.getRootCause(ex).getMessage() + "Root cause is"+ ExceptionUtils.getRootCause(ex).toString());
+ LOGGER.info(String.format("Exception is %sRoot cause is%s", ExceptionUtils.getRootCause(ex).getMessage(), ExceptionUtils.getRootCause(ex).toString()));
if(ExceptionUtils.getRootCause(ex).getMessage().contains("NodeIngestor")){
aai = new AAIException("AAI_3026","Error reading OXM from SchemaService - Investigate");
}
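
The ExceptionTranslator change adds a private constructor so the class cannot be instantiated (the finding commonly reported as Sonar rule S1118 for utility classes with only static members) and drops the redundant null initialisation of the local AAIException, letting the compiler enforce definite assignment. A hypothetical minimal example of the same shape; the class and method names here are illustrative:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical utility class showing the "hide the implicit public constructor" fix.
    public final class TranslatorLikeUtility {

        private static final Logger LOGGER = LoggerFactory.getLogger(TranslatorLikeUtility.class);

        // Only static methods are exposed, so the implicit public constructor
        // is replaced with a private one that is never called.
        private TranslatorLikeUtility() {
        }

        public static String describe(Exception ex) {
            Throwable root = (ex.getCause() != null) ? ex.getCause() : ex;
            LOGGER.info("Root cause is {}", root.toString());
            return root.getMessage();
        }
    }
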
diff --git a/src/main/java/org/onap/aai/util/GraphAdminDBUtils.java b/src/main/java/org/onap/aai/util/GraphAdminDBUtils.java
index 202bc0a..bc2810b 100644
--- a/src/main/java/org/onap/aai/util/GraphAdminDBUtils.java
+++ b/src/main/java/org/onap/aai/util/GraphAdminDBUtils.java
@@ -26,14 +26,18 @@ import org.slf4j.LoggerFactory;
public class GraphAdminDBUtils {
- private static Logger LOGGER = LoggerFactory.getLogger(GraphAdminDBUtils.class);
+ private static Logger logger = LoggerFactory.getLogger(GraphAdminDBUtils.class);
+
+ private GraphAdminDBUtils() {
+
+ }
public static void logConfigs(org.apache.commons.configuration.Configuration configuration) {
if (configuration != null && configuration.getKeys() != null) {
Iterator<String> keys = configuration.getKeys();
keys.forEachRemaining(
- key -> LOGGER.info("Key is " + key + "Value is " + configuration.getProperty(key).toString()));
+ key -> logger.info("Key is " + key + "Value is " + configuration.getProperty(key).toString()));
}
}
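
The lambda kept above still builds its message by concatenation; SLF4J's placeholder syntax defers that work until INFO is actually enabled and also tolerates a null property value. A sketch of logConfigs() in that style, against the org.apache.commons.configuration.Configuration type the method already takes; the class name is illustrative:

    import java.util.Iterator;

    import org.apache.commons.configuration.Configuration;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical variant of logConfigs() using SLF4J parameterized messages.
    public final class ConfigLoggingExample {

        private static final Logger logger = LoggerFactory.getLogger(ConfigLoggingExample.class);

        private ConfigLoggingExample() {
            // utility class: no instances
        }

        public static void logConfigs(Configuration configuration) {
            if (configuration != null && configuration.getKeys() != null) {
                Iterator<String> keys = configuration.getKeys();
                // "{}" placeholders are rendered only when INFO logging is enabled.
                keys.forEachRemaining(
                    key -> logger.info("Key is {} Value is {}", key, configuration.getProperty(key)));
            }
        }
    }
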
diff --git a/src/main/java/org/onap/aai/util/PositiveNumValidator.java b/src/main/java/org/onap/aai/util/PositiveNumValidator.java
index ee58f55..3506dd9 100644
--- a/src/main/java/org/onap/aai/util/PositiveNumValidator.java
+++ b/src/main/java/org/onap/aai/util/PositiveNumValidator.java
@@ -25,7 +25,7 @@ import com.beust.jcommander.ParameterException;
public class PositiveNumValidator implements IParameterValidator {
@Override
- public void validate(String name, String value) throws ParameterException {
+ public void validate(String name, String value) {
int num = Integer.parseInt(value);
if(num < 0) {
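
The validate() change is possible because JCommander's ParameterException is unchecked, so the implementing method does not need the explicit throws clause removed above. A hypothetical validator of the same shape, with an illustrative class name and message:

    import com.beust.jcommander.IParameterValidator;
    import com.beust.jcommander.ParameterException;

    // Hypothetical JCommander validator mirroring PositiveNumValidator's structure.
    public class NonNegativeValidator implements IParameterValidator {

        @Override
        public void validate(String name, String value) {
            int num = Integer.parseInt(value);
            if (num < 0) {
                // ParameterException is a RuntimeException, so no throws clause is needed.
                throw new ParameterException("Parameter " + name + " should be a positive number (found " + value + ")");
            }
        }
    }
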
diff --git a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotifications.java b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotifications.java
index 36d01e1..865dc27 100644
--- a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotifications.java
+++ b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotifications.java
@@ -51,7 +51,6 @@ public class SendDeleteMigrationNotifications {
private String config;
private String path;
- private Set<String> notifyOn;
long sleepInMilliSecs;
int numToBatch;
private String requestId;
@@ -68,7 +67,7 @@ public class SendDeleteMigrationNotifications {
protected final SchemaVersions schemaVersions;
protected final SchemaVersion version;
- public SendDeleteMigrationNotifications(LoaderFactory loaderFactory, SchemaVersions schemaVersions, String config, String path, Set<String> notifyOn, int sleepInMilliSecs, int numToBatch, String requestId, EventAction eventAction, String eventSource) {
+ public SendDeleteMigrationNotifications(LoaderFactory loaderFactory, SchemaVersions schemaVersions, String config, String path, int sleepInMilliSecs, int numToBatch, String requestId, EventAction eventAction, String eventSource) {
System.setProperty("aai.service.name", SendDeleteMigrationNotifications.class.getSimpleName());
Properties props = System.getProperties();
props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml");
@@ -78,7 +77,6 @@ public class SendDeleteMigrationNotifications {
this.config = config;
this.path = path;
- this.notifyOn = notifyOn;
this.sleepInMilliSecs = sleepInMilliSecs;
this.numToBatch = numToBatch;
this.requestId = requestId;
@@ -90,11 +88,11 @@ public class SendDeleteMigrationNotifications {
initGraph();
initFields();
-
+
}
- public void process(String basePath) throws Exception {
+ public void process(String basePath) {
try {
Map<Integer, String> deleteDataMap = processFile();
@@ -124,8 +122,7 @@ public class SendDeleteMigrationNotifications {
}
cleanup();
} catch (Exception e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
+ logger.warn("Exception caught during SendDeleteMigrationNotifications.process()", e);
}
}
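
Besides dropping the unused notifyOn field and constructor parameter, the change above replaces e.printStackTrace() with a logger call, which routes the stack trace through the configured appenders instead of stderr, and lets process() stop declaring throws Exception. A hypothetical sketch of that handling pattern; the class and helper names are illustrative:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical sketch: log caught exceptions instead of printStackTrace().
    public class MigrationStep {

        private static final Logger logger = LoggerFactory.getLogger(MigrationStep.class);

        public void process(String basePath) {
            try {
                runBatches(basePath); // hypothetical helper standing in for the real work
            } catch (Exception e) {
                // The full stack trace goes to the configured log files with context.
                logger.warn("Exception caught during MigrationStep.process()", e);
            }
        }

        private void runBatches(String basePath) {
            // placeholder for the batch/notification logic
        }
    }
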
diff --git a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
index 0fbe520..0bd254f 100644
--- a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
+++ b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
@@ -65,7 +65,7 @@ public class SendDeleteMigrationNotificationsMain {
EventAction action = EventAction.valueOf(cArgs.eventAction.toUpperCase());
- SendDeleteMigrationNotifications internal = new SendDeleteMigrationNotifications(loaderFactory, schemaVersions, cArgs.config, cArgs.file, new HashSet<>(cArgs.notifyOn), cArgs.sleepInMilliSecs, cArgs.numToBatch, requestId, action, cArgs.eventSource);
+ SendDeleteMigrationNotifications internal = new SendDeleteMigrationNotifications(loaderFactory, schemaVersions, cArgs.config, cArgs.file, cArgs.sleepInMilliSecs, cArgs.numToBatch, requestId, action, cArgs.eventSource);
try {
internal.process(basePath);
diff --git a/src/main/java/org/onap/aai/util/SendMigrationNotifications.java b/src/main/java/org/onap/aai/util/SendMigrationNotifications.java
index c51de1e..dfe2649 100644
--- a/src/main/java/org/onap/aai/util/SendMigrationNotifications.java
+++ b/src/main/java/org/onap/aai/util/SendMigrationNotifications.java
@@ -54,8 +54,8 @@ public class SendMigrationNotifications {
private String config;
private String path;
private Set<String> notifyOn;
- long sleepInMilliSecs;
- int numToBatch;
+ private long sleepInMilliSecs;
+ private int numToBatch;
private String requestId;
private EventAction eventAction;
private String eventSource;
@@ -108,25 +108,22 @@ public class SendMigrationNotifications {
vertexes = g.V(entry.getKey()).toList();
if (vertexes == null || vertexes.isEmpty()) {
logAndPrint("Vertex " + entry.getKey() + " no longer exists." );
- continue;
} else if (vertexes.size() > 1) {
logAndPrint("Vertex " + entry.getKey() + " query returned " + vertexes.size() + " vertexes." );
- continue;
} else {
logger.debug("Processing " + entry.getKey() + "resource-version " + entry.getValue());
v = vertexes.get(0);
- if (notifyOn.isEmpty() || notifyOn.contains(v.value(AAIProperties.NODE_TYPE).toString())) {
- if (entry.getValue().equals(v.value(AAIProperties.RESOURCE_VERSION).toString())) {
- Introspector introspector = serializer.getLatestVersionView(v);
- uri = this.serializer.getURIForVertex(v, false);
- this.notificationHelper.addEvent(v, introspector, eventAction, uri, basePath);
- count++;
- if (count >= this.numToBatch) {
- trigger();
- logger.debug("Triggered " + entry.getKey());
- count = 0;
- Thread.sleep(this.sleepInMilliSecs);
- }
+ if ((notifyOn.isEmpty() || notifyOn.contains(v.value(AAIProperties.NODE_TYPE).toString()))
+ && entry.getValue().equals(v.value(AAIProperties.RESOURCE_VERSION).toString())) {
+ Introspector introspector = serializer.getLatestVersionView(v);
+ uri = this.serializer.getURIForVertex(v, false);
+ this.notificationHelper.addEvent(v, introspector, eventAction, uri, basePath);
+ count++;
+ if (count >= this.numToBatch) {
+ trigger();
+ logger.debug("Triggered " + entry.getKey());
+ count = 0;
+ Thread.sleep(this.sleepInMilliSecs);
}
}
}
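
The SendMigrationNotifications rewrite above removes continue statements that were the last statements in their branches and collapses the two nested ifs into one compound condition. A hypothetical, graph-free sketch of that control-flow shape; all names are illustrative:

    import java.util.List;

    // Hypothetical sketch of the simplified if/else-if chain.
    public class NotificationFilterExample {

        public void handle(List<String> vertexes, boolean typeWanted, boolean versionCurrent) {
            if (vertexes == null || vertexes.isEmpty()) {
                log("vertex no longer exists");           // nothing follows, so no continue is needed
            } else if (vertexes.size() > 1) {
                log("query returned " + vertexes.size() + " vertexes");
            } else if (typeWanted && versionCurrent) {    // merged condition replaces the nested ifs
                notifyFor(vertexes.get(0));
            }
        }

        private void log(String msg) {
            System.out.println(msg);
        }

        private void notifyFor(String vertexId) {
            // placeholder for the addEvent()/trigger() batching logic
        }
    }
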
diff --git a/src/main/java/org/onap/aai/util/UniquePropertyCheck.java b/src/main/java/org/onap/aai/util/UniquePropertyCheck.java
index 2db3dd5..8ef5139 100644
--- a/src/main/java/org/onap/aai/util/UniquePropertyCheck.java
+++ b/src/main/java/org/onap/aai/util/UniquePropertyCheck.java
@@ -62,13 +62,11 @@ public class UniquePropertyCheck {
Logger logger = LoggerFactory.getLogger(UniquePropertyCheck.class);
MDC.put("logFilenameAppender", UniquePropertyCheck.class.getSimpleName());
AaiScheduledTaskAuditLog auditLog = new AaiScheduledTaskAuditLog();
- auditLog.logBefore("UniquePropertyCheck", ONAPComponents.AAI.toString());
+ auditLog.logBefore(COMPONENT, ONAPComponents.AAI.toString());
if( args == null || args.length != 1 ){
String msg = "usage: UniquePropertyCheck propertyName \n";
System.out.println(msg);
- //LoggingContext.statusCode(StatusCode.ERROR);
- //LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
logAndPrint(logger, msg );
System.exit(1);
}
@@ -81,31 +79,23 @@ public class UniquePropertyCheck {
JanusGraph tGraph = JanusGraphFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(UniquePropertyCheck.class.getSimpleName()).withGraphType("realtime").buildConfiguration());
if( tGraph == null ) {
- //LoggingContext.statusCode(StatusCode.ERROR);
- //LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
logAndPrint(logger, " Error: Could not get JanusGraph ");
System.exit(1);
}
graph = tGraph.newTransaction();
if( graph == null ){
- //LoggingContext.statusCode(StatusCode.ERROR);
- //LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
logAndPrint(logger, "could not get graph object in UniquePropertyCheck() \n");
System.exit(0);
}
}
catch (AAIException e1) {
String msg = "Threw Exception: [" + e1.toString() + "]";
- //LoggingContext.statusCode(StatusCode.ERROR);
- //LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
logAndPrint(logger, msg);
System.exit(0);
}
catch (Exception e2) {
String msg = "Threw Exception: [" + e2.toString() + "]";
- //LoggingContext.statusCode(StatusCode.ERROR);
- //LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
logAndPrint(logger, msg);
System.exit(0);
}
@@ -135,8 +125,8 @@ public class UniquePropertyCheck {
// tool looks across all nodeTypes that the property is found in.
Boolean foundDupesFlag = false;
- HashMap <String,String> valuesAndVidHash = new HashMap <String, String> ();
- HashMap <String,String> dupeHash = new HashMap <String, String> ();
+ HashMap <String,String> valuesAndVidHash = new HashMap <> ();
+ HashMap <String,String> dupeHash = new HashMap <> ();
int propCount = 0;
int dupeCount = 0;
@@ -177,7 +167,7 @@ public class UniquePropertyCheck {
while( dupeItr.hasNext() ){
foundDupesFlag = true;
Map.Entry pair = (Map.Entry) dupeItr.next();
- String dupeValue = pair.getKey().toString();;
+ String dupeValue = pair.getKey().toString();
String vidsStr = pair.getValue().toString();
String[] vidArr = vidsStr.split("\\|");
logAndPrint(logger, "\n\n -------------- Found " + vidArr.length
@@ -193,8 +183,6 @@ public class UniquePropertyCheck {
}
}
catch( Exception e2 ){
- //LoggingContext.statusCode(StatusCode.ERROR);
- //LoggingContext.responseCode(LoggingContext.DATA_ERROR);
logAndPrint(logger, "Threw Exception: [" + e2.toString() + "]");
}
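
The valuesAndVidHash / dupeHash pair above implements a value-to-vertex-id grouping: the first vertex id seen for each property value is remembered, and repeats are accumulated into pipe-delimited dupe groups. A hypothetical, graph-free sketch of that idea; the map names and pipe format mirror the tool, everything else is illustrative:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical reduction of UniquePropertyCheck's duplicate detection.
    public class UniqueValueCheckExample {

        public Map<String, String> findDupes(Map<String, String> vidToValue) {
            Map<String, String> valueToVid = new HashMap<>();
            Map<String, String> dupes = new HashMap<>();
            for (Map.Entry<String, String> entry : vidToValue.entrySet()) {
                String vid = entry.getKey();
                String value = entry.getValue();
                String firstVid = valueToVid.putIfAbsent(value, vid);
                if (firstVid != null) {
                    // Value already seen: start or extend the "vid1|vid2|..." dupe group.
                    dupes.merge(value, firstVid + "|" + vid, (existing, ignored) -> existing + "|" + vid);
                }
            }
            return dupes;
        }
    }
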