Diffstat (limited to 'aai-core')
-rw-r--r-- | aai-core/pom.xml                                              |  16
-rw-r--r-- | aai-core/src/main/java/org/onap/aai/audit/ListEndpoints.java |  14
-rw-r--r-- | aai-core/src/main/java/org/onap/aai/dbgen/DataGrooming.java  | 207
3 files changed, 119 insertions, 118 deletions
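The pom.xml portion of this change replaces hard-coded dependency versions with Maven properties. As a minimal sketch of that pattern (illustrative only, not taken from the commit), a version is declared once under <properties> and referenced wherever the dependency appears:

    <properties>
        <!-- declare the version once so every usage stays in sync -->
        <logback.version>1.2.3</logback.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <!-- resolved at build time from the property above -->
            <version>${logback.version}</version>
        </dependency>
    </dependencies>

With this in place, bumping logback (or activemq, freemarker, eelf-core) means editing a single property instead of every <dependency> block.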
diff --git a/aai-core/pom.xml b/aai-core/pom.xml index fc658762..73f2a74d 100644 --- a/aai-core/pom.xml +++ b/aai-core/pom.xml @@ -45,6 +45,10 @@ <sonar.projectVersion>${project.version}</sonar.projectVersion> <httpclient.version>4.5.1</httpclient.version> <jackson.version>2.2.3</jackson.version> + <eelf.core.version>1.0.0</eelf.core.version> + <logback.version>1.2.3</logback.version> + <freemarker.version>2.3.21</freemarker.version> + <activemq.version>5.15.3</activemq.version> </properties> <profiles> <profile> @@ -341,7 +345,7 @@ <dependency> <groupId>com.att.eelf</groupId> <artifactId>eelf-core</artifactId> - <version>1.0.0</version> + <version>${eelf.core.version}</version> </dependency> <dependency> <groupId>org.codehaus.jackson</groupId> @@ -533,17 +537,17 @@ <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-core</artifactId> - <version>1.1.7</version> + <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId> - <version>1.1.7</version> + <version>${logback.version}</version> </dependency> <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-access</artifactId> - <version>1.1.7</version> + <version>${logback.version}</version> </dependency> <dependency> <groupId>org.apache.hbase</groupId> @@ -585,7 +589,7 @@ <dependency> <groupId>org.apache.activemq</groupId> <artifactId>activemq-broker</artifactId> - <version>5.14.4</version> + <version>${activemq.version}</version> </dependency> <dependency> <groupId>com.opencsv</groupId> @@ -595,7 +599,7 @@ <dependency> <groupId>org.freemarker</groupId> <artifactId>freemarker</artifactId> - <version>2.3.14</version> + <version>${freemarker.version}</version> </dependency> <dependency> <groupId>com.github.fge</groupId> diff --git a/aai-core/src/main/java/org/onap/aai/audit/ListEndpoints.java b/aai-core/src/main/java/org/onap/aai/audit/ListEndpoints.java index 310655fe..3ae6d802 100644 --- a/aai-core/src/main/java/org/onap/aai/audit/ListEndpoints.java +++ b/aai-core/src/main/java/org/onap/aai/audit/ListEndpoints.java @@ -51,8 +51,8 @@ public class ListEndpoints { private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(ListEndpoints.class); - private final String start = "inventory"; - private final String[] blacklist = { "search", "aai-internal" }; + private final static String start = "inventory"; + private final static String[] blacklist = { "search", "aai-internal" }; private List<String> endpoints = new ArrayList<>(); private Map<String, String> endpointToLogicalName = new HashMap<String, String>(); @@ -97,20 +97,18 @@ public class ListEndpoints { String currentUri = ""; - if (!obj.getDbName().equals("inventory")) { + if (!obj.getDbName().equals(start)) { currentUri = uri + obj.getGenericURI(); } else { currentUri = uri; } - if (obj.getName().equals("relationship-data") || obj.getName().equals("related-to-property")) { + if ("relationship-data".equals(obj.getName()) || "related-to-property".equals(obj.getName())) { return; } if (!obj.isContainer()) { endpoints.add(currentUri); } - String dbName = obj.getDbName(); - populateLogicalName(obj, uri, currentUri); Set<String> properties = obj.getProperties(); @@ -177,7 +175,7 @@ public class ListEndpoints { */ private void populateLogicalName(Introspector obj, String uri, String currentUri) { - if (obj.getDbName().equals("inventory") || currentUri.split("/").length <= 4 || currentUri.endsWith("relationship-list")) { + if (obj.getDbName().equals(start) || 
currentUri.split("/").length <= 4 || currentUri.endsWith("relationship-list")) { return; } @@ -249,7 +247,7 @@ public class ListEndpoints { List<String> result = new ArrayList<>(); Pattern p = null; Matcher m = null; - if (!filterOut.equals("")) { + if (!filterOut.isEmpty()) { p = Pattern.compile(filterOut); m = null; } diff --git a/aai-core/src/main/java/org/onap/aai/dbgen/DataGrooming.java b/aai-core/src/main/java/org/onap/aai/dbgen/DataGrooming.java index ce820f60..f201a57e 100644 --- a/aai-core/src/main/java/org/onap/aai/dbgen/DataGrooming.java +++ b/aai-core/src/main/java/org/onap/aai/dbgen/DataGrooming.java @@ -61,7 +61,6 @@ import org.onap.aai.logging.LoggingContext; import org.onap.aai.serialization.db.AAIDirection; import org.onap.aai.serialization.db.EdgeProperty; import org.onap.aai.util.*; -import org.onap.aai.logging.LoggingContext; import org.onap.aai.logging.LoggingContext.StatusCode; import com.att.eelf.configuration.Configuration; @@ -73,7 +72,7 @@ import com.thinkaurelius.titan.core.TitanGraph; public class DataGrooming { - private static EELFLogger LOGGER; + private static EELFLogger logger; private static final String FROMAPPID = "AAI-DB"; private static final String TRANSID = UUID.randomUUID().toString(); private static int dupeGrpsDeleted = 0; @@ -90,7 +89,7 @@ public class DataGrooming { Properties props = System.getProperties(); props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_DATA_GROOMING_LOGBACK_PROPS); props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES); - LOGGER = EELFManager.getInstance().getLogger(DataGrooming.class); + logger = EELFManager.getInstance().getLogger(DataGrooming.class); String ver = "version"; // Placeholder Boolean doAutoFix = false; Boolean edgesOnlyFlag = false; @@ -132,7 +131,7 @@ public class DataGrooming { } catch ( Exception e ){ // Don't worry, we'll just use the defaults that we got from AAIConstants - LOGGER.warn("WARNING - could not pick up aai.grooming values from aaiconfig.properties file. "); + logger.warn("WARNING - could not pick up aai.grooming values from aaiconfig.properties file. "); } String prevFileName = ""; @@ -171,7 +170,7 @@ public class DataGrooming { if (i >= args.length) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error(" No value passed with -maxFix option. "); + logger.error(" No value passed with -maxFix option. 
"); AAISystemExitUtil.systemExitCloseAAIGraph(0); } String nextArg = args[i]; @@ -180,7 +179,7 @@ public class DataGrooming { } catch (Exception e) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("Bad value passed with -maxFix option: [" + logger.error("Bad value passed with -maxFix option: [" + nextArg + "]"); AAISystemExitUtil.systemExitCloseAAIGraph(0); } @@ -189,7 +188,7 @@ public class DataGrooming { if (i >= args.length) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("No value passed with -sleepMinutes option."); + logger.error("No value passed with -sleepMinutes option."); AAISystemExitUtil.systemExitCloseAAIGraph(0); } String nextArg = args[i]; @@ -198,7 +197,7 @@ public class DataGrooming { } catch (Exception e) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("Bad value passed with -sleepMinutes option: [" + logger.error("Bad value passed with -sleepMinutes option: [" + nextArg + "]"); AAISystemExitUtil.systemExitCloseAAIGraph(0); } @@ -207,7 +206,7 @@ public class DataGrooming { if (i >= args.length) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("No value passed with -timeWindowMinutes option."); + logger.error("No value passed with -timeWindowMinutes option."); AAISystemExitUtil.systemExitCloseAAIGraph(0); } String nextArg = args[i]; @@ -216,7 +215,7 @@ public class DataGrooming { } catch (Exception e) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("Bad value passed with -timeWindowMinutes option: [" + logger.error("Bad value passed with -timeWindowMinutes option: [" + nextArg + "]"); AAISystemExitUtil.systemExitCloseAAIGraph(0); } @@ -226,16 +225,16 @@ public class DataGrooming { if (i >= args.length) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error(" No value passed with -f option. "); + logger.error(" No value passed with -f option. "); AAISystemExitUtil.systemExitCloseAAIGraph(0); } prevFileName = args[i]; } else { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error(" Unrecognized argument passed to DataGrooming: [" + logger.error(" Unrecognized argument passed to DataGrooming: [" + thisArg + "]. "); - LOGGER.error(" Valid values are: -f -autoFix -maxFix -edgesOnly -skipEdgeChecks -dupeFixOn -donFixOrphans -timeWindowMinutes -sleepMinutes -neverUseCache"); + logger.error(" Valid values are: -f -autoFix -maxFix -edgesOnly -skipEdgeChecks -dupeFixOn -donFixOrphans -timeWindowMinutes -sleepMinutes -neverUseCache"); AAISystemExitUtil.systemExitCloseAAIGraph(0); } } @@ -254,19 +253,19 @@ public class DataGrooming { catch (Exception ex){ LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR); - LOGGER.error("ERROR - Could not create loader " + LogFormatTools.getStackTop(ex)); + logger.error("ERROR - Could not create loader " + LogFormatTools.getStackTop(ex)); AAISystemExitUtil.systemExitCloseAAIGraph(1); } if (skipHostCheck) { - LOGGER.info(" We will skip the HostCheck as requested. "); + logger.info(" We will skip the HostCheck as requested. 
"); } try { - if (!prevFileName.equals("")) { + if (!prevFileName.isEmpty()) { // They are trying to fix some data based on a data in a // previous file. - LOGGER.info(" Call doTheGrooming() with a previous fileName [" + logger.info(" Call doTheGrooming() with a previous fileName [" + prevFileName + "] for cleanup. "); Boolean finalShutdownFlag = true; Boolean cacheDbOkFlag = false; @@ -282,8 +281,8 @@ public class DataGrooming { // that were found by the first run. // Note: we will produce a separate output file for each of the // two runs. - LOGGER.info(" Doing an auto-fix call to Grooming. "); - LOGGER.info(" First, Call doTheGrooming() to look at what's out there. "); + logger.info(" Doing an auto-fix call to Grooming. "); + logger.info(" First, Call doTheGrooming() to look at what's out there. "); Boolean finalShutdownFlag = false; Boolean cacheDbOkFlag = true; int fixCandCount = doTheGrooming("", edgesOnlyFlag, @@ -292,24 +291,24 @@ public class DataGrooming { finalShutdownFlag, cacheDbOkFlag, skipEdgeCheckFlag, timeWindowMinutes); if (fixCandCount == 0) { - LOGGER.info(" No fix-Candidates were found by the first pass, so no second/fix-pass is needed. "); + logger.info(" No fix-Candidates were found by the first pass, so no second/fix-pass is needed. "); } else { // We'll sleep a little and then run a fix-pass based on the // first-run's output file. try { - LOGGER.info("About to sleep for " + sleepMinutes + logger.info("About to sleep for " + sleepMinutes + " minutes."); int sleepMsec = sleepMinutes * 60 * 1000; Thread.sleep(sleepMsec); } catch (InterruptedException ie) { - LOGGER.info("\n >>> Sleep Thread has been Interrupted <<< "); + logger.info("\n >>> Sleep Thread has been Interrupted <<< "); AAISystemExitUtil.systemExitCloseAAIGraph(0); } dteStr = fd.getDateTime(); String secondGroomOutFileName = "dataGrooming." + dteStr + ".out"; - LOGGER.info(" Now, call doTheGrooming() a second time and pass in the name of the file " + logger.info(" Now, call doTheGrooming() a second time and pass in the name of the file " + "generated by the first pass for fixing: [" + groomOutFileName + "]"); finalShutdownFlag = true; @@ -325,7 +324,7 @@ public class DataGrooming { // Do the grooming - plain vanilla (no fix-it-file, no // auto-fixing) Boolean finalShutdownFlag = true; - LOGGER.info(" Call doTheGrooming() "); + logger.info(" Call doTheGrooming() "); Boolean cacheDbOkFlag = true; if( neverUseCache ){ // They have forbidden us from using a cached db connection. @@ -340,10 +339,10 @@ public class DataGrooming { } catch (Exception ex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("Exception while grooming data " + LogFormatTools.getStackTop(ex)); + logger.error("Exception while grooming data " + LogFormatTools.getStackTop(ex)); } - LOGGER.info(" Done! "); + logger.info(" Done! 
"); AAISystemExitUtil.systemExitCloseAAIGraph(0); }// End of main() @@ -375,7 +374,7 @@ public class DataGrooming { Boolean finalShutdownFlag, Boolean cacheDbOkFlag, Boolean skipEdgeCheckFlag, int timeWindowMinutes) { - LOGGER.debug(" Entering doTheGrooming \n"); + logger.debug(" Entering doTheGrooming \n"); int cleanupCandidateCount = 0; long windowStartTime = 0; // Translation of the window into a starting timestamp @@ -403,7 +402,7 @@ public class DataGrooming { // Make sure the target directory exists new File(targetDir).mkdirs(); - if (!fileNameForFixing.equals("")) { + if (!fileNameForFixing.isEmpty()) { deleteCandidateList = getDeleteList(targetDir, fileNameForFixing, edgesOnlyFlag, dontFixOrphansFlag, dupeFixOn); @@ -412,7 +411,7 @@ public class DataGrooming { if (deleteCandidateList.size() > maxRecordsToFix) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(" >> WARNING >> Delete candidate list size (" + logger.warn(" >> WARNING >> Delete candidate list size (" + deleteCandidateList.size() + ") is too big. The maxFix we are using is: " + maxRecordsToFix @@ -432,11 +431,11 @@ public class DataGrooming { throw new AAIException("AAI_6124", emsg); } - LOGGER.info(" Will write to " + fullOutputFileName ); + logger.info(" Will write to " + fullOutputFileName ); bw = new BufferedWriter(new FileWriter(groomOutFile.getAbsoluteFile())); ErrorLogHelper.loadProperties(); - LOGGER.info(" ---- NOTE --- about to open graph (takes a little while)--------\n"); + logger.info(" ---- NOTE --- about to open graph (takes a little while)--------\n"); if( cacheDbOkFlag ){ // Since we're just reading (not deleting/fixing anything), we can use @@ -451,7 +450,7 @@ public class DataGrooming { throw new AAIException("AAI_6101", emsg); } - LOGGER.debug(" Got the graph object. "); + logger.debug(" Got the graph object. "); g = graph.newTransaction(); if (g == null) { @@ -475,10 +474,10 @@ public class DataGrooming { Set<Entry<String, Introspector>> entrySet = loader.getAllObjects().entrySet(); String ntList = ""; - LOGGER.info(" Starting DataGrooming Processing "); + logger.info(" Starting DataGrooming Processing "); if (edgesOnlyFlag) { - LOGGER.info(" NOTE >> Skipping Node processing as requested. Will only process Edges. << "); + logger.info(" NOTE >> Skipping Node processing as requested. Will only process Edges. 
<< "); } else { for (Entry<String, Introspector> entry : entrySet) { @@ -486,7 +485,7 @@ public class DataGrooming { int thisNtCount = 0; int thisNtDeleteCount = 0; - LOGGER.debug(" > Look at : [" + nType + "] ..."); + logger.debug(" > Look at : [" + nType + "] ..."); ntList = ntList + "," + nType; // Get a collection of the names of the key properties for this nodeType to use later @@ -514,7 +513,7 @@ public class DataGrooming { thisNtCount++; if( thisNtCount == lastShownForNt + 250 ){ lastShownForNt = thisNtCount; - LOGGER.debug("count for " + nType + " so far = " + thisNtCount ); + logger.debug("count for " + nType + " so far = " + thisNtCount ); } Vertex thisVtx = iter.next(); if( windowStartTime > 0 ){ @@ -532,7 +531,7 @@ public class DataGrooming { String thisVid = thisVtx.id().toString(); if (processedVertices.contains(thisVid)) { - LOGGER.debug("skipping already processed vertex: " + thisVid); + logger.debug("skipping already processed vertex: " + thisVid); continue; } totalNodeCount++; @@ -602,7 +601,7 @@ public class DataGrooming { } catch (Exception ex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn("WARNING from inside the for-each-vid-loop orphan-edges-check " + LogFormatTools.getStackTop(ex) ); + logger.warn("WARNING from inside the for-each-vid-loop orphan-edges-check " + LogFormatTools.getStackTop(ex) ); } if (deleteCandidateList.contains(thisVid)) { @@ -616,10 +615,10 @@ public class DataGrooming { okFlag = false; LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("ERROR trying to delete missing-dep-node VID = " + thisVid + " " + LogFormatTools.getStackTop(e)); + logger.error("ERROR trying to delete missing-dep-node VID = " + thisVid + " " + LogFormatTools.getStackTop(e)); } if (okFlag) { - LOGGER.info(" DELETED missing-dep-node VID = " + thisVid); + logger.info(" DELETED missing-dep-node VID = " + thisVid); } } else { // We count nodes missing their depNodes two ways - the first if it has @@ -668,10 +667,10 @@ public class DataGrooming { okFlag = false; LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("ERROR trying to delete phantom VID = " + thisVid + " " + LogFormatTools.getStackTop(e)); + logger.error("ERROR trying to delete phantom VID = " + thisVid + " " + LogFormatTools.getStackTop(e)); } if (okFlag) { - LOGGER.info(" DELETED VID = " + thisVid); + logger.info(" DELETED VID = " + thisVid); } } else { ghostNodeHash.put(thisVid, thisVtx); @@ -679,7 +678,7 @@ public class DataGrooming { } else if( (secondGetList.size() > 1) && depNodeOk && !dupeCheckOff ){ // Found some DUPLICATES - need to process them - LOGGER.info(" - now check Dupes for this guy - "); + logger.info(" - now check Dupes for this guy - "); List<String> tmpDupeGroups = checkAndProcessDupes( TRANSID, FROMAPPID, g, source1, version, nType, secondGetList, dupeFixOn, @@ -688,7 +687,7 @@ public class DataGrooming { while (dIter.hasNext()) { // Add in any newly found dupes to our running list String tmpGrp = dIter.next(); - LOGGER.info("Found set of dupes: [" + tmpGrp + "]"); + logger.info("Found set of dupes: [" + tmpGrp + "]"); dupeGroups.add(tmpGrp); } } @@ -696,13 +695,13 @@ public class DataGrooming { catch (AAIException e1) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(" For nodeType = " + nType + " Caught exception", e1); + 
logger.warn(" For nodeType = " + nType + " Caught exception", e1); errArr.add(e1.getErrorObject().toString()); } catch (Exception e2) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(" For nodeType = " + nType + logger.warn(" For nodeType = " + nType + " Caught exception", e2); errArr.add(e2.getMessage()); } @@ -710,7 +709,7 @@ public class DataGrooming { catch (Exception exx) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn("WARNING from inside the while-verts-loop ", exx); + logger.warn("WARNING from inside the while-verts-loop ", exx); } } // while loop for each record of a nodeType @@ -728,7 +727,7 @@ public class DataGrooming { Iterator<ArrayList<Vertex>> dsItr = nonDependentDupeSets.iterator(); while( dsItr.hasNext() ){ ArrayList<Vertex> dupeList = dsItr.next(); - LOGGER.info(" - now check Dupes for some non-dependent guys - "); + logger.info(" - now check Dupes for some non-dependent guys - "); List<String> tmpDupeGroups = checkAndProcessDupes( TRANSID, FROMAPPID, g, source1, version, nType, dupeList, dupeFixOn, @@ -737,7 +736,7 @@ public class DataGrooming { while (dIter.hasNext()) { // Add in any newly found dupes to our running list String tmpGrp = dIter.next(); - LOGGER.info("Found set of dupes: [" + tmpGrp + "]"); + logger.info("Found set of dupes: [" + tmpGrp + "]"); dupeGroups.add(tmpGrp); } } @@ -751,7 +750,7 @@ public class DataGrooming { } thisNtDeleteCount = 0; - LOGGER.info( " Processed " + thisNtCount + " records for [" + nType + "], " + totalNodeCount + " total overall. " ); + logger.info( " Processed " + thisNtCount + " records for [" + nType + "], " + totalNodeCount + " total overall. " ); }// While-loop for each node type @@ -769,7 +768,7 @@ public class DataGrooming { // -------------------------------------------------------------------------------------- // To do some strange checking - we need a second graph object - LOGGER.debug(" ---- DEBUG --- about to open a SECOND graph (takes a little while)--------\n"); + logger.debug(" ---- DEBUG --- about to open a SECOND graph (takes a little while)--------\n"); // Note - graph2 just reads - but we want it to use a fresh connection to // the database, so we are NOT using the CACHED DB CONFIG here. graph2 = TitanFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(DataGrooming.class.getSimpleName()).withGraphType("realtime2").buildConfiguration()); @@ -777,7 +776,7 @@ public class DataGrooming { String emsg = "null graph2 object in DataGrooming\n"; throw new AAIException("AAI_6101", emsg); } else { - LOGGER.debug("Got the graph2 object... \n"); + logger.debug("Got the graph2 object... 
\n"); } g2 = graph2.newTransaction(); if (g2 == null) { @@ -796,7 +795,7 @@ public class DataGrooming { int counter = 0; int lastShown = 0; Iterator<Vertex> vItor2 = vertList.iterator(); - LOGGER.info(" Checking for bad edges --- "); + logger.info(" Checking for bad edges --- "); while (vItor2.hasNext()) { Vertex v = null; @@ -806,7 +805,7 @@ public class DataGrooming { } catch (Exception vex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(">>> WARNING trying to get next vertex on the vItor2 "); + logger.warn(">>> WARNING trying to get next vertex on the vItor2 "); continue; } @@ -817,12 +816,12 @@ public class DataGrooming { } catch (Exception ev) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn("WARNING when doing getId() on a vertex from our vertex list. "); + logger.warn("WARNING when doing getId() on a vertex from our vertex list. "); continue; } if (ghostNodeHash.containsKey(thisVertId)) { // This is a phantom node, so don't try to use it - LOGGER.info(" >> Skipping edge check for edges from vertexId = " + logger.info(" >> Skipping edge check for edges from vertexId = " + thisVertId + ", since that guy is a Phantom Node"); continue; @@ -843,7 +842,7 @@ public class DataGrooming { if (counter == lastShown + 250) { lastShown = counter; - LOGGER.info("... Checking edges for vertex # " + logger.info("... Checking edges for vertex # " + counter); } Iterator<Edge> eItor = v.edges(Direction.BOTH); @@ -856,7 +855,7 @@ public class DataGrooming { } catch (Exception iex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(">>> WARNING trying to get next edge on the eItor ", iex); + logger.warn(">>> WARNING trying to get next edge on the eItor ", iex); continue; } @@ -865,7 +864,7 @@ public class DataGrooming { } catch (Exception err) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(">>> WARNING trying to get edge's In-vertex ", err); + logger.warn(">>> WARNING trying to get edge's In-vertex ", err); } String vNtI = ""; String vIdI = ""; @@ -892,7 +891,7 @@ public class DataGrooming { if( connectedVert == null ) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn( "GHOST2 -- got NULL when doing getVertex for vid = " + vIdLong); + logger.warn( "GHOST2 -- got NULL when doing getVertex for vid = " + vIdLong); cantGetUsingVid = true; // If we can NOT get this ghost with the SECOND graph-object, @@ -904,7 +903,7 @@ public class DataGrooming { catch( Exception ex){ LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn( "GHOST2 -- Could not get the ghost info for a bad edge for vtxId = " + vIdLong, ex); + logger.warn( "GHOST2 -- Could not get the ghost info for a bad edge for vtxId = " + vIdLong, ex); } if( ghost2 != null ){ ghostNodeHash.put(vIdI, ghost2); @@ -915,7 +914,7 @@ public class DataGrooming { catch (Exception err) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(">>> WARNING trying to get edge's In-vertex props ", err); + logger.warn(">>> WARNING trying to get edge's In-vertex props ", err); } } if (keysMissing || vIn == null || vNtI.equals("") @@ -944,11 +943,11 @@ public class DataGrooming { okFlag = false; 
LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn("WARNING when trying to delete bad-edge-connected VERTEX VID = " + logger.warn("WARNING when trying to delete bad-edge-connected VERTEX VID = " + vIdI, e1); } if (okFlag) { - LOGGER.info(" DELETED vertex from bad edge = " + logger.info(" DELETED vertex from bad edge = " + vIdI); } } else { @@ -969,11 +968,11 @@ public class DataGrooming { okFlag = false; LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn("WARNING when trying to delete edge = " + logger.warn("WARNING when trying to delete edge = " + thisEid); } if (okFlag) { - LOGGER.info(" DELETED edge = " + thisEid); + logger.info(" DELETED edge = " + thisEid); } } } else { @@ -990,7 +989,7 @@ public class DataGrooming { } catch (Exception err) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(">>> WARNING trying to get edge's Out-vertex "); + logger.warn(">>> WARNING trying to get edge's Out-vertex "); } String vNtO = ""; String vIdO = ""; @@ -1016,7 +1015,7 @@ public class DataGrooming { Vertex connectedVert = g2.traversal().V(vIdLong).next(); if( connectedVert == null ) { cantGetUsingVid = true; - LOGGER.info( "GHOST2 -- got NULL when doing getVertex for vid = " + vIdLong); + logger.info( "GHOST2 -- got NULL when doing getVertex for vid = " + vIdLong); // If we can get this ghost with the other graph-object, then get it -- it's still a ghost try { ghost2 = g.traversal().V(vIdLong).next(); @@ -1024,7 +1023,7 @@ public class DataGrooming { catch( Exception ex){ LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn( "GHOST2 -- Could not get the ghost info for a bad edge for vtxId = " + vIdLong, ex); + logger.warn( "GHOST2 -- Could not get the ghost info for a bad edge for vtxId = " + vIdLong, ex); } if( ghost2 != null ){ ghostNodeHash.put(vIdO, ghost2); @@ -1034,7 +1033,7 @@ public class DataGrooming { } catch (Exception err) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(">>> WARNING trying to get edge's Out-vertex props ", err); + logger.warn(">>> WARNING trying to get edge's Out-vertex props ", err); } } if (keysMissing || vOut == null || vNtO.equals("") @@ -1063,11 +1062,11 @@ public class DataGrooming { okFlag = false; LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn("WARNING when trying to delete bad-edge-connected VID = " + logger.warn("WARNING when trying to delete bad-edge-connected VID = " + vIdO, e1); } if (okFlag) { - LOGGER.info(" DELETED vertex from bad edge = " + logger.info(" DELETED vertex from bad edge = " + vIdO); } } else { @@ -1088,11 +1087,11 @@ public class DataGrooming { okFlag = false; LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn("WARNING when trying to delete edge = " + logger.warn("WARNING when trying to delete edge = " + thisEid, ex); } if (okFlag) { - LOGGER.info(" DELETED edge = " + thisEid); + logger.info(" DELETED edge = " + thisEid); } } } else { @@ -1107,7 +1106,7 @@ public class DataGrooming { } catch (Exception exx) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn("WARNING from in the while-verts-loop ", exx); + logger.warn("WARNING 
from in the while-verts-loop ", exx); } }// End of while-vertices-loop (the edge-checking) } // end of -- if we're not skipping the edge-checking @@ -1116,14 +1115,14 @@ public class DataGrooming { deleteCount = deleteCount + dupeGrpsDeleted; if (!singleCommits && deleteCount > 0) { try { - LOGGER.info("About to do the commit for " + logger.info("About to do the commit for " + deleteCount + " removes. "); executeFinalCommit = true; - LOGGER.info("Commit was successful "); + logger.info("Commit was successful "); } catch (Exception excom) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error(" >>>> ERROR <<<< Could not commit changes. " + LogFormatTools.getStackTop(excom)); + logger.error(" >>>> ERROR <<<< Could not commit changes. " + LogFormatTools.getStackTop(excom)); deleteCount = 0; } } @@ -1212,7 +1211,7 @@ public class DataGrooming { } catch (Exception dex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("error trying to print detail info for a ghost-node: " + LogFormatTools.getStackTop(dex)); + logger.error("error trying to print detail info for a ghost-node: " + LogFormatTools.getStackTop(dex)); } } @@ -1236,7 +1235,7 @@ public class DataGrooming { } catch (Exception dex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("error trying to print detail info for a Orphan Node /missing dependent edge " + LogFormatTools.getStackTop(dex)); + logger.error("error trying to print detail info for a Orphan Node /missing dependent edge " + LogFormatTools.getStackTop(dex)); } } @@ -1261,7 +1260,7 @@ public class DataGrooming { } catch (Exception dex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("error trying to print detail info for a node missing its dependent edge but not an orphan " + logger.error("error trying to print detail info for a node missing its dependent edge but not an orphan " + LogFormatTools.getStackTop(dex)); } } @@ -1284,7 +1283,7 @@ public class DataGrooming { } catch (Exception pex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("error trying to print empty/bad vertex data: " + LogFormatTools.getStackTop(pex)); + logger.error("error trying to print empty/bad vertex data: " + LogFormatTools.getStackTop(pex)); } } @@ -1365,7 +1364,7 @@ public class DataGrooming { } catch (Exception dex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("error trying to print duplicate vertex data " + LogFormatTools.getStackTop(dex)); + logger.error("error trying to print duplicate vertex data " + LogFormatTools.getStackTop(dex)); } }// while - work on each group of dupes @@ -1385,8 +1384,8 @@ public class DataGrooming { bw.close(); - LOGGER.info("\n ------------- Done doing all the checks ------------ "); - LOGGER.info("Output will be written to " + fullOutputFileName); + logger.info("\n ------------- Done doing all the checks ------------ "); + logger.info("Output will be written to " + fullOutputFileName); if (cleanupCandidateCount > 0) { // Technically, this is not an error -- but we're throwing this @@ -1398,12 +1397,12 @@ public class DataGrooming { } catch (AAIException e) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - 
LOGGER.error("Caught AAIException while grooming data"); + logger.error("Caught AAIException while grooming data"); ErrorLogHelper.logException(e); } catch (Exception ex) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("Caught exception while grooming data"); + logger.error("Caught exception while grooming data"); ErrorLogHelper.logError("AAI_6128", ex.getMessage() + ", resolve and rerun dataGrooming"); } finally { @@ -1413,7 +1412,7 @@ public class DataGrooming { } catch (IOException iox) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR); - LOGGER.warn("Got an IOException trying to close bufferedWriter() \n", iox); + logger.warn("Got an IOException trying to close bufferedWriter() \n", iox); } } @@ -1429,7 +1428,7 @@ public class DataGrooming { // Don't throw anything because Titan sometimes is just saying that the graph is already closed LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR); - LOGGER.warn("WARNING from final graphTransaction.rollback()", ex); + logger.warn("WARNING from final graphTransaction.rollback()", ex); } } @@ -1442,7 +1441,7 @@ public class DataGrooming { // Don't throw anything because Titan sometimes is just saying that the graph is already closed LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR); - LOGGER.warn("WARNING from final graphTransaction2.rollback()", ex); + logger.warn("WARNING from final graphTransaction2.rollback()", ex); } } @@ -1456,7 +1455,7 @@ public class DataGrooming { // Don't throw anything because Titan sometimes is just saying that the graph is already closed{ LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR); - LOGGER.warn("WARNING from final graph.shutdown()", ex); + logger.warn("WARNING from final graph.shutdown()", ex); } try { @@ -1468,7 +1467,7 @@ public class DataGrooming { // Don't throw anything because Titan sometimes is just saying that the graph is already closed{ LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR); - LOGGER.warn("WARNING from final graph2.shutdown()", ex); + logger.warn("WARNING from final graph2.shutdown()", ex); } } @@ -1704,7 +1703,7 @@ public class DataGrooming { try { keyProps = loader.introspectorFromName(vtxANodeType).getKeys(); } catch (AAIUnknownObjectException e) { - LOGGER.warn("Required property not found", e); + logger.warn("Required property not found", e); throw new AAIException("AAI_6105", "Required Property name(s) not found for nodeType = " + vtxANodeType + ")"); } @@ -2063,7 +2062,7 @@ public class DataGrooming { } catch (Exception e) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.warn(" >>> Threw an error in checkAndProcessDupes - just absorb this error and move on. ", e); + logger.warn(" >>> Threw an error in checkAndProcessDupes - just absorb this error and move on. ", e); } return returnList; @@ -2191,7 +2190,7 @@ public class DataGrooming { if (prefArr.length != 2 || (!prefArr[0].equals("KeepVid"))) { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("Bad format. Expecting KeepVid=999999"); + logger.error("Bad format. 
Expecting KeepVid=999999"); return false; } else { String keepVidStr = prefArr[1]; @@ -2221,10 +2220,10 @@ public class DataGrooming { okFlag = false; LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("ERROR trying to delete VID = " + thisVid + " " + LogFormatTools.getStackTop(e)); + logger.error("ERROR trying to delete VID = " + thisVid + " " + LogFormatTools.getStackTop(e)); } if (okFlag) { - LOGGER.info(" DELETED VID = " + thisVid); + logger.info(" DELETED VID = " + thisVid); deletedSomething = true; } } @@ -2232,7 +2231,7 @@ public class DataGrooming { } else { LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error("ERROR - Vertex Id to keep not found in list of dupes. dupeInfoString = [" + logger.error("ERROR - Vertex Id to keep not found in list of dupes. dupeInfoString = [" + dupeInfoString + "]"); return false; } @@ -2313,7 +2312,7 @@ public class DataGrooming { catch( Exception ex ){ LoggingContext.statusCode(StatusCode.ERROR); LoggingContext.responseCode(LoggingContext.DATA_ERROR); - LOGGER.error( " ERROR trying to get node for: [" + propsAndValuesForMsg + "]" + LogFormatTools.getStackTop(ex)); + logger.error( " ERROR trying to get node for: [" + propsAndValuesForMsg + "]" + LogFormatTools.getStackTop(ex)); } if( verts != null ){ @@ -2324,7 +2323,7 @@ public class DataGrooming { } if( retVertList.size() == 0 ){ - LOGGER.debug("DEBUG No node found for nodeType = [" + nodeType + + logger.debug("DEBUG No node found for nodeType = [" + nodeType + "], propsAndVal = " + propsAndValuesForMsg ); } @@ -2563,7 +2562,7 @@ public class DataGrooming { } } catch (Exception e) { - LOGGER.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. ", e); + logger.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. ", e); } } @@ -2581,7 +2580,7 @@ public class DataGrooming { } } catch (Exception e) { - LOGGER.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. ", e); + logger.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. ", e); } } @@ -2634,7 +2633,7 @@ public class DataGrooming { if( thisVid.equals(vidAL.toString()) || thisVid.equals(vidBL.toString()) ){ String msg = " vid = " + thisVid + " is one of two that the DB can retrieve directly ------"; //System.out.println(msg); - LOGGER.info(msg); + logger.info(msg); returnVid = thisVid; } } @@ -2642,7 +2641,7 @@ public class DataGrooming { catch ( AAIException ae ){ String emsg = "Error trying to get node just by key " + ae.getMessage(); //System.out.println(emsg); - LOGGER.error(emsg); + logger.error(emsg); } return returnVid; |