Diffstat (limited to 'src')
-rw-r--r--  src/main/docker/docker-entrypoint.sh                                     |   2
-rw-r--r--  src/main/java/org/onap/aai/datagrooming/DataGrooming.java                | 265
-rw-r--r--  src/main/java/org/onap/aai/db/schema/AuditOXM.java                       |   8
-rw-r--r--  src/main/java/org/onap/aai/db/schema/AuditorFactory.java                 |   5
-rw-r--r--  src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java         |   5
-rw-r--r--  src/main/java/org/onap/aai/db/schema/ScriptDriver.java                   |   6
-rw-r--r--  src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java  |   4
-rw-r--r--  src/test/java/org/onap/aai/datagrooming/DataGroomingTest.java            | 278
-rw-r--r--  src/test/java/org/onap/aai/datasnapshot/DataSnapshotTest.java            | 378
-rw-r--r--  src/test/java/org/onap/aai/db/schema/AuditOXMTest.java                   |  60
-rw-r--r--  src/test/java/org/onap/aai/dbgen/DupeToolTest.java                       |   2
-rw-r--r--  src/test/java/org/onap/aai/schema/db/ManageSchemaTest.java               |   2
-rw-r--r--  src/test/resources/empty.graphson                                        |   0
-rw-r--r--  src/test/resources/pserver.graphson                                      |   1
-rw-r--r--  src/test/resources/pserver2.graphson.P0                                  |   1
-rw-r--r--  src/test/resources/pserver2.graphson.P1                                  |   2
16 files changed, 950 insertions(+), 69 deletions(-)
diff --git a/src/main/docker/docker-entrypoint.sh b/src/main/docker/docker-entrypoint.sh
index 2f90ce5..79adc2b 100644
--- a/src/main/docker/docker-entrypoint.sh
+++ b/src/main/docker/docker-entrypoint.sh
@@ -40,7 +40,7 @@ if [ $(cat /etc/passwd | grep aaiadmin | wc -l) -eq 0 ]; then
}
fi;
-chown -R aaiadmin:aaiadmin /opt/app /opt/aai/logroot /var/chef
+chown -R aaiadmin:aaiadmin /opt/app /opt/aai/logroot
find /opt/app/ -name "*.sh" -exec chmod +x {} +
if [ -f ${APP_HOME}/aai.sh ]; then
diff --git a/src/main/java/org/onap/aai/datagrooming/DataGrooming.java b/src/main/java/org/onap/aai/datagrooming/DataGrooming.java
index 6149dd9..199e704 100644
--- a/src/main/java/org/onap/aai/datagrooming/DataGrooming.java
+++ b/src/main/java/org/onap/aai/datagrooming/DataGrooming.java
@@ -48,7 +48,6 @@ import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.VertexProperty;
import org.onap.aai.GraphAdminApp;
import org.onap.aai.dbmap.AAIGraph;
-import org.onap.aai.dbmap.AAIGraphConfig;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.introspection.Introspector;
import org.onap.aai.introspection.Loader;
@@ -67,7 +66,9 @@ import org.onap.aai.logging.LoggingContext.StatusCode;
import com.att.eelf.configuration.Configuration;
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
-import org.janusgraph.core.JanusGraphFactory;
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+
import org.janusgraph.core.JanusGraph;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
@@ -82,6 +83,16 @@ public class DataGrooming {
private LoaderFactory loaderFactory;
private SchemaVersions schemaVersions;
+
+ private CommandLineArgs cArgs;
+
+ HashMap<String, Vertex> orphanNodeHash ;
+ HashMap<String, Vertex> missingAaiNtNodeHash ;
+ HashMap<String, Edge> oneArmedEdgeHash ;
+ HashMap<String, Vertex> ghostNodeHash ;
+ ArrayList<String> dupeGroups;
+ Set<String> deleteCandidateList;
+ private int deleteCount = 0;
public DataGrooming(LoaderFactory loaderFactory, SchemaVersions schemaVersions){
this.loaderFactory = loaderFactory;
@@ -91,18 +102,6 @@ public class DataGrooming {
public void execute(String[] args){
String ver = "version"; // Placeholder
- Boolean doAutoFix = false;
- Boolean edgesOnlyFlag = false;
- Boolean dontFixOrphansFlag = false;
- Boolean skipHostCheck = false;
- Boolean singleCommits = false;
- Boolean dupeCheckOff = false;
- Boolean dupeFixOn = false;
- Boolean ghost2CheckOff = false;
- Boolean ghost2FixOn = false;
- Boolean neverUseCache = false;
- Boolean skipEdgeCheckFlag = false;
- Boolean skipIndexUpdateFix = false;
// A value of 0 means that we will not have a time-window -- we will look
// at all nodes of the passed-in nodeType.
@@ -131,7 +130,26 @@ public class DataGrooming {
FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
String dteStr = fd.getDateTime();
- if (args.length > 0) {
+ cArgs = new CommandLineArgs();
+ JCommander jCommander = new JCommander(cArgs, args);
+ jCommander.setProgramName(DataGrooming.class.getSimpleName());
+
+ //Print Defaults
+ LOGGER.info("EdgesOnlyFlag is" + cArgs.edgesOnlyFlag);
+ LOGGER.info("DoAutoFix is" + cArgs.doAutoFix);
+ LOGGER.info("skipHostCheck is" + cArgs.skipHostCheck);
+ LOGGER.info("dontFixOrphansFlag is" + cArgs.dontFixOrphansFlag);
+ LOGGER.info("singleCommits is" + cArgs.singleCommits);
+ LOGGER.info("dupeCheckOff is" + cArgs.dupeCheckOff);
+ LOGGER.info("dupeFixOn is" + cArgs.dupeFixOn);
+ LOGGER.info("ghost2CheckOff is" + cArgs.ghost2CheckOff);
+ LOGGER.info("ghost2FixOn is" + cArgs.ghost2FixOn);
+ LOGGER.info("neverUseCache is" + cArgs.neverUseCache);
+ LOGGER.info("skipEdgeChecks is" + cArgs.skipEdgeCheckFlag);
+ LOGGER.info("skipIndexUpdateFix is" + cArgs.skipIndexUpdateFix);
+ LOGGER.info("maxFix is" + cArgs.maxRecordsToFix);
+
+ /*if (args.length > 0) {
// They passed some arguments in that will affect processing
for (int i = 0; i < args.length; i++) {
String thisArg = args[i];
@@ -241,9 +259,10 @@ public class DataGrooming {
AAISystemExitUtil.systemExitCloseAAIGraph(0);
}
}
- }
+ } */
String windowTag = "FULL";
+ //TODO???
if( timeWindowMinutes > 0 ){
windowTag = "PARTIAL";
}
@@ -267,13 +286,14 @@ public class DataGrooming {
+ prevFileName + "] for cleanup. ");
Boolean finalShutdownFlag = true;
Boolean cacheDbOkFlag = false;
- doTheGrooming(prevFileName, edgesOnlyFlag, dontFixOrphansFlag,
- maxRecordsToFix, groomOutFileName, ver, singleCommits,
- dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
- finalShutdownFlag, cacheDbOkFlag,
- skipEdgeCheckFlag, timeWindowMinutes,
- singleNodeType, skipIndexUpdateFix );
- } else if (doAutoFix) {
+ doTheGrooming(prevFileName, cArgs.edgesOnlyFlag, cArgs.dontFixOrphansFlag,
+ cArgs.maxRecordsToFix, groomOutFileName, ver, cArgs.singleCommits,
+ cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn,
+ cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag,
+ cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes,
+ cArgs.singleNodeType, cArgs.skipIndexUpdateFix );
+
+ } else if (cArgs.doAutoFix) {
// They want us to run the processing twice -- first to look for
// delete candidates, then after
// napping for a while, run it again and delete any candidates
@@ -284,21 +304,21 @@ public class DataGrooming {
LOGGER.info(" First, Call doTheGrooming() to look at what's out there. ");
Boolean finalShutdownFlag = false;
Boolean cacheDbOkFlag = true;
- int fixCandCount = doTheGrooming("", edgesOnlyFlag,
- dontFixOrphansFlag, maxRecordsToFix, groomOutFileName,
- ver, singleCommits, dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
- finalShutdownFlag, cacheDbOkFlag,
- skipEdgeCheckFlag, timeWindowMinutes,
- singleNodeType, skipIndexUpdateFix );
+ int fixCandCount = doTheGrooming("", cArgs.edgesOnlyFlag,
+ cArgs.dontFixOrphansFlag, cArgs.maxRecordsToFix, groomOutFileName,
+ ver, cArgs.singleCommits, cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn,
+ cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag,
+ cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes,
+ cArgs.singleNodeType, cArgs.skipIndexUpdateFix );
if (fixCandCount == 0) {
LOGGER.info(" No fix-Candidates were found by the first pass, so no second/fix-pass is needed. ");
} else {
// We'll sleep a little and then run a fix-pass based on the
// first-run's output file.
try {
- LOGGER.info("About to sleep for " + sleepMinutes
+ LOGGER.info("About to sleep for " + cArgs.sleepMinutes
+ " minutes.");
- int sleepMsec = sleepMinutes * 60 * 1000;
+ int sleepMsec = cArgs.sleepMinutes * 60 * 1000;
Thread.sleep(sleepMsec);
} catch (InterruptedException ie) {
LOGGER.info("\n >>> Sleep Thread has been Interrupted <<< ");
@@ -312,13 +332,13 @@ public class DataGrooming {
+ groomOutFileName + "]");
finalShutdownFlag = true;
cacheDbOkFlag = false;
- doTheGrooming(groomOutFileName, edgesOnlyFlag,
- dontFixOrphansFlag, maxRecordsToFix,
- secondGroomOutFileName, ver, singleCommits,
- dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
- finalShutdownFlag, cacheDbOkFlag,
- skipEdgeCheckFlag, timeWindowMinutes,
- singleNodeType, skipIndexUpdateFix );
+ doTheGrooming(groomOutFileName, cArgs.edgesOnlyFlag,
+ cArgs.dontFixOrphansFlag, cArgs.maxRecordsToFix,
+ secondGroomOutFileName, ver, cArgs.singleCommits,
+ cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn,
+ cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag,
+ cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes,
+ cArgs.singleNodeType, cArgs.skipIndexUpdateFix );
}
} else {
// Do the grooming - plain vanilla (no fix-it-file, no
@@ -326,16 +346,16 @@ public class DataGrooming {
Boolean finalShutdownFlag = true;
LOGGER.info(" Call doTheGrooming() ");
Boolean cacheDbOkFlag = true;
- if( neverUseCache ){
+ if( cArgs.neverUseCache ){
// They have forbidden us from using a cached db connection.
- cacheDbOkFlag = false;
+ cArgs.cacheDbOkFlag = false;
}
- doTheGrooming("", edgesOnlyFlag, dontFixOrphansFlag,
- maxRecordsToFix, groomOutFileName, ver, singleCommits,
- dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
- finalShutdownFlag, cacheDbOkFlag,
- skipEdgeCheckFlag, timeWindowMinutes,
- singleNodeType, skipIndexUpdateFix );
+ doTheGrooming("", cArgs.edgesOnlyFlag, cArgs.dontFixOrphansFlag,
+ cArgs.maxRecordsToFix, groomOutFileName, ver, cArgs.singleCommits,
+ cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn,
+ cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag,
+ cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes,
+ cArgs.singleNodeType, cArgs.skipIndexUpdateFix );
}
} catch (Exception ex) {
LoggingContext.statusCode(StatusCode.ERROR);
@@ -416,10 +436,10 @@ public class DataGrooming {
BufferedWriter bw = null;
JanusGraph graph = null;
JanusGraph graph2 = null;
- int deleteCount = 0;
+ deleteCount = 0;
int dummyUpdCount = 0;
boolean executeFinalCommit = false;
- Set<String> deleteCandidateList = new LinkedHashSet<>();
+ deleteCandidateList = new LinkedHashSet<>();
Set<String> processedVertices = new LinkedHashSet<>();
Set<String> postCommitRemoveList = new LinkedHashSet<>();
@@ -505,12 +525,12 @@ public class DataGrooming {
ArrayList<String> errArr = new ArrayList<>();
int totalNodeCount = 0;
HashMap<String, String> misMatchedHash = new HashMap<String, String>();
- HashMap<String, Vertex> orphanNodeHash = new HashMap<String, Vertex>();
- HashMap<String, Vertex> missingAaiNtNodeHash = new HashMap<String, Vertex>();
- HashMap<String, Edge> oneArmedEdgeHash = new HashMap<String, Edge>();
+ orphanNodeHash = new HashMap<String, Vertex>();
+ missingAaiNtNodeHash = new HashMap<String, Vertex>();
+ oneArmedEdgeHash = new HashMap<String, Edge>();
HashMap<String, String> emptyVertexHash = new HashMap<String, String>();
- HashMap<String, Vertex> ghostNodeHash = new HashMap<String, Vertex>();
- ArrayList<String> dupeGroups = new ArrayList<>();
+ ghostNodeHash = new HashMap<String, Vertex>();
+ dupeGroups = new ArrayList<>();
Loader loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
@@ -1627,7 +1647,7 @@ public class DataGrooming {
}// end of doTheGrooming()
- private void updateIndexedProps(Vertex thisVtx, String thisVidStr, String nType,
+ public void updateIndexedProps(Vertex thisVtx, String thisVidStr, String nType,
HashMap <String,String>propTypeHash, ArrayList <String> indexedProps) {
// This is a "missing-aai-node-type" scenario.
// Other indexes may also be messed up, so we will update all of them on
@@ -2849,5 +2869,142 @@ public class DataGrooming {
return returnVid;
}// End of findJustOneUsingIndex()
+
+class CommandLineArgs {
+
+
+ @Parameter(names = "--help", help = true)
+ public boolean help;
+
+ @Parameter(names = "-edgesOnly", description = "Check grooming on edges only", arity = 1)
+ public Boolean edgesOnlyFlag = false;
+
+ @Parameter(names = "-autoFix", description = "doautofix", arity = 1)
+ public Boolean doAutoFix = false;
+
+ @Parameter(names = "-skipHostCheck", description = "skipHostCheck", arity = 1)
+ public Boolean skipHostCheck = false;
+
+ @Parameter(names = "-dontFixOrphans", description = "dontFixOrphans", arity = 1)
+ public Boolean dontFixOrphansFlag = false;
+
+ @Parameter(names = "-singleCommits", description = "singleCommits", arity = 1)
+ public Boolean singleCommits = false;
+
+ @Parameter(names = "-dupeCheckOff", description = "dupeCheckOff", arity = 1)
+ public Boolean dupeCheckOff = false;
+
+ @Parameter(names = "-dupeFixOn", description = "dupeFixOn", arity = 1)
+ public Boolean dupeFixOn = false;
+
+ @Parameter(names = "-ghost2CheckOff", description = "ghost2CheckOff", arity = 1)
+ public Boolean ghost2CheckOff = false;
+
+ @Parameter(names = "-ghost2FixOn", description = "ghost2FixOn", arity = 1)
+ public Boolean ghost2FixOn = false;
+
+ @Parameter(names = "-neverUseCache", description = "neverUseCache", arity = 1)
+ public Boolean neverUseCache = false;
+
+ @Parameter(names = "-skipEdgeChecks", description = "skipEdgeChecks", arity = 1)
+ public Boolean skipEdgeCheckFlag = false;
+
+ @Parameter(names = "-skipIndexUpdateFix", description = "skipIndexUpdateFix", arity = 1)
+ public Boolean skipIndexUpdateFix = false;
+
+ @Parameter(names = "-maxFix", description = "maxFix")
+ public int maxRecordsToFix = AAIConstants.AAI_GROOMING_DEFAULT_MAX_FIX;
+
+ @Parameter(names = "-sleepMinutes", description = "sleepMinutes")
+ public int sleepMinutes = AAIConstants.AAI_GROOMING_DEFAULT_SLEEP_MINUTES;
+
+ // A value of 0 means that we will not have a time-window -- we will look
+ // at all nodes of the passed-in nodeType.
+ @Parameter(names = "-timeWindowMinutes", description = "timeWindowMinutes")
+ public int timeWindowMinutes = 0;
+
+ @Parameter(names = "-f", description = "file")
+ public String prevFileName = "";
+
+ @Parameter(names = "-singleNodeType", description = "sleepMinutes")
+ public String singleNodeType = "";
+
+ Boolean finalShutdownFlag = true;
+ Boolean cacheDbOkFlag = true;
+ }
+
+ public HashMap<String, Vertex> getGhostNodeHash() {
+ return ghostNodeHash;
+ }
+
+ public void setGhostNodeHash(HashMap<String, Vertex> ghostNodeHash) {
+ this.ghostNodeHash = ghostNodeHash;
+ }
+
+ public int getGhostNodeCount(){
+ return getGhostNodeHash().size();
+ }
+
+ public HashMap<String, Vertex> getOrphanNodeHash() {
+ return orphanNodeHash;
+ }
+
+ public void setOrphanNodeHash(HashMap<String, Vertex> orphanNodeHash) {
+ this.orphanNodeHash = orphanNodeHash;
+ }
+
+ public int getOrphanNodeCount(){
+ return getOrphanNodeHash().size();
+ }
+
+ public HashMap<String, Vertex> getMissingAaiNtNodeHash() {
+ return missingAaiNtNodeHash;
+ }
+
+ public void setMissingAaiNtNodeHash(HashMap<String, Vertex> missingAaiNtNodeHash) {
+ this.missingAaiNtNodeHash = missingAaiNtNodeHash;
+ }
+
+ public int getMissingAaiNtNodeCount(){
+ return getMissingAaiNtNodeHash().size();
+ }
+
+ public HashMap<String, Edge> getOneArmedEdgeHash() {
+ return oneArmedEdgeHash;
+ }
+
+ public void setOneArmedEdgeHash(HashMap<String, Edge> oneArmedEdgeHash) {
+ this.oneArmedEdgeHash = oneArmedEdgeHash;
+ }
+
+ public int getOneArmedEdgeHashCount(){
+ return getOneArmedEdgeHash().size();
+ }
+
+ public Set<String> getDeleteCandidateList() {
+ return deleteCandidateList;
+ }
+
+ public void setDeleteCandidateList(Set<String> deleteCandidateList) {
+ this.deleteCandidateList = deleteCandidateList;
+ }
+
+ public int getDeleteCount() {
+ return deleteCount;
+ }
+
+ public void setDeleteCount(int deleteCount) {
+ this.deleteCount = deleteCount;
+ }
+
+ public ArrayList<String> getDupeGroups() {
+ return dupeGroups;
+ }
+
+ public void setDupeGroups(ArrayList<String> dupeGroups) {
+ this.dupeGroups = dupeGroups;
+ }
+
+
}
\ No newline at end of file
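Side note (not part of the patch): a minimal, self-contained sketch of how the JCommander-annotated CommandLineArgs options introduced above get parsed. The class name, the flags shown, and the defaults are illustrative; it only mirrors the new JCommander(cArgs, args) call used in execute().

import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;

public class GroomingArgsSketch {

    static class Args {
        // arity = 1 means the boolean flag takes an explicit value, e.g. "-autoFix true"
        @Parameter(names = "-autoFix", description = "run a second, fixing pass", arity = 1)
        public Boolean doAutoFix = false;

        @Parameter(names = "-maxFix", description = "maximum number of records to fix")
        public int maxRecordsToFix = 30;

        @Parameter(names = "-f", description = "previous grooming output file")
        public String prevFileName = "";
    }

    public static void main(String[] argv) {
        Args cArgs = new Args();
        // Parse argv straight into the annotated fields (same constructor style as the patch).
        JCommander jCommander = new JCommander(cArgs, argv);
        jCommander.setProgramName("DataGrooming");
        // Flags that were not passed keep the defaults declared on the fields,
        // which is why the patch can log cArgs.* immediately after parsing.
        System.out.println("autoFix=" + cArgs.doAutoFix
                + " maxFix=" + cArgs.maxRecordsToFix
                + " file=" + cArgs.prevFileName);
    }
}

Example invocation: java GroomingArgsSketch -autoFix true -maxFix 50 -f groomOut.txt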
diff --git a/src/main/java/org/onap/aai/db/schema/AuditOXM.java b/src/main/java/org/onap/aai/db/schema/AuditOXM.java
index 417824c..d7f7314 100644
--- a/src/main/java/org/onap/aai/db/schema/AuditOXM.java
+++ b/src/main/java/org/onap/aai/db/schema/AuditOXM.java
@@ -53,10 +53,12 @@ public class AuditOXM extends Auditor {
*
* @param version the version
*/
- public AuditOXM(LoaderFactory loaderFactory, SchemaVersion version) {
+ public AuditOXM(LoaderFactory loaderFactory, SchemaVersion version, EdgeIngestor ingestor) {
Loader loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, version);
Set<String> objectNames = getAllObjects(loader);
+ this.ingestor = ingestor;
+
allObjects = new HashSet<>();
for (String key : objectNames) {
try {
@@ -220,8 +222,4 @@ public class AuditOXM extends Auditor {
public Set<Introspector> getAllIntrospectors() {
return this.allObjects;
}
-
- public void setEdgeIngestor(EdgeIngestor ingestor){
- this.ingestor = ingestor;
- }
}
diff --git a/src/main/java/org/onap/aai/db/schema/AuditorFactory.java b/src/main/java/org/onap/aai/db/schema/AuditorFactory.java
index 6d96f29..40dd047 100644
--- a/src/main/java/org/onap/aai/db/schema/AuditorFactory.java
+++ b/src/main/java/org/onap/aai/db/schema/AuditorFactory.java
@@ -20,6 +20,7 @@
package org.onap.aai.db.schema;
import org.janusgraph.core.JanusGraph;
+import org.onap.aai.edges.EdgeIngestor;
import org.onap.aai.introspection.LoaderFactory;
import org.onap.aai.setup.SchemaVersions;
import org.onap.aai.setup.SchemaVersion;
@@ -37,8 +38,8 @@ public class AuditorFactory {
* @param v the v
* @return the OXM auditor
*/
- public Auditor getOXMAuditor (SchemaVersion v) {
- return new AuditOXM(loaderFactory, v);
+ public Auditor getOXMAuditor (SchemaVersion v, EdgeIngestor ingestor) {
+ return new AuditOXM(loaderFactory, v, ingestor);
}
/**
diff --git a/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java b/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java
index dccc141..24aac9f 100644
--- a/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java
+++ b/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java
@@ -26,6 +26,7 @@ import org.janusgraph.core.schema.JanusGraphIndex;
import org.janusgraph.core.schema.JanusGraphManagement;
import org.janusgraph.core.schema.JanusGraphManagement.IndexBuilder;
import org.janusgraph.core.schema.SchemaStatus;
+import org.onap.aai.edges.EdgeIngestor;
import org.onap.aai.setup.SchemaVersions;
import org.onap.aai.setup.SchemaVersion;
@@ -49,9 +50,9 @@ public class ManageJanusGraphSchema {
*
* @param graph the graph
*/
- public ManageJanusGraphSchema(final JanusGraph graph, AuditorFactory auditorFactory, SchemaVersions schemaVersions) {
+ public ManageJanusGraphSchema(final JanusGraph graph, AuditorFactory auditorFactory, SchemaVersions schemaVersions, EdgeIngestor edgeIngestor) {
this.graph = graph;
- oxmInfo = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion());
+ oxmInfo = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion(), edgeIngestor);
graphInfo = auditorFactory.getGraphAuditor(graph);
}
diff --git a/src/main/java/org/onap/aai/db/schema/ScriptDriver.java b/src/main/java/org/onap/aai/db/schema/ScriptDriver.java
index dca8e83..ebef01d 100644
--- a/src/main/java/org/onap/aai/db/schema/ScriptDriver.java
+++ b/src/main/java/org/onap/aai/db/schema/ScriptDriver.java
@@ -27,9 +27,9 @@ import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.codehaus.jackson.JsonGenerationException;
import org.onap.aai.dbmap.AAIGraphConfig;
+import org.onap.aai.edges.EdgeIngestor;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.setup.SchemaVersions;
-import org.onap.aai.setup.SchemaVersion;
import org.onap.aai.logging.LoggingContext;
import org.onap.aai.logging.LoggingContext.StatusCode;
import org.onap.aai.util.AAIConfig;
@@ -47,7 +47,6 @@ public class ScriptDriver {
* @param args the arguments
* @throws AAIException the AAI exception
* @throws JsonGenerationException the json generation exception
- * @throws JsonMappingException the json mapping exception
* @throws IOException Signals that an I/O exception has occurred.
*/
public static void main (String[] args) throws AAIException, IOException, ConfigurationException {
@@ -76,6 +75,7 @@ public class ScriptDriver {
AuditorFactory auditorFactory = ctx.getBean(AuditorFactory.class);
SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
+ EdgeIngestor edgeIngestor = ctx.getBean(EdgeIngestor.class);
String config = cArgs.config;
AAIConfig.init();
@@ -94,7 +94,7 @@ public class ScriptDriver {
AuditDoc doc = null;
if ("oxm".equals(cArgs.type)) {
- doc = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion()).getAuditDoc();
+ doc = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion(), edgeIngestor).getAuditDoc();
} else if ("graph".equals(cArgs.type)) {
doc = auditorFactory.getGraphAuditor(graph).getAuditDoc();
}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java b/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java
index 6563e23..b9fc978 100644
--- a/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java
+++ b/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java
@@ -52,6 +52,10 @@ public class OneWaySslAuthorization extends AAIContainerFilter implements Contai
public void filter(ContainerRequestContext containerRequestContext) throws IOException
{
+ if(containerRequestContext.getUriInfo().getRequestUri().getPath().matches("^.*/util/echo$")){
+ return;
+ }
+
String basicAuth = containerRequestContext.getHeaderString("Authorization");
List<MediaType> acceptHeaderValues = containerRequestContext.getAcceptableMediaTypes();
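The filter change above lets requests to the echo health-check endpoint through without an Authorization header by matching the request path against ^.*/util/echo$. A small illustrative check of that same java.lang.String.matches() call (class name and sample paths are hypothetical, not from the patch):

public class EchoPathMatchSketch {
    public static void main(String[] args) {
        String pattern = "^.*/util/echo$";
        // Matches: the filter returns early and skips the basic-auth check.
        System.out.println("/aai/util/echo".matches(pattern));                           // true
        // No match: the normal one-way SSL / basic-auth validation proceeds.
        System.out.println("/aai/v14/cloud-infrastructure/pservers".matches(pattern));   // false
    }
}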
diff --git a/src/test/java/org/onap/aai/datagrooming/DataGroomingTest.java b/src/test/java/org/onap/aai/datagrooming/DataGroomingTest.java
new file mode 100644
index 0000000..161702d
--- /dev/null
+++ b/src/test/java/org/onap/aai/datagrooming/DataGroomingTest.java
@@ -0,0 +1,278 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.datagrooming;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.*;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class DataGroomingTest extends AAISetup {
+
+ private static final EELFLogger logger = EELFManager.getInstance().getLogger(DataGroomingTest.class);
+
+ private DataGrooming dataGrooming;
+
+ private Vertex cloudRegionVertex;
+
+ private boolean setUp = false;
+
+ @Before
+ public void setup() {
+ dataGrooming = new DataGrooming(loaderFactory, schemaVersions);
+ // deleteTool.SHOULD_EXIT_VM = false;
+ JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+ boolean success = true;
+ try {
+ GraphTraversalSource g = transaction.traversal();
+ cloudRegionVertex = g.addV().property("aai-node-type", "cloud-region").property("cloud-owner", "test-owner")
+ .property("cloud-region-id", "test-region").property("source-of-truth", "JUNIT").next();
+
+ Vertex cloudRegionVertexDupe = g.addV().property("aai-node-type", "cloud-region")
+ .property("cloud-owner", "test-owner").property("cloud-region-id", "test-region")
+ .property("source-of-truth", "JUNIT").next();
+
+ Vertex cloudRegionDupe3 = g.addV().property("aai-node-type", "cloud-region")
+ .property("cloud-owner", "test-owner").property("cloud-region-id", "test-region")
+ .property("source-of-truth", "JUNIT").next();
+
+ Vertex cloudRegionDupe4 = g.addV().property("aai-node-type", "cloud-region")
+ .property("cloud-owner", "test-owner").property("cloud-region-id", "test-region")
+ .property("source-of-truth", "JUNIT").next();
+
+ Vertex cloudRegionDupe5 = g.addV().property("aai-node-type", "cloud-region")
+ .property("cloud-owner", "test-owner").property("cloud-region-id", "test-region")
+ .property("source-of-truth", "JUNIT").next();
+
+ Vertex cloudRegionVertexBadNode = g.addV().property("aai-node-type", "cloud-region")
+ .property("cloud-owner", "test-owner-noregionId").property("source-of-truth", "JUNIT").next();
+
+
+ Vertex cloudRegionVertexBadNode2 = g.addV().property("aai-node-type", "cloud-region")
+ .property("cloud-region", "test-owner-noownerId").property("source-of-truth", "JUNIT").next();
+
+ Vertex cloudRegionVertexBadNode3 = g.addV().property("aai-node-type", "cloud-region")
+ .property("cloud-region", "test-owner-noownerId2").property("source-of-truth", "JUNIT").next();
+
+ Vertex tenantGhostNodeNoNT = g.addV().property("tenant-id", "test-owner-tenant-id-1")
+ .property("source-of-truth", "JUNIT").next();
+
+ Vertex cloudRegionNoNT = g.addV().property("cloud-region", "test-owner-noownerIdnont-1")
+ .property("cloud-owner", "test-owner-noregion-nont2").property("source-of-truth", "JUNIT").next();
+
+ Vertex tenantNoNT = g.addV().property("tenant-id", "test-owner-tenant-id-1")
+ .property("source-of-truth", "JUNIT").next();
+
+ Vertex tenantNoKey = g.addV().property("aai-node-type", "tenant").property("source-of-truth", "JUNIT")
+ .next();
+
+ Vertex cloudRegionNoKey = g.addV().property("aai-node-type", "cloud-region")
+ .property("source-of-truth", "JUNIT").next();
+
+ Vertex tenantNoParent = g.addV().property("aai-node-type", "tenant")
+ .property("tenant-id", "test-owner-tenant-id").property("source-of-truth", "JUNIT").next();
+
+ Vertex tenantNoParent1 = g.addV().property("aai-node-type", "tenant")
+ .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
+
+ Vertex tenantNoParentDupe1 = g.addV().property("aai-node-type", "tenant")
+ .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
+
+ Vertex tenantNoParentDupe2 = g.addV().property("aai-node-type", "tenant")
+ .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
+
+ Vertex tenantDupe3 = g.addV().property("aai-node-type", "tenant")
+ .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
+ Vertex tenantDupe4 = g.addV().property("aai-node-type", "tenant")
+ .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
+
+ Vertex tenantNoParent2 = g.addV().property("aai-node-type", "tenant")
+ .property("tenant-id", "test-owner-tenant-id2").property("source-of-truth", "JUNIT").next();
+
+ tenantNoParent2.property("aai-uuid", tenantNoParent2.id() + "dummy");
+
+ Vertex tenantVertex = g.addV().property("aai-node-type", "tenant").property("tenant-id", "test-tenant")
+ .property("source-of-truth", "JUNIT").next();
+
+ Vertex pserverVertex = g.addV().property("aai-node-type", "pserver").property("hostname", "test-pserver")
+ .property("in-maint", false).property("source-of-truth", "JUNIT").next();
+
+ Vertex azNokey = g.addV().property("aai-node-type", "availability-zone")
+ .property("source-of-truth", "JUNIT").next();
+
+ cloudRegionVertex.addEdge("BadEdge", tenantGhostNodeNoNT, null);
+ edgeSerializer.addTreeEdge(g, cloudRegionVertex, tenantVertex);
+ edgeSerializer.addTreeEdge(g, cloudRegionVertex, tenantDupe3);
+ edgeSerializer.addTreeEdge(g, cloudRegionVertex, tenantDupe4);
+ edgeSerializer.addTreeEdge(g, cloudRegionNoKey, tenantNoKey);
+ edgeSerializer.addEdge(g, pserverVertex, azNokey);
+
+ cloudRegionNoNT.addEdge("Base Edge2", tenantNoNT, null);
+
+ } catch (Exception ex) {
+ success = false;
+ logger.error("Unable to create the vertexes", ex);
+ } finally {
+ if (success) {
+ transaction.commit();
+ } else {
+ transaction.rollback();
+ fail("Unable to setup the graph");
+ }
+ }
+ }
+
+ @Test
+ public void testGroomingNonAutoFix() throws AAIException {
+ String[] args = {
+ "-edgesOnly", "false", "-autoFix ", "false", "-skipHostCheck ", "true", "-dontFixOrphans ", "true"
+ };
+
+ dataGrooming.execute(args);
+ /*
+ * 2 GhostNodes - CloudRegions, 1 OrphanNode - tenant
+ */
+ assertThat(dataGrooming.getGhostNodeCount(), is(5));
+ assertThat(dataGrooming.getOrphanNodeCount(), is(5));
+ assertThat(dataGrooming.getMissingAaiNtNodeCount(), is(1));
+ assertThat(dataGrooming.getOneArmedEdgeHashCount(), is(3));
+ }
+
+ @Test
+ public void testGroomingWithAutoFix() throws AAIException {
+ String[] args = {
+ "-autoFix ", "true", "-edgesOnly", "false", "-skipHostCheck ", "false", "-dontFixOrphans ", "false",
+ "-skipIndexUpdateFix", "true", "-sleepMinutes", "1", "-timeWindowMinutes", "100", "-dupeFixOn", "true"
+ };
+
+ dataGrooming.execute(args);
+ assertThat(dataGrooming.getDeleteCandidateList().size(), is(19));
+ assertThat(dataGrooming.getDeleteCount(), is(18));
+ }
+
+ @Test
+ public void testGroomingUpdateIndexedProps() throws AAIException {
+
+ JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+ GraphTraversalSource g = transaction.traversal();
+ Vertex cloudRegionVertex1 = g.addV().property("aai-node-type", "cloud-region")
+ .property("cloud-owner", "test-owner-partial").property("cloud-region-id", "test-region")
+ .property("source-of-truth", "JUNIT").next();
+ dataGrooming.updateIndexedProps(cloudRegionVertex1, "1", "cloud-region", new HashMap<>(), new ArrayList<>());
+ transaction.rollback();
+ // TODO assert something
+ }
+
+ @Test
+ public void testGroomingGettersAndSetters() throws AAIException {
+
+ dataGrooming.setGhostNodeHash(new HashMap<>());
+ dataGrooming.setOrphanNodeHash(new HashMap<>());
+ dataGrooming.setMissingAaiNtNodeHash(new HashMap<>());
+ dataGrooming.setOneArmedEdgeHash(new HashMap<>());
+ dataGrooming.setDeleteCandidateList(new HashSet<>());
+ dataGrooming.setDeleteCount(0);
+
+ assertThat(dataGrooming.getGhostNodeCount(), is(0));
+ assertThat(dataGrooming.getOrphanNodeCount(), is(0));
+ assertThat(dataGrooming.getMissingAaiNtNodeCount(), is(0));
+ assertThat(dataGrooming.getOneArmedEdgeHashCount(), is(0));
+ assertThat(dataGrooming.getDeleteCandidateList().size(), is(0));
+ assertThat(dataGrooming.getDeleteCount(), is(0));
+ }
+
+ @Test
+ public void testGroomingNoArgs() throws AAIException {
+ String[] args = {
+
+ };
+ dataGrooming.execute(args);
+ assertThat(dataGrooming.getGhostNodeCount(), is(5));
+ assertThat(dataGrooming.getOrphanNodeCount(), is(5));
+ assertThat(dataGrooming.getMissingAaiNtNodeCount(), is(1));
+ assertThat(dataGrooming.getOneArmedEdgeHashCount(), is(3));
+ assertThat(dataGrooming.getDeleteCandidateList().size(), is(0));
+ assertThat(dataGrooming.getDeleteCount(), is(0));
+ }
+
+ @Test
+ public void testGroomingDupeCheck() throws AAIException {
+ String[] args = {
+ };
+
+ dataGrooming.execute(args);
+ assertThat(dataGrooming.getDupeGroups().size(), is(2));
+ }
+
+ @Test
+ public void testGroomingAutoFixMaxRecords() throws AAIException {
+
+ String[] args = { "-autoFix ", "true", "-maxFix", "0", "-edgesOnly",
+ "true" , "-sleepMinutes", "1"};
+ dataGrooming.execute(args);
+ assertThat(dataGrooming.getDeleteCandidateList().size(), is(0));
+
+ }
+
+ @After
+ public void tearDown() {
+
+ JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+ boolean success = true;
+ try {
+ GraphTraversalSource g = transaction.traversal();
+ g.V().has("source-of-truth", "JUNIT").toList().forEach(v -> v.remove());
+
+ } catch (Exception ex) {
+ success = false;
+ logger.error("Unable to remove the vertexes", ex);
+ } finally {
+ if (success) {
+ transaction.commit();
+ } else {
+ transaction.rollback();
+ fail("Unable to teardown the graph");
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/datasnapshot/DataSnapshotTest.java b/src/test/java/org/onap/aai/datasnapshot/DataSnapshotTest.java
new file mode 100644
index 0000000..63fd1fa
--- /dev/null
+++ b/src/test/java/org/onap/aai/datasnapshot/DataSnapshotTest.java
@@ -0,0 +1,378 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.datasnapshot;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+import org.springframework.boot.test.rule.OutputCapture;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.Matchers.containsString;
+import static org.junit.Assert.assertThat;
+
+public class DataSnapshotTest extends AAISetup {
+
+ private GraphTraversalSource g;
+
+ private JanusGraphTransaction currentTransaction;
+
+ private List<Vertex> vertexes;
+
+ @Rule
+ public OutputCapture outputCapture = new OutputCapture();
+
+ @Before
+ public void setup() throws AAIException {
+ JanusGraph graph = AAIGraph.getInstance().getGraph();
+ currentTransaction = graph.newTransaction();
+ g = currentTransaction.traversal();
+
+ // Setup the graph so it has one pserver vertex
+ vertexes = setupPserverData(g);
+ currentTransaction.commit();
+ }
+
+ @After
+ public void tearDown(){
+
+ JanusGraph graph = AAIGraph.getInstance().getGraph();
+ currentTransaction = graph.newTransaction();
+ g = currentTransaction.traversal();
+
+ vertexes.stream().forEach((v) -> g.V(v).next().remove());
+ currentTransaction.commit();
+ }
+
+ @Test
+ public void testClearEntireDatabaseAndVerifyDataIsRemoved() throws IOException {
+
+ // Copy the pserver.graphson file from src/test/resources to the ${AJSC_HOME}/logs/data/dataSnapshots/ folder
+ String sourceFileName = "src/test/resources/pserver.graphson";
+ String destFileName = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver.graphson";
+ copySnapshotFile(sourceFileName,destFileName);
+
+
+ // Run the dataSnapshot to clear the graph
+ String [] args = {"CLEAR_ENTIRE_DATABASE", "pserver.graphson"};
+ DataSnapshot.main(args);
+
+ // Since the code doesn't clear the graph using AAIGraph.getInstance().getGraph(), it's creating a second in-memory graph
+ // so we can't verify this by counting the vertexes and edges in the graph
+ // In the future we could do that but for now we will depend on the following string "All done clearing DB"
+
+ // Capture the standard output and see if the following text is there
+ assertThat(outputCapture.toString(), containsString("All done clearing DB"));
+ }
+
+
+ @Test
+ public void testClearEntireDatabaseWithEmptyGraphSONFileAndItShouldNotClearDatabase() throws IOException {
+
+ // Create an empty file called empty.graphson in src/test/resources/
+
+ // Copy that file to ${AJSC_HOME}/logs/data/dataSnapshots/
+ String sourceFileName = "src/test/resources/empty.graphson";
+ String destFileName = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/empty.graphson";
+ copySnapshotFile(sourceFileName,destFileName);
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"CLEAR_ENTIRE_DATABASE", "empty.graphson"};
+ DataSnapshot.main(args);
+
+ // Capture the standard output and see if the "graphson had no data" text is there
+ // Since the graphson is empty it should output that and not clear the graph
+ // Uncomment the following line after the test changes are done
+ assertThat(outputCapture.toString(), containsString("graphson had no data."));
+ }
+
+ @Test
+ public void testTakeSnapshotAndItShouldCreateASnapshotFileWithOneVertex() throws IOException, InterruptedException {
+
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ Set<Path> preSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"JUST_TAKE_SNAPSHOT"};
+
+ DataSnapshot.main(args);
+
+ // Add sleep so the file actually gets created with the data
+
+ Set<Path> postSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+
+ assertThat(postSnapshotFiles.size(), is(preSnapshotFiles.size()+1));
+ postSnapshotFiles.removeAll(preSnapshotFiles);
+ List<Path> snapshotPathList = postSnapshotFiles.stream().collect(Collectors.toList());
+
+ assertThat(snapshotPathList.size(), is(1));
+
+ List<String> fileContents = Files.readAllLines(snapshotPathList.get(0));
+ assertThat(fileContents.get(0), containsString("id"));
+ }
+
+ @Test
+ public void testTakeSnapshotMultiAndItShouldCreateMultipleSnapshotFiles() throws IOException {
+
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"THREADED_SNAPSHOT", "2"};
+
+ DataSnapshot.main(args);
+
+ // For this test if there is only one vertex in the graph, not sure if it will create multiple files
+ // would need to add more data to the janusgraph
+ }
+
+ @Test
+ public void testTakeSnapshotMultiWithDebugAndItShouldCreateMultipleSnapshotFiles() throws IOException {
+
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"THREADED_SNAPSHOT", "2", "DEBUG"};
+
+ DataSnapshot.main(args);
+
+ // For this test if there is only one vertex in the graph, not sure if it will create multiple files
+ // would need to add more data to the janusgraph
+ }
+
+
+ @Test
+ public void testTakeSnapshotMultiWithDebugAndInvalidNumberAndItShouldFail() throws IOException {
+
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"THREADED_SNAPSHOT", "foo", "DEBUG"};
+
+ DataSnapshot.main(args);
+
+ // For this test if there is only one vertex in the graph, not sure if it will create multiple files
+ // would need to add more data to the janusgraph
+ }
+
+ @Test
+ public void testTakeSnapshotMultiWithDebugAndTimeDelayAndInvalidNumberAndItShouldFail() throws IOException {
+
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"THREADED_SNAPSHOT", "foo", "DEBUG", "100"};
+
+ DataSnapshot.main(args);
+
+ // For this test if there is only one vertex in the graph, not sure if it will create multiple files
+ // would need to add more data to the janusgraph
+ }
+
+ @Test
+ public void testTakeSnapshotMultiWithDebugAndTimeDelayAndZeroThreadsAndItShouldFail() throws IOException {
+
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"THREADED_SNAPSHOT", "0", "DEBUG", "100"};
+
+ DataSnapshot.main(args);
+
+ // For this test if there is only one vertex in the graph, not sure if it will create multiple files
+ // would need to add more data to the janusgraph
+ }
+
+ @Test
+ public void testTakeSnapshotMultiWithDebugAndTimeDelayIsInvalidNumberAndItShouldFail() throws IOException {
+
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"THREADED_SNAPSHOT", "0", "DEBUG", "foo"};
+
+ DataSnapshot.main(args);
+
+ // For this test if there is only one vertex in the graph, not sure if it will create multiple files
+ // would need to add more data to the janusgraph
+ }
+
+ @Test
+ public void testTakeSnapshotMultiWithMoreParametersThanAllowedAndItShouldFail() throws IOException {
+
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"THREADED_SNAPSHOT", "0", "DEBUG", "foo", "bar"};
+
+ DataSnapshot.main(args);
+
+ // For this test if there is only one vertex in the graph, not sure if it will create multiple files
+ // would need to add more data to the janusgraph
+ }
+
+ @Test
+ public void testTakeSnapshotMultiWithZeroThreadsAndItShouldFail(){
+
+ // For this test if there is only one vertex in the graph, not sure if it will create multiple files
+ // would need to add more data to the janusgraph
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"THREADED_SNAPSHOT", "0"};
+
+ DataSnapshot.main(args);
+ }
+
+ @Test
+ public void testTakeSnapshotMultiWithInvalidNumberForThreadsAndItShouldFail(){
+
+ // For this test if there is only one vertex in the graph, not sure if it will create multiple files
+ // would need to add more data to the janusgraph
+ String logsFolder = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+ // Run the clear dataSnapshot and this time it should fail
+ String [] args = {"THREADED_SNAPSHOT", "foo"};
+
+ DataSnapshot.main(args);
+ }
+
+ @Test
+ public void testReloadDataAndVerifyDataInGraphMatchesGraphson() throws IOException {
+
+ // Create a graphson file that contains a couple of vertexes in src/test/resources
+ // Copy that file to ${AJSC_HOME}/logs/data/dataSnapshots/
+ // Run the reload arguments and ensure that the graph was recreated by checking vertexes in graph
+
+ // After reload remove the added vertexes in the graph
+ // The reason for this so each test is independent
+ // as there shouldn't be dependencies and cause weird issues
+ String sourceFileName = "src/test/resources/pserver.graphson";
+ String destFileName = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver.graphson";
+ copySnapshotFile(sourceFileName,destFileName);
+
+ String [] args = {"RELOAD_DATA", "pserver.graphson"};
+
+ DataSnapshot.main(args);
+ }
+
+ @Test
+ public void testMultiReloadDataAndVerifyDataInGraphMatchesGraphson() throws IOException {
+
+ // Create multiple graphson files that contains a couple of vertexes in src/test/resources
+ // Copy those files to ${AJSC_HOME}/logs/data/dataSnapshots/
+ // Run the reload arguments and ensure that the graph was recreated by checking vertexes in graph
+ String sourceFileName = "src/test/resources/pserver2.graphson.P0";
+ String destFileName = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver2.graphson.P0";
+ copySnapshotFile(sourceFileName,destFileName);
+
+ sourceFileName = "src/test/resources/pserver2.graphson.P1";
+ destFileName = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver2.graphson.P1";
+ copySnapshotFile(sourceFileName,destFileName);
+
+ // After reload remove the added vertexes in the graph
+ // The reason for this so each test is independent
+ // as there shouldn't be dependencies and cause weird issues
+ String [] args = {"MULTITHREAD_RELOAD", "pserver2.graphson"};
+
+ DataSnapshot.main(args);
+ }
+
+ @Test
+ public void testMultiReloadDataWithNonExistentFilesAndItShouldFail() throws IOException {
+
+ // After reload remove the added vertexes in the graph
+ // The reason for this so each test is independent
+ // as there shouldn't be dependencies and cause weird issues
+ String [] args = {"MULTITHREAD_RELOAD", "emptyfoo2.graphson"};
+
+ DataSnapshot.main(args);
+ }
+
+ @Test
+ public void testReloadMultiDataAndVerifyDataInGraphMatchesGraphson() throws IOException {
+
+ // Create multiple graphson files that contains a couple of vertexes in src/test/resources
+ // Copy those files to ${AJSC_HOME}/logs/data/dataSnapshots/
+ // Run the reload arguments and ensure that the graph was recreated by checking vertexes in graph
+ String sourceFileName = "src/test/resources/pserver2.graphson.P0";
+ String destFileName = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver2.graphson.P0";
+ copySnapshotFile(sourceFileName,destFileName);
+
+ sourceFileName = "src/test/resources/pserver2.graphson.P1";
+ destFileName = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver2.graphson.P1";
+ copySnapshotFile(sourceFileName,destFileName);
+
+ // After reload remove the added vertexes in the graph
+ // The reason for this so each test is independent
+ // as there shouldn't be dependencies and cause weird issues
+ String [] args = {"RELOAD_DATA_MULTI", "pserver2.graphson"};
+
+ DataSnapshot.main(args);
+ }
+
+ private List<Vertex> setupPserverData(GraphTraversalSource g) throws AAIException {
+ Vertex v1 = g.addV().property("aai-node-type", "pserver")
+ .property("hostname", "somerandomhostname")
+ .next();
+ List<Vertex> list = new ArrayList<>();
+ list.add(v1);
+ Vertex v2 = g.addV().property("aai-node-type", "pserver")
+ .property("hostname", "somerandomhostname2")
+ .next();
+ Vertex pinterface = g.addV()
+ .property("aai-node-type", "p-interface")
+ .property("interface-name", "p-interface-name")
+ .property("in-maint", false)
+ .property("source-of-truth", "JUNIT")
+ .next();
+ edgeSerializer.addTreeEdge(g, v2, pinterface);
+ list.add(v2);
+ return list;
+ }
+
+ private void copySnapshotFile(String sourceFileName, String destFileName) throws IOException {
+
+ File inputFile = new File(sourceFileName);
+ File outputFile = new File(destFileName);
+
+ FileUtils.copyFile(inputFile, outputFile);
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/db/schema/AuditOXMTest.java b/src/test/java/org/onap/aai/db/schema/AuditOXMTest.java
new file mode 100644
index 0000000..cba202a
--- /dev/null
+++ b/src/test/java/org/onap/aai/db/schema/AuditOXMTest.java
@@ -0,0 +1,60 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.onap.aai.AAISetup;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersion;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.AdditionalMatchers.not;
+
+public class AuditOXMTest extends AAISetup {
+
+ private AuditOXM auditOXM;
+
+ @Before
+ public void setUp() {
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ @Test
+ public void getAllIntrospectors() {
+ auditOXM = new AuditOXM(loaderFactory, schemaVersions.getDefaultVersion(), edgeIngestor);
+ assertTrue(auditOXM.getAllIntrospectors().size() > 0);
+ }
+
+ @Test
+ public void setEdgeIngestor() {
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/dbgen/DupeToolTest.java b/src/test/java/org/onap/aai/dbgen/DupeToolTest.java
index 392ce97..1d3228e 100644
--- a/src/test/java/org/onap/aai/dbgen/DupeToolTest.java
+++ b/src/test/java/org/onap/aai/dbgen/DupeToolTest.java
@@ -100,7 +100,7 @@ public class DupeToolTest extends AAISetup {
}
@Test
- public void testDupeTool(){
+ public void testDupeToolForPInterface(){
//TODO: test does not find duplicates
String[] args = {
"-userId", "testuser",
diff --git a/src/test/java/org/onap/aai/schema/db/ManageSchemaTest.java b/src/test/java/org/onap/aai/schema/db/ManageSchemaTest.java
index ddaad21..6d62098 100644
--- a/src/test/java/org/onap/aai/schema/db/ManageSchemaTest.java
+++ b/src/test/java/org/onap/aai/schema/db/ManageSchemaTest.java
@@ -98,7 +98,7 @@ public class ManageSchemaTest extends AAISetup {
" } ]\r\n" +
" }";
DBIndex index = mapper.readValue(content, DBIndex.class);
- ManageJanusGraphSchema schema = new ManageJanusGraphSchema(graph, auditorFactory, schemaVersions);
+ ManageJanusGraphSchema schema = new ManageJanusGraphSchema(graph, auditorFactory, schemaVersions, edgeIngestor);
JanusGraphManagement mgmt = graph.openManagement();
Set<String> instances = mgmt.getOpenInstances();
System.out.println(instances);
diff --git a/src/test/resources/empty.graphson b/src/test/resources/empty.graphson
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/test/resources/empty.graphson
diff --git a/src/test/resources/pserver.graphson b/src/test/resources/pserver.graphson
new file mode 100644
index 0000000..8746d13
--- /dev/null
+++ b/src/test/resources/pserver.graphson
@@ -0,0 +1 @@
+{"id":196616,"label":"vertex","properties":{"ptnii-equip-name":[{"id":"5c01-47pk-4xs5","value":"somename"}],"aai-last-mod-ts":[{"id":"5ce9-47pk-5u6d","value":1476896662}],"equip-type":[{"id":"5csh-47pk-34zp","value":"server"}],"equip-vendor":[{"id":"5d6p-47pk-35s5","value":"HP"}],"fqdn":[{"id":"5dkx-47pk-50xx","value":"mmsksrsv113.mmsks.sbcglobal.net"}],"purpose":[{"id":"5dz5-47pk-56h1","value":"ControlPlane"}],"aai-created-ts":[{"id":"5edd-47pk-5rt1","value":1476495449}],"ipv4-oam-address":[{"id":"5erl-47pk-1udh","value":"12.80.38.87"}],"source-of-truth":[{"id":"5f5t-47pk-5uyt","value":"RCT"}],"aai-node-type":[{"id":"5fk1-47pk-5q85","value":"pserver"}],"hostname":[{"id":"5fy9-47pk-4w79","value":"someval"}],"in-maint":[{"id":"5gch-47pk-229x","value":false}],"equip-model":[{"id":"5gqp-47pk-36kl","value":"some-val"}],"resource-version":[{"id":"5h4x-47pk-25fp","value":"1476896662"}],"last-mod-source-of-truth":[{"id":"5hj5-47pk-5pfp","value":"somename"}]}}
diff --git a/src/test/resources/pserver2.graphson.P0 b/src/test/resources/pserver2.graphson.P0
new file mode 100644
index 0000000..59c9a7d
--- /dev/null
+++ b/src/test/resources/pserver2.graphson.P0
@@ -0,0 +1 @@
+{"id":4240,"label":"vertex","properties":{"hostname":[{"id":"sy-39s-c3d1","value":"somerandomhostname"}],"aai-node-type":[{"id":"176-39s-5xc5","value":"pserver"}]}}
diff --git a/src/test/resources/pserver2.graphson.P1 b/src/test/resources/pserver2.graphson.P1
new file mode 100644
index 0000000..5a0ece5
--- /dev/null
+++ b/src/test/resources/pserver2.graphson.P1
@@ -0,0 +1,2 @@
+{"id":8336,"label":"vertex","inE":{"tosca.relationships.network.BindsTo":[{"id":"4r6-9lc-f11-6fk","outV":12432,"properties":{"private":false,"aai-uuid":"42904b97-7d39-485d-8ac4-6a8022d714f2","prevent-delete":"NONE","delete-other-v":"IN","contains-other-v":"IN"}}]},"properties":{"hostname":[{"id":"1zm-6fk-c3d1","value":"somerandomhostname2"}],"aai-node-type":[{"id":"2du-6fk-5xc5","value":"pserver"}]}}
+{"id":12432,"label":"vertex","outE":{"tosca.relationships.network.BindsTo":[{"id":"4r6-9lc-f11-6fk","inV":8336,"properties":{"private":false,"aai-uuid":"42904b97-7d39-485d-8ac4-6a8022d714f2","prevent-delete":"NONE","delete-other-v":"IN","contains-other-v":"IN"}}]},"properties":{"in-maint":[{"id":"36a-9lc-2685","value":false}],"interface-name":[{"id":"3ki-9lc-3u9x","value":"p-interface-name"}],"source-of-truth":[{"id":"3yq-9lc-622t","value":"JUNIT"}],"aai-node-type":[{"id":"4cy-9lc-5xc5","value":"p-interface"}]}}