Diffstat (limited to 'src/main/java/org/onap')
36 files changed, 2204 insertions, 354 deletions
diff --git a/src/main/java/org/onap/aai/GraphAdminApp.java b/src/main/java/org/onap/aai/GraphAdminApp.java index 3f7abf3..1339604 100644 --- a/src/main/java/org/onap/aai/GraphAdminApp.java +++ b/src/main/java/org/onap/aai/GraphAdminApp.java @@ -19,16 +19,21 @@ */ package org.onap.aai; +import com.att.eelf.configuration.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.apache.commons.lang3.exception.ExceptionUtils; import org.onap.aai.aailog.logs.AaiDebugLog; -import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.restclient.PropertyPasswordConfiguration; import org.onap.aai.dbmap.AAIGraph; +import java.util.Properties; import org.onap.aai.exceptions.AAIException; import org.onap.aai.logging.ErrorLogHelper; +import org.onap.aai.logging.LogFormatTools; import org.onap.aai.nodes.NodeIngestor; import org.onap.aai.util.AAIConfig; +import org.onap.aai.util.AAIConstants; import org.onap.aai.util.ExceptionTranslator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; @@ -36,6 +41,7 @@ import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration; import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.EnableAspectJAutoProxy; import org.springframework.core.env.Environment; import org.springframework.scheduling.annotation.EnableAsync; import org.springframework.scheduling.annotation.EnableScheduling; @@ -61,10 +67,13 @@ import java.util.UUID; "org.onap.aai.datagrooming", "org.onap.aai.dataexport", "org.onap.aai.datacleanup", - "org.onap.aai.aailog" + "org.onap.aai.aailog", + "org.onap.aai.failover", + "org.onap.aai.audit" }) @EnableAsync @EnableScheduling +@EnableAspectJAutoProxy @EnableAutoConfiguration(exclude = {DataSourceAutoConfiguration.class, HibernateJpaAutoConfiguration.class}) public class GraphAdminApp { diff --git a/src/main/java/org/onap/aai/audit/AuditGraphson2Sql.java b/src/main/java/org/onap/aai/audit/AuditGraphson2Sql.java new file mode 100644 index 0000000..ff29157 --- /dev/null +++ b/src/main/java/org/onap/aai/audit/AuditGraphson2Sql.java @@ -0,0 +1,605 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.aai.audit; + +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import org.apache.tinkerpop.gremlin.structure.Direction; +import org.javatuples.Triplet; +import org.onap.aai.restclient.PropertyPasswordConfiguration; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.edges.EdgeRule; +import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException; +import org.onap.aai.exceptions.AAIException; +import org.onap.aai.introspection.Loader; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.introspection.ModelType; +import org.onap.aai.logging.ErrorLogHelper; +import org.onap.aai.rest.client.ApertureService; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.util.AAIConfig; +import org.onap.aai.util.AAIConstants; +import org.onap.aai.util.ExceptionTranslator; +import org.onap.aai.util.FormatDate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.MDC; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; + +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.stream.Collectors; + +import com.att.eelf.configuration.Configuration; +import org.springframework.stereotype.Component; + +@Component +public class AuditGraphson2Sql { + + private EdgeIngestor ei; + private Loader loader; + private static final Logger LOGGER = LoggerFactory.getLogger(AuditGraphson2Sql.class.getSimpleName()); + private SchemaVersions schemaVersions; + private ApertureService apertureService; + + public static final String DEFAULT_SRC_DIR = "logs/data/dataSnapshots/"; + public static final int DEFAULT_THRESHOLD_PERCENT = 10; + public static final String DEFAULT_OUTPUT_DIR = "logs/data/audit"; + + //DEBUG -- should be getting default-src-dir, default-output-dir and rdbms-db-name from param file + @Autowired + public AuditGraphson2Sql( EdgeIngestor ei, SchemaVersions schemaVersions, LoaderFactory loaderFactory, ApertureService apertureService) { + this.schemaVersions = schemaVersions; + this.loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion()); + this.ei = ei; + this.apertureService = apertureService; + } + + public String runAudit(String dbName, String sourceFName, String sourceDir ) + throws Exception { + + if( sourceDir == null || sourceDir.isEmpty() ){ + sourceDir = DEFAULT_SRC_DIR; + } + + HashMap <String,Integer> gsonTableCounts = new HashMap <> (); + try { + gsonTableCounts = getCountsFromGraphsonSnapshot(dbName, sourceFName, sourceDir); + } catch( Exception e ) { + LOGGER.error(" ERROR when calling getCountsFromGraphsonSnapshot(" + + dbName + "," + + sourceFName + "). Exception = " + + e.getMessage() ); + throw e; + } + + long timestamp = 0L; + try { + timestamp = getTimestampFromFileName(sourceFName); + } catch( Exception e ) { + LOGGER.error(" ERROR getting timestamp from filename: " + + e.getMessage() ); + throw e; + } + + JsonObject rdbmsJsonResults = new JsonObject (); + try { + rdbmsJsonResults = getRdbmsTableCounts(timestamp, dbName); + } catch( Exception e ) { + LOGGER.error(" ERROR getting table count info from mySql. timestamp = [" + + timestamp + "], dbName = [" + dbName + + "]. 
Exception = " + e.getMessage() ); + throw e; + } + HashMap <String,Integer> sqlNodeCounts = getNodeCountsFromSQLRes(rdbmsJsonResults); + HashMap <String,Integer> sqlEdgeCounts = getEdgeCountsFromSQLRes(rdbmsJsonResults); + + String resultString = "Audit ran and logged to file: "; + try { + String titleInfo = "Comparing data from GraphSon file: " + + sourceFName + ", and Aperture data using timeStamp = [" + + timestamp + "] "; + String outFileName = compareResults(gsonTableCounts, sqlNodeCounts, + sqlEdgeCounts, DEFAULT_OUTPUT_DIR, titleInfo); + resultString = resultString + outFileName; + } + catch( IOException ie) { + LOGGER.error(" ERROR writing to output file. [" + ie.getMessage() + "]" ); + throw new Exception( ie.getMessage() ); + } + + return resultString; + + } + + + public String compareResults(HashMap <String,Integer> gsonTableCounts, + HashMap <String,Integer> sqlNodeCounts, + HashMap <String,Integer> sqlEdgeCounts, + String outputDir, String titleInfo) + throws IOException { + + FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT"); + String dteStr = fd.getDateTime(); + String fName = outputDir + "/" + "gson-sql-audit" + dteStr; + File auditOutFile = new File(fName); + auditOutFile.getParentFile().mkdirs(); + BufferedWriter outWriter = new BufferedWriter(new FileWriter(auditOutFile)); + outWriter.write(titleInfo); + outWriter.newLine(); + outWriter.write("Totals from Graphson: " + getGsonTotals(gsonTableCounts)); + outWriter.newLine(); + outWriter.write("Totals from mySql: " + getMSqlTotals(sqlNodeCounts,sqlEdgeCounts)); + outWriter.newLine(); + outWriter.newLine(); + + for (Map.Entry<String, Integer> entry : gsonTableCounts.entrySet()) { + String tblKey = entry.getKey(); + int gVal = entry.getValue(); + if(!sqlNodeCounts.containsKey(tblKey) && !sqlEdgeCounts.containsKey(tblKey)) { + String msg = "> Entry for Table: [" + tblKey + + "] not found in the SQL db but had " + + gVal + " entries in the GraphSon db"; + LOGGER.error(msg); + System.out.println(msg); + outWriter.write(msg); + outWriter.newLine(); + continue; + } + int sVal = 0; + if( sqlNodeCounts.containsKey(tblKey) ) { + sVal = sqlNodeCounts.get(tblKey); + }else { + sVal = sqlEdgeCounts.get(tblKey); + } + if ((gVal > 0) && (sVal == 0)){ + String msg = "For Table [" + tblKey + "], GSon count = " + + gVal + ", zero found in SQL db."; + LOGGER.error(msg); + System.out.println(msg); + outWriter.write(msg); + outWriter.newLine(); + } + String msg = tblKey + ": gson/sql = " + gVal + "/" + sVal; + LOGGER.debug(msg); + System.out.println(msg); + outWriter.write(msg); + outWriter.newLine(); + } + for (Map.Entry<String, Integer> entry : sqlNodeCounts.entrySet()) { + // check for Node types that are no longer on the Graphson side, but are + // still in the SQL DB + String tblKey = entry.getKey(); + int sVal = entry.getValue(); + if(!gsonTableCounts.containsKey(tblKey)) { + String msg = " Entry for Table [" + tblKey + + "] not found in the Graphson db, but had " + + sVal + " entries in the SQL db."; + LOGGER.error(msg); + System.out.println(msg); + outWriter.write(msg); + outWriter.newLine(); + continue; + } + } + for (Map.Entry<String, Integer> entry : sqlEdgeCounts.entrySet()) { + // check for Edge+Label combos that are no longer on the Graphson side, but are + // still in the SQL DB + String tblKey = entry.getKey(); + int sVal = entry.getValue(); + if(!gsonTableCounts.containsKey(tblKey)) { + String msg = " Entry for edge+Label combo [" + tblKey + + "] not found in the Graphson db, but had " + + sVal + " entries in the SQL db."; 
+ LOGGER.error(msg); + System.out.println(msg); + outWriter.write(msg); + outWriter.newLine(); + continue; + } + } + outWriter.close(); + String msg = "Audit Results written to: " + fName; + LOGGER.debug(msg); + System.out.println(msg); + return fName; + + } + + + public HashMap <String,Integer> getNodeCountsFromSQLRes(JsonObject sqlJsonData) { + + HashMap<String,Integer> nodeCtHash = new HashMap<>(); + if( sqlJsonData != null ) { + for (Object e : sqlJsonData.entrySet()) { + Map.Entry entry = (Map.Entry) e; + String tableName = String.valueOf(entry.getKey()); + if (!tableName.startsWith("edge__")) { + nodeCtHash.put(String.valueOf(entry.getKey()), Integer.parseInt(entry.getValue().toString())); + } + } + } + return nodeCtHash; + } + + + public HashMap <String,Integer> getEdgeCountsFromSQLRes(JsonObject sqlJsonData){ + + HashMap<String,Integer> edgeCtHash = new HashMap<>(); + if( sqlJsonData != null ) { + for (Object e : sqlJsonData.entrySet()) { + Map.Entry entry = (Map.Entry) e; + String tableName = String.valueOf(entry.getKey()); + if (tableName.startsWith("edge__")) { + edgeCtHash.put(String.valueOf(entry.getKey()), Integer.parseInt(entry.getValue().toString())); + + } + } + } + return edgeCtHash; + } + + + public JsonObject getRdbmsTableCounts(long timestamp, String dbName) + throws Exception { + + return apertureService.runAudit(timestamp, dbName); + } + + + public String getGsonTotals(HashMap <String,Integer> tableCounts){ + int nodeCount = 0; + int edgeCount = 0; + for (Map.Entry<String, Integer> entry : tableCounts.entrySet()) { + String tblKey = entry.getKey(); + if( tblKey != null && tblKey.startsWith("edge__")){ + edgeCount = edgeCount + entry.getValue(); + } else { + nodeCount = nodeCount + entry.getValue(); + } + } + String countStr = " nodes = " + nodeCount + ", edges = " + edgeCount; + return countStr; + } + + public String getMSqlTotals(HashMap <String,Integer> nodeCounts, + HashMap <String,Integer> edgeCounts){ + int nodeCount = 0; + int edgeCount = 0; + for (Map.Entry<String, Integer> entry : nodeCounts.entrySet()) { + nodeCount = nodeCount + entry.getValue(); + } + for (Map.Entry<String, Integer> entry : edgeCounts.entrySet()) { + edgeCount = edgeCount + entry.getValue(); + } + String countStr = " nodes = " + nodeCount + ", edges = " + edgeCount; + return countStr; + } + + public Long getTimestampFromFileName(String fName) throws Exception { + // Note -- we are expecting filenames that look like our + // current snapshot filenames (without the ".PXX" suffix) + // Ie. dataSnapshot.graphSON.201908151845 + + String datePiece = getDateTimePieceOfFileName(fName); + final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmm"); + Date date = null; + try { + date = dateFormat.parse(datePiece); + } + catch (ParseException pe) { + throw new Exception ("Error trying to parse this to a Date-Timestamp [" + + datePiece + "]: " + pe.getMessage() ); + } + final long timestamp = date.getTime(); + return timestamp; + + } + + + public String getDateTimePieceOfFileName(String fName) throws Exception { + // Note -- we are expecting filenames that look like our + // current snapshot filenames (without the ".PXX" suffix) + // Ie. 
dataSnapshot.graphSON.201908151845 + + if( fName == null || fName.isEmpty() || fName.length() < 12) { + throw new Exception ("File name must end with .yyyymmddhhmm "); + } + int index = fName.lastIndexOf('.'); + + if ( index == -1 ) { + throw new Exception ("File name must end with .yyyymmddhhmm "); + } + index++; + if(fName.length() <= index) { + throw new Exception ("File name must end with .yyyymmddhhmm "); + } + String datePiece = fName.substring(index); + if( datePiece.length() != 12 ) { + throw new Exception ("File name must end with a date in the format .yyyymmddhhmm "); + } + + return datePiece; + } + + + public Map<String, String> getEdgeMapKeys() throws EdgeRuleNotFoundException { + + int edKeyCount = 0; + Map<String, String> edgeKeys = new HashMap<>(); + // EdgeKey will look like: nodeTypeA__nodeTypeB + // The value will be the key with "edge__" pre-pended + + for (Map.Entry<String, Collection<EdgeRule>> rulePairings : ei.getAllCurrentRules().asMap().entrySet()) { + for (EdgeRule er : rulePairings.getValue()) { + String a = er.getTo(); + String b = er.getFrom(); + if (a.compareTo(b) >= 0) { + er.flipDirection(); + } + if (!loader.getAllObjects().containsKey(er.getTo()) || !loader.getAllObjects().containsKey(er.getFrom())) { + // we don't have one of these nodeTypes, so skip this entry + continue; + } + + final String to = er.getTo().replace("-", "_"); + final String from = er.getFrom().replace("-", "_"); + final String edKey = from + "__" + to; + + if (edgeKeys.containsKey(edKey)) { + continue; + } + edKeyCount++; + edgeKeys.put(edKey, "edge__"+ edKey); + + } + } + System.out.println("DEBUG --> There are " + edKeyCount + " edge table keys defined. " ); + LOGGER.debug(" -- There are " + edKeyCount + " edge table keys defined. " ); + return edgeKeys; + } + + + public HashMap <String,Integer> getCountsFromGraphsonSnapshot(String databaseName, + String fileNamePart, String srcDir) throws Exception { + + final JsonParser jsonParser = new JsonParser(); + final Set<String> uris = new HashSet<>(); + final Set<String> uuids = new HashSet<>(); + final Map<Long, String> idToUri = new HashMap<>(); + final Map<Long, String> idToType = new HashMap<>(); + final Map<Long, String> idToUuid = new HashMap<>(); + final Set<Triplet<Long, Long, String>> idIdLabelOfEdges = new HashSet<>(); + + final HashMap<String,Integer> tableCountHash = new HashMap<>(); + + + String sourceDir = DEFAULT_SRC_DIR; // Default for now + if( srcDir != null && srcDir.length() > 1 ) { + // If they passed one in, then we'll use it. + sourceDir = srcDir; + } + String graphsonDir = sourceDir + "/"; + + if( fileNamePart == null || fileNamePart.trim().isEmpty() ){ + String msg = "ERROR -- fileName is required to be passed in. 
"; + LOGGER.error(msg); + System.out.println(msg); + throw new Exception(msg); + } + + final List<File> graphsons = Files.walk(Paths.get(graphsonDir)) + .filter(Files::isRegularFile) + .map(Path::toFile) + .sorted() + .collect(Collectors.toList()); + + int skippedNodeCount = 0; + int nodeCounter = 0; + int edgeCounter = 0; + for (File graphson : graphsons) { + if( !graphson.getName().contains(fileNamePart) ) { + continue; + } + + try (BufferedReader reader = new BufferedReader(new FileReader(graphson))) { + String msg = "Processing snapshot file " + graphson.getName(); + LOGGER.debug(msg); + System.out.println(msg); + String line; + + while ((line = reader.readLine()) != null) { + JsonObject vertex = jsonParser.parse(line).getAsJsonObject(); + long id = vertex.get("id").getAsLong(); + + if ((vertex.get("properties") == null) || + !vertex.get("properties").getAsJsonObject().has("aai-uri") || + !vertex.get("properties").getAsJsonObject().has("aai-node-type") || + !vertex.get("properties").getAsJsonObject().has("aai-uuid")) { + + msg = "DEBUG -- Could not find keys for this line: [" + + line + "] ------"; + LOGGER.debug(msg); + System.out.println(msg); + skippedNodeCount++; + continue; + } + + String uri = vertex.get("properties").getAsJsonObject().get("aai-uri").getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString(); + String nodeType = vertex.get("properties").getAsJsonObject().get("aai-node-type").getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString(); + String nodeTypeKey = nodeType.replaceAll("-", "_"); + String uuid = vertex.get("properties").getAsJsonObject().get("aai-uuid").getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString(); + + try { + loader.introspectorFromName(nodeType); + } catch (Exception e) { + msg = "DEBUG -- loader introspector for nodeType error: [" + + e.getMessage() + "], [" + e.getLocalizedMessage() + "]------"; + LOGGER.debug(msg); + System.out.println(msg); + skippedNodeCount++; + continue; + } + + if (uris.contains(uri)) { + msg = "DEBUG -- SKIP Uri because it has been seen before: [" + + uri + "] ------"; + LOGGER.debug(msg); + System.out.println(msg); + skippedNodeCount++; + continue; + } + else if (uuids.contains(uuid)) { + msg = "DEBUG -- SKIP UUID because it has been seen before: [" + + uuid + "] ------"; + LOGGER.debug(msg); + System.out.println(msg); + skippedNodeCount++; + continue; + } + + uris.add(uri); + uuids.add(uuid); + idToUri.put(id, uri); + idToType.put(id, nodeType); + idToUuid.put(id, uuid); + + // Collect Edge Info for this node + if (vertex.has("inE")) { + vertex.get("inE").getAsJsonObject().entrySet().forEach(es -> { + String label = es.getKey(); + es.getValue().getAsJsonArray().forEach(e -> { + long otherId = e.getAsJsonObject().get("outV").getAsLong(); + idIdLabelOfEdges.add(new Triplet<>(id, otherId, label)); + }); + }); + } + + if( !tableCountHash.containsKey(nodeTypeKey)) { + int ct = 1; + tableCountHash.put(nodeTypeKey, ct); + } + else { + int tmpCt = tableCountHash.get(nodeTypeKey); + tmpCt++; + tableCountHash.remove(nodeTypeKey); + tableCountHash.put(nodeTypeKey, tmpCt); + } + nodeCounter++; + }// end of looping over this file + } catch (IOException e) { + String msg = "DEBUG -- Error while processing nodes ------"; + LOGGER.debug(msg); + System.out.println(msg); + e.printStackTrace(); + } + }// End of looping over all files + + String msg = "DEBUG -- Found this many Kinds of nodes: " + tableCountHash.size(); + LOGGER.debug(msg); + System.out.println(msg); + + msg = "DEBUG -- Found this many 
total nodes: " + nodeCounter; + LOGGER.debug(msg); + System.out.println(msg); + + msg = " >> Skipped a total of " + skippedNodeCount + " Node Records ------"; + LOGGER.debug(msg); + System.out.println(msg); + + + msg = "DEBUG -- Begin Processing Edges ------"; + LOGGER.debug(msg); + System.out.println(msg); + + int edgeTableCounter = 0; + int edgeSkipCounter = 0; + + Map<String, String> edgeKeys = this.getEdgeMapKeys(); + for (Triplet<Long, Long, String> edge : idIdLabelOfEdges) { + if (!idToType.containsKey(edge.getValue0())) { + LOGGER.info(" Edge Skipped because ID not found: [" + edge.getValue0() + "]"); + System.out.println(" Edge Skipped because ID not found: [" + edge.getValue0() + "]"); + edgeSkipCounter++; + continue; + } + else if (!idToType.containsKey(edge.getValue1())) { + System.out.println(" Edge Skipped because ID not found: [" + edge.getValue1() + "]"); + LOGGER.info(" Edge Skipped because ID not found: [" + edge.getValue1() + "]"); + edgeSkipCounter++; + continue; + } + else { + String colA = idToType.get(edge.getValue1()).replace("-","_"); + String colB = idToType.get(edge.getValue0()).replace("-","_"); + + String edLabel = edge.getValue2(); // if we start using edLabel to sort + String edKey = colA + "__" + colB; + // Note the edgeKeys table has nodeTypeX__nodeTypeY as the key + // The value stored for that key just has "edge__" pre-pended which + // is the key used by the tableCount thing + String tcEdKey = ""; + if (!edgeKeys.containsKey(edKey)) { + tcEdKey = edgeKeys.get(colB + "__" + colA ); + } else { + tcEdKey = edgeKeys.get(edKey); + } + + if( !tableCountHash.containsKey(tcEdKey)) { + int ct = 1; + tableCountHash.put(tcEdKey, ct); + edgeTableCounter++; + } + else { + int tmpCt = tableCountHash.get(tcEdKey); + tmpCt++; + tableCountHash.remove(tcEdKey); + tableCountHash.put(tcEdKey, tmpCt); + } + edgeCounter++; + } + } + + msg = " Processed a total of " + edgeCounter + " Edge Records ------"; + LOGGER.debug(msg); + System.out.println(msg); + + msg = " Found data for this many edgeTables: " + edgeTableCounter; + LOGGER.debug(msg); + System.out.println(msg); + + msg = " >> Skipped a total of " + edgeSkipCounter + " Edge Records ------"; + LOGGER.debug(msg); + System.out.println(msg); + + return tableCountHash; + + } + + + +} diff --git a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java index 8fc6295..99b1619 100644 --- a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java +++ b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java @@ -18,14 +18,25 @@ * ============LICENSE_END========================================================= */ package org.onap.aai.datacleanup; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.nio.file.Files; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.FileTime; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; import org.onap.aai.aailog.logs.AaiScheduledTaskAuditLog; import org.onap.aai.logging.ErrorLogHelper; import org.onap.aai.logging.LogFormatTools; +import org.onap.aai.exceptions.AAIException; import org.onap.aai.util.AAIConfig; import org.onap.aai.util.AAIConstants; import org.onap.logging.filter.base.ONAPComponents; -import org.onap.logging.ref.slf4j.ONAPLogConstants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.springframework.beans.factory.annotation.Autowired; @@ -33,18 +44,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.nio.file.Files; -import java.nio.file.attribute.BasicFileAttributes; -import java.nio.file.attribute.FileTime; -import java.text.SimpleDateFormat; -import java.util.Calendar; -import java.util.Date; -import java.util.zip.ZipEntry; -import java.util.zip.ZipOutputStream; - @Component @PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties") public class DataCleanupTasks { @@ -73,6 +72,7 @@ public class DataCleanupTasks { String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE"; String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming"; File path = new File(dataGroomingDir); + File archivepath = new File(archiveDir); File dataGroomingPath = new File(dataGroomingArcDir); logger.debug("The logDir is " + logDir); @@ -138,7 +138,9 @@ public class DataCleanupTasks { * */ public boolean directoryExists(String dir) { - return new File(dir).exists(); + File path = new File(dir); + boolean exists = path.exists(); + return exists; } public Date getZipDate(Integer days) { @@ -177,7 +179,7 @@ public class DataCleanupTasks { logger.debug("Inside the archive folder"); String filename = file.getName(); logger.debug("file name is " +filename); - + String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename; File dataGroomingPath = new File(afterArchiveDir); @@ -230,8 +232,10 @@ public class DataCleanupTasks { */ @Scheduled(cron = "${datasnapshotcleanup.cron}" ) public void dataSnapshotCleanup() { - - logger.info(ONAPLogConstants.Markers.ENTRY, "Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date())); + + auditLog.logBefore("dataSnapshotCleanup", ONAPComponents.AAI.toString() ); + + logger.debug("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date())); try { String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs"; @@ -289,11 +293,88 @@ public class DataCleanupTasks { } } } + dmaapEventsDataCleanup(newAgeDelete); + dataMigrationCleanup(); } catch (Exception e) { ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+LogFormatTools.getStackTop(e)); logger.debug("AAI_4000", "Exception running cron job for DataCleanup"+LogFormatTools.getStackTop(e)); } - logger.info(ONAPLogConstants.Markers.EXIT, "Ended cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date())); - } + logger.debug("Ended cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date())); + auditLog.logAfter(); + } + public void dmaapEventsDataCleanup(Date deleteAge) { + + logger.debug("Started dmaapEventsDataCleanup @ " + simpleDateFormat.format(new Date())); + + try { + String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs"; + String dmaapEventsDataDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dmaapEvents"; + File path = new File(dmaapEventsDataDir); + + logger.debug("The logDir is " + logDir); + logger.debug("The dmaapEventsDataDir is " + dmaapEventsDataDir); + + //Iterate through the files + File[] listFiles = path.listFiles(); + if(listFiles != null) { + for(File listFile : listFiles) { + if(listFile.isFile()){ + logger.debug("The file name in dmaapEvents is: 
" +listFile.getName()); + Date fileCreateDate = fileCreationMonthDate(listFile); + logger.debug("The fileCreateDate in dmaapEvents is " + fileCreateDate); + if( fileCreateDate.compareTo(deleteAge) < 0) { + delete(listFile); + logger.debug("Deleted " + listFile.getName()); + } + } + } + } + + } + catch (Exception e) { + ErrorLogHelper.logError("AAI_4000", "Exception in dmaapEventsDataCleanup"); + logger.debug("AAI_4000", "Exception in dmaapEventsDataCleanup "+LogFormatTools.getStackTop(e)); + } + logger.debug("Ended cron dmaapEventsDataCleanup @ " + simpleDateFormat.format(new Date())); + } + + public void dataMigrationCleanup() throws AAIException { + Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datamigration.agedelete"); + + Date deleteAge = getZipDate(ageDeleteSnapshot); + + logger.debug("Started dataMigrationCleanup @ " + simpleDateFormat.format(new Date())); + + try { + String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs"; + String dataMigrationCleanupDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "migration-input-files"; + File path = new File(dataMigrationCleanupDir); + + logger.debug("The logDir is " + logDir); + logger.debug("The migrationInputFilesDir is " + dataMigrationCleanupDir); + + //Iterate through the files + File[] listFiles = path.listFiles(); + if(listFiles != null) { + for(File listFile : listFiles) { + if(listFile.isFile()){ + logger.debug("The file name in migration-input-files is: " +listFile.getName()); + Date fileCreateDate = fileCreationMonthDate(listFile); + logger.debug("The fileCreateDate in migration-input-files is " + fileCreateDate); + if( fileCreateDate.compareTo(deleteAge) < 0) { + delete(listFile); + logger.debug("Deleted " + listFile.getName()); + } + } + } + } + + } + catch (Exception e) { + ErrorLogHelper.logError("AAI_4000", "Exception in dataMigrationCleanup"); + logger.debug("AAI_4000", "Exception in dataMigrationCleanup "+LogFormatTools.getStackTop(e)); + } + logger.debug("Ended cron dataMigrationCleanup @ " + simpleDateFormat.format(new Date())); + } } diff --git a/src/main/java/org/onap/aai/dataexport/DataExportTasks.java b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java index 2d0625c..028c729 100644 --- a/src/main/java/org/onap/aai/dataexport/DataExportTasks.java +++ b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java @@ -19,15 +19,29 @@ */ package org.onap.aai.dataexport; -import com.att.eelf.configuration.Configuration; -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.comparator.LastModifiedFileComparator; -import org.apache.commons.io.filefilter.DirectoryFileFilter; -import org.apache.commons.io.filefilter.FileFileFilter; -import org.apache.commons.io.filefilter.RegexFileFilter; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileFilter; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.NavigableMap; +import java.util.Properties; +import java.util.TreeMap; +import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + import org.onap.aai.aailog.logs.AaiScheduledTaskAuditLog; import org.onap.aai.dbgen.DynamicPayloadGenerator; import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.exceptions.AAIException; import org.onap.aai.introspection.LoaderFactory; import 
org.onap.aai.logging.ErrorLogHelper; import org.onap.aai.logging.LogFormatTools; @@ -35,18 +49,22 @@ import org.onap.aai.setup.SchemaVersions; import org.onap.aai.util.AAIConfig; import org.onap.aai.util.AAIConstants; import org.onap.logging.filter.base.ONAPComponents; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.onap.logging.ref.slf4j.ONAPLogConstants; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.PropertySource; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; -import java.io.*; -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import com.att.eelf.configuration.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.MDC; +import org.apache.commons.io.comparator.LastModifiedFileComparator; +import org.apache.commons.io.filefilter.DirectoryFileFilter; +import org.apache.commons.io.filefilter.FileFileFilter; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.filefilter.RegexFileFilter; /** * DataExportTasks obtains a graph snapshot and invokes DynamicPayloadGenerator @@ -54,11 +72,12 @@ import java.util.regex.Pattern; */ @Component @PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties") +@ConditionalOnProperty(prefix = "dataexporttask", name = "cron") public class DataExportTasks { private static final Logger LOGGER; private static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss"); - + static { System.setProperty("aai.service.name", DataExportTasks.class.getSimpleName()); Properties props = System.getProperties(); @@ -94,11 +113,19 @@ public class DataExportTasks { /** * The exportTask method. 
* + * @throws AAIException, Exception */ - public void exportTask() throws Exception { - AaiScheduledTaskAuditLog auditLog = new AaiScheduledTaskAuditLog(); + public void exportTask() throws AAIException, Exception { + AaiScheduledTaskAuditLog auditLog = new AaiScheduledTaskAuditLog(); auditLog.logBefore("dataExportTask", ONAPComponents.AAI.toString()); LOGGER.info("Started exportTask: " + dateFormat.format(new Date())); + try { + String isDataExportEnabled = AAIConfig.get("aai.dataexport.enable"); + } catch (AAIException ex){ + LOGGER.info("Ended exportTask: " + dateFormat.format(new Date()) + " " + ex.getMessage()); + auditLog.logAfter(); + throw ex; + } if (AAIConfig.get("aai.dataexport.enable").equalsIgnoreCase("false")) { LOGGER.debug("Data Export is not enabled"); return; @@ -208,7 +235,7 @@ public class DataExportTasks { LOGGER.debug("Exception while running the check to see if dataExport is running "+ LogFormatTools.getStackTop(e)); } - return count > 0; + return count > 0; } /** @@ -275,7 +302,7 @@ public class DataExportTasks { String g1 = m.group(1); LOGGER.debug ("Found group " + g1); if ( !fileMap.containsKey(g1) ) { - ArrayList<File> l = new ArrayList<>(); + ArrayList<File> l = new ArrayList<File>(); l.add(f); fileMap.put(g1, l); } @@ -317,6 +344,7 @@ public class DataExportTasks { /** * The deletePayload method deletes all the payload files that it finds at targetDirectory * @param targetDirFile the directory that contains payload files + * @throws AAIException */ private static void deletePayload(File targetDirFile) { diff --git a/src/main/java/org/onap/aai/datagrooming/DataGrooming.java b/src/main/java/org/onap/aai/datagrooming/DataGrooming.java index 88bfebb..3884a01 100644 --- a/src/main/java/org/onap/aai/datagrooming/DataGrooming.java +++ b/src/main/java/org/onap/aai/datagrooming/DataGrooming.java @@ -47,7 +47,7 @@ import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; import org.onap.aai.GraphAdminApp; -import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.restclient.PropertyPasswordConfiguration; import org.onap.aai.util.GraphAdminConstants; import org.onap.logging.ref.slf4j.ONAPLogConstants; import org.onap.aai.dbmap.AAIGraph; diff --git a/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java b/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java index 217d6c0..946489b 100644 --- a/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java +++ b/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java @@ -317,7 +317,7 @@ public class DataSnapshot { } else if( command.equals("MULTITHREAD_RELOAD") ){ // Note - this will use as many threads as the snapshot file is - // broken up into. (up to a limit) + // broken up into. (up to a limit - whatever the 'threadCount' variable is set to) if (args.length >= 2) { // Since they are re-loading, they need to pass the snapshot file name to use. // We expected the file to be found in our snapshot directory. 
Note - if @@ -357,8 +357,14 @@ public class DataSnapshot { } } - - threadCount4Create = cArgs.threadCount; + try { + threadCount4Create = cArgs.threadCount; + } + catch ( NumberFormatException nfe ){ + ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount found for DataSnapshot [" + cArgs.threadCount + "]"); + LOGGER.debug("Bad (non-integer) threadCount found by DataSnapshot [" + cArgs.threadCount + "]"); + AAISystemExitUtil.systemExitCloseAAIGraph(1); + } maxNodesPerFile4Create = cArgs.maxNodesPerFile; //Print Defaults LOGGER.debug("DataSnapshot command is [" + cArgs.command + "]"); @@ -375,8 +381,7 @@ public class DataSnapshot { LOGGER.debug("VertToEdgeProcDelay is [" + cArgs.vertToEdgeProcDelay + "]"); LOGGER.debug("StaggerThreadDelay is [" + cArgs.staggerThreadDelay + "]"); LOGGER.debug("Caller process is ["+ cArgs.caller + "]"); - - + //Print non-default values if (!AAIConfig.isEmpty(cArgs.fileName)){ LOGGER.debug("Snapshot file name (if not default) to use is [" + cArgs.fileName + "]"); @@ -553,6 +558,9 @@ public class DataSnapshot { // NOTE - it will only use as many threads as the number of files the // snapshot is written to. Ie. if you have a single-file snapshot, // then this will be single-threaded. + // If the number of files is greater than the 'threadCount' parameter, + // then we will use more than one pass to keep the number of simultaneous + // threads below the threadCount param. // LOGGER.debug(" Command = " + command ); @@ -561,114 +569,129 @@ public class DataSnapshot { } ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false); int fCount = snapFilesArr.size(); + int threadPassesNeeded = (int) Math.ceil((double)fCount / (double)threadCount4Create); + int filesPerPass = (int) Math.ceil((double)fCount / (double)threadPassesNeeded); + JanusGraph graph1 = AAIGraph.getInstance().getGraph(); long timeStart = System.nanoTime(); HashMap <String,String> old2NewVertIdMap = new HashMap <String,String> (); - // We're going to try loading in the vertices - without edges or properties - // using Separate threads - - ExecutorService executor = Executors.newFixedThreadPool(fCount); - List<Future<HashMap<String,String>>> list = new ArrayList<Future<HashMap<String,String>>>(); - - for( int i=0; i < fCount; i++ ){ - File f = snapFilesArr.get(i); + ExecutorService executor = Executors.newFixedThreadPool(fCount); + int threadFailCount = 0; + + LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs + + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs + + ", maxErrorsPerThread = " + maxErrorsPerThread ); + + // -------------------------------------- + // Step 1 -- Load empty vertices + // -------------------------------------- + int fileNo = 0; + for( int passNo = 1; passNo <= threadPassesNeeded; passNo++ ){ + List<Future<HashMap<String,String>>> listFutV = new ArrayList<Future<HashMap<String,String>>>(); + + int thisPassCount = 0; + while( (thisPassCount < filesPerPass) && (fileNo < fCount) ){ + File f = snapFilesArr.get(fileNo); String fname = f.getName(); String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname; Thread.sleep(cArgs.staggerThreadDelay); // Stagger the threads a bit LOGGER.debug(" -- Read file: [" + fullSnapName + "]"); - LOGGER.debug(" -- Call the PartialVertexLoader to just load vertices ----"); - LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs - + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs - + ", maxErrorsPerThread = " + maxErrorsPerThread ); Callable 
<HashMap<String,String>> vLoader = new PartialVertexLoader(graph1, fullSnapName, vertAddDelayMs, failureDelayMs, retryDelayMs, maxErrorsPerThread, LOGGER); Future <HashMap<String,String>> future = (Future<HashMap<String, String>>) executor.submit(vLoader); - // add Future to the list, we can get return value using Future - list.add(future); - LOGGER.debug(" -- Starting PartialDbLoad VERT_ONLY thread # "+ i ); + // add future to the list, we can get return value later + listFutV.add(future); + LOGGER.debug(" -- Starting PartialDbLoad VERT_ONLY file # "+ fileNo + + "( passNo = " + passNo + ", passIndex = " + thisPassCount + ")"); + + thisPassCount++; + fileNo++; } - + int threadCount4Reload = 0; - int threadFailCount = 0; - for(Future<HashMap<String,String>> fut : list){ - threadCount4Reload++; - try { - old2NewVertIdMap.putAll(fut.get()); - LOGGER.debug(" -- back from PartialVertexLoader. returned thread # " + threadCount4Reload + - ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() ); - } - catch (InterruptedException e) { - threadFailCount++; - AAIException ae = new AAIException("AAI_6128", e , "InterruptedException"); + for(Future<HashMap<String,String>> fut : listFutV){ + threadCount4Reload++; + try { + old2NewVertIdMap.putAll(fut.get()); + LOGGER.debug(" -- back from PartialVertexLoader. returned pass # " + + passNo + ", thread # " + + threadCount4Reload + + ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() ); + } + catch (InterruptedException e) { + threadFailCount++; + AAIException ae = new AAIException("AAI_6128", e , "InterruptedException"); ErrorLogHelper.logException(ae); - } - catch (ExecutionException e) { - threadFailCount++; - AAIException ae = new AAIException("AAI_6128", e , "ExecutionException"); + } + catch (ExecutionException e) { + threadFailCount++; + AAIException ae = new AAIException("AAI_6128", e , "ExecutionException"); ErrorLogHelper.logException(ae); - } - } - - executor.shutdown(); - - if( threadFailCount > 0 ) { - String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully. "; - LOGGER.debug(emsg); - throw new Exception( emsg ); + } } + } // end of passes for loading empty vertices + + executor.shutdown(); + if( threadFailCount > 0 ) { + String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully. 
"; + LOGGER.debug(emsg); + throw new Exception( emsg ); + } - long timeX = System.nanoTime(); - long diffTime = timeX - timeStart; - long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); - long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); - LOGGER.debug(" -- To reload just the vertex ids from the snapshot files, it took: " + - minCount + " minutes, " + secCount + " seconds " ); - - // Give the DB a little time to chew on all those vertices - Thread.sleep(vertToEdgeProcDelay); - - // ---------------------------------------------------------------------------------------- - LOGGER.debug("\n\n\n -- Now do the edges/props ----------------------"); - // ---------------------------------------------------------------------------------------- + long timeX = System.nanoTime(); + long diffTime = timeX - timeStart; + long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); + long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); + LOGGER.debug(" -- To reload just the vertex ids from the snapshot files, it took: " + + minCount + " minutes, " + secCount + " seconds " ); + // Give the DB a little time to chew on all those new vertices + Thread.sleep(vertToEdgeProcDelay); - // We're going to try loading in the edges and missing properties - // Note - we're passing the whole oldVid2newVid mapping to the PartialPropAndEdgeLoader - // so that the String-updates to the GraphSON will happen in the threads instead of - // here in the un-threaded calling method. - executor = Executors.newFixedThreadPool(fCount); - ArrayList<Future<ArrayList<String>>> listEdg = new ArrayList<Future<ArrayList<String>>>(); - for( int i=0; i < fCount; i++ ){ - File f = snapFilesArr.get(i); + + // ------------------------------------------------------------- + // Step 2 -- Load Edges and properties onto the empty vertices + // ------------------------------------------------------------- + LOGGER.debug("\n\n\n -- Now load the edges/properties ----------------------"); + executor = Executors.newFixedThreadPool(fCount); + + fileNo = 0; + for( int passNo = 1; passNo <= threadPassesNeeded; passNo++ ){ + ArrayList<Future<ArrayList<String>>> listFutEdg = new ArrayList<Future<ArrayList<String>>>(); + + int thisPassCount = 0; + while( (thisPassCount < filesPerPass) && (fileNo < fCount) ){ + File f = snapFilesArr.get(fileNo); String fname = f.getName(); String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname; Thread.sleep(cArgs.staggerThreadDelay); // Stagger the threads a bit LOGGER.debug(" -- Read file: [" + fullSnapName + "]"); - LOGGER.debug(" -- Call the PartialPropAndEdgeLoader for Properties and EDGEs ----"); - LOGGER.debug(" -- edgeAddDelayMs = " + vertAddDelayMs - + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs - + ", maxErrorsPerThread = " + maxErrorsPerThread ); - - Callable eLoader = new PartialPropAndEdgeLoader(graph1, fullSnapName, + Callable eLoader = new PartialPropAndEdgeLoader(graph1, fullSnapName, edgeAddDelayMs, failureDelayMs, retryDelayMs, old2NewVertIdMap, maxErrorsPerThread, LOGGER); Future <ArrayList<String>> future = (Future<ArrayList<String>>) executor.submit(eLoader); - //add Future to the list, we can get return value using Future - listEdg.add(future); - LOGGER.debug(" -- Starting PartialPropAndEdge thread # "+ i ); + // add future to the list, we can wait for it below + listFutEdg.add(future); + LOGGER.debug(" -- Starting PartialPropAndEdge file # " + + fileNo + " (pass # " + passNo + ", passIndex " + + thisPassCount + 
")" ); + + thisPassCount++; + fileNo++; } - threadCount4Reload = 0; - for(Future<ArrayList<String>> fut : listEdg){ - threadCount4Reload++; - try{ - fut.get(); // DEBUG -- should be doing something with the return value if it's not empty - ie. errors - LOGGER.debug(" -- back from PartialPropAndEdgeLoader. thread # " + threadCount4Reload ); - } + int threadCount4Reload = 0; + for( Future<ArrayList<String>> fut : listFutEdg ){ + threadCount4Reload++; + try{ + fut.get(); + LOGGER.debug(" -- back from PartialPropAndEdgeLoader. pass # " + + passNo + ", thread # " + threadCount4Reload ); + } catch (InterruptedException e) { threadFailCount++; AAIException ae = new AAIException("AAI_6128", e , "InterruptedException"); @@ -680,30 +703,32 @@ public class DataSnapshot { ErrorLogHelper.logException(ae); } } + + } // end of passes for reloading edges and properties - executor.shutdown(); + executor.shutdown(); - if( threadFailCount > 0 ) { - String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully. "; - LOGGER.debug(emsg); - throw new Exception( emsg ); - } + if( threadFailCount > 0 ) { + String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully. "; + LOGGER.debug(emsg); + throw new Exception( emsg ); + } - // This is needed so we can see the data committed by the called threads - graph1.tx().commit(); + // This is needed so we can see the data committed by the called threads + graph1.tx().commit(); - long timeEnd = System.nanoTime(); - diffTime = timeEnd - timeX; - minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); - secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); - LOGGER.debug(" -- To reload the edges and properties from snapshot files, it took: " + - minCount + " minutes, " + secCount + " seconds " ); + long timeEnd = System.nanoTime(); + diffTime = timeEnd - timeX; + minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); + secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); + LOGGER.debug(" -- To reload the edges and properties from snapshot files, it took: " + + minCount + " minutes, " + secCount + " seconds " ); - long totalDiffTime = timeEnd - timeStart; - long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime); - long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount); - LOGGER.debug(" -- TOTAL multi-threaded reload time: " + - totalMinCount + " minutes, " + totalSecCount + " seconds " ); + long totalDiffTime = timeEnd - timeStart; + long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime); + long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount); + LOGGER.debug(" -- TOTAL multi-threaded reload time: " + + totalMinCount + " minutes, " + totalSecCount + " seconds " ); } else if (command.equals("CLEAR_ENTIRE_DATABASE")) { // ------------------------------------------------------------------ diff --git a/src/main/java/org/onap/aai/datasnapshot/DataSnapshot4HistInit.java b/src/main/java/org/onap/aai/datasnapshot/DataSnapshot4HistInit.java index 8d250d7..9aba8cf 100644 --- a/src/main/java/org/onap/aai/datasnapshot/DataSnapshot4HistInit.java +++ b/src/main/java/org/onap/aai/datasnapshot/DataSnapshot4HistInit.java @@ -40,7 +40,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import org.onap.aai.config.PropertyPasswordConfiguration; +import 
org.onap.aai.restclient.PropertyPasswordConfiguration; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.tinkerpop.gremlin.structure.io.IoCore; @@ -75,8 +75,8 @@ import com.beust.jcommander.ParameterException; public class DataSnapshot4HistInit { - private static Logger LOGGER; - + private static Logger LOGGER = LoggerFactory.getLogger(DataSnapshot4HistInit.class); + /* Using realtime d */ private static final String REALTIME_DB = "realtime"; @@ -109,6 +109,13 @@ public class DataSnapshot4HistInit { */ public static void main(String[] args) { + // Set the logging file properties to be used by EELFManager + System.setProperty("aai.service.name", DataSnapshot4HistInit.class.getSimpleName()); + Properties props = System.getProperties(); + props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS); + props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG); + + AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(); PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration(); initializer.initialize(ctx); @@ -149,13 +156,6 @@ public class DataSnapshot4HistInit { public boolean executeCommand(String[] args) { - // Set the logging file properties to be used by EELFManager - System.setProperty("aai.service.name", DataSnapshot4HistInit.class.getSimpleName()); - Properties props = System.getProperties(); - props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS); - props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG); - LOGGER = LoggerFactory.getLogger(DataSnapshot4HistInit.class); - Boolean dbClearFlag = false; JanusGraph graph = null; String command = "UNKNOWN"; @@ -576,19 +576,25 @@ public class DataSnapshot4HistInit { // NOTE - it will only use as many threads as the number of files the // snapshot is written to. Ie. if you have a single-file snapshot, // then this will be single-threaded. + // If the number of files is greater than the 'threadCount' parameter, + // then we will use more than one pass to keep the number of simultaneous + // threads below the threadCount param. 
// LOGGER.debug(" Command = " + command ); + if (cArgs.oldFileDir != null && cArgs.oldFileDir != ""){ targetDir = cArgs.oldFileDir; } ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false); int fCount = snapFilesArr.size(); + int threadPassesNeeded = (int) Math.ceil((double)fCount / (double)threadCount4Create); + int filesPerPass = (int) Math.ceil((double)fCount / (double)threadPassesNeeded); + JanusGraph graph1 = AAIGraph.getInstance().getGraph(); - GraphAdminDBUtils.logConfigs(graph1.configuration()); long timeStart = System.nanoTime(); - HashMap <String,String> old2NewVertIdMap = new <String,String> HashMap (); - HashMap <String,ArrayList<String>> nodeKeyNames = new <String,ArrayList<String>> HashMap (); - + GraphAdminDBUtils.logConfigs(graph1.configuration()); + HashMap <String,String> old2NewVertIdMap = new HashMap <String,String> (); + HashMap <String,ArrayList<String>> nodeKeyNames = new HashMap <String,ArrayList<String>> (); try { LOGGER.debug("call getNodeKeyNames ()" ); nodeKeyNames = getNodeKeyNames(); @@ -597,142 +603,162 @@ public class DataSnapshot4HistInit { ErrorLogHelper.logException(ae); AAISystemExitUtil.systemExitCloseAAIGraph(1); } + + ExecutorService executor = Executors.newFixedThreadPool(fCount); + int threadFailCount = 0; - // We're going to try loading in the vertices - without edges or properties - // using Separate threads + LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs + + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs + + ", maxErrorsPerThread = " + maxErrorsPerThread ); + + // -------------------------------------- + // Step 1 -- Load empty vertices + // -------------------------------------- + int fileNo = 0; + for( int passNo = 1; passNo <= threadPassesNeeded; passNo++ ){ + List<Future<HashMap<String,String>>> listFutV = new ArrayList<Future<HashMap<String,String>>>(); - ExecutorService executor = Executors.newFixedThreadPool(fCount); - List<Future<HashMap<String,String>>> list = new ArrayList<Future<HashMap<String,String>>>(); - for( int i=0; i < fCount; i++ ){ - File f = snapFilesArr.get(i); + int thisPassCount = 0; + while( (thisPassCount < filesPerPass) && (fileNo < fCount) ){ + File f = snapFilesArr.get(fileNo); String fname = f.getName(); String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname; Thread.sleep(cArgs.staggerThreadDelay); // Stagger the threads a bit LOGGER.debug(" -- Read file: [" + fullSnapName + "]"); - LOGGER.debug(" -- Call the PartialVertexLoader to just load vertices ----"); - LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs - + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs - + ", maxErrorsPerThread = " + maxErrorsPerThread ); Callable <HashMap<String,String>> vLoader = new PartialVertexLoader(graph1, fullSnapName, vertAddDelayMs, failureDelayMs, retryDelayMs, maxErrorsPerThread, LOGGER); Future <HashMap<String,String>> future = (Future<HashMap<String, String>>) executor.submit(vLoader); - // add Future to the list, we can get return value using Future - list.add(future); - LOGGER.debug(" -- Starting PartialDbLoad VERT_ONLY thread # "+ i ); + // add future to the list, we can get return value later + listFutV.add(future); + LOGGER.debug(" -- Starting PartialDbLoad VERT_ONLY file # "+ fileNo + + "( passNo = " + passNo + ", passIndex = " + thisPassCount + ")"); + + thisPassCount++; + fileNo++; } - + int threadCount4Reload = 0; - int threadFailCount = 0; - for(Future<HashMap<String,String>> fut : list){ - 
threadCount4Reload++; - try { - old2NewVertIdMap.putAll(fut.get()); - LOGGER.debug(" -- back from PartialVertexLoader. returned thread # " + threadCount4Reload + - ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() ); - } - catch (InterruptedException e) { - threadFailCount++; - AAIException ae = new AAIException("AAI_6128", e , "InterruptedException"); - ErrorLogHelper.logException(ae); - } - catch (ExecutionException e) { - threadFailCount++; - AAIException ae = new AAIException("AAI_6128", e , "ExecutionException"); - ErrorLogHelper.logException(ae); - } - } - executor.shutdown(); - - if( threadFailCount > 0 ) { - String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully. "; - LOGGER.debug(emsg); - throw new Exception( emsg ); + for(Future<HashMap<String,String>> fut : listFutV){ + threadCount4Reload++; + try { + old2NewVertIdMap.putAll(fut.get()); + LOGGER.debug(" -- back from PartialVertexLoader. returned pass # " + + passNo + ", thread # " + + threadCount4Reload + + ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() ); + } + catch (InterruptedException e) { + threadFailCount++; + AAIException ae = new AAIException("AAI_6128", e , "InterruptedException"); + ErrorLogHelper.logException(ae); + } + catch (ExecutionException e) { + threadFailCount++; + AAIException ae = new AAIException("AAI_6128", e , "ExecutionException"); + ErrorLogHelper.logException(ae); + } } + } // end of passes for loading empty vertices + + executor.shutdown(); + if( threadFailCount > 0 ) { + String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully. "; + LOGGER.debug(emsg); + throw new Exception( emsg ); + } - long timeX = System.nanoTime(); - long diffTime = timeX - timeStart; - long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); - long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); - LOGGER.debug(" -- To reload just the vertex ids from the snapshot files, it took: " + - minCount + " minutes, " + secCount + " seconds " ); + long timeX = System.nanoTime(); + long diffTime = timeX - timeStart; + long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); + long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); + LOGGER.debug(" -- To reload just the vertex ids from the snapshot files, it took: " + + minCount + " minutes, " + secCount + " seconds " ); - // Give the DB a little time to chew on all those vertices - Thread.sleep(vertToEdgeProcDelay); + // Give the DB a little time to chew on all those new vertices + Thread.sleep(vertToEdgeProcDelay); - // ---------------------------------------------------------------------------------------- - LOGGER.debug("\n\n\n -- Now do the edges/props ----------------------"); - // ---------------------------------------------------------------------------------------- - // We're going to try loading in the edges and missing properties - // Note - we're passing the whole oldVid2newVid mapping to the PartialPropAndEdgeLoader - // so that the String-updates to the GraphSON will happen in the threads instead of - // here in the un-threaded calling method. 
- executor = Executors.newFixedThreadPool(fCount); - ArrayList<Future<ArrayList<String>>> listEdg = new ArrayList<Future<ArrayList<String>>>(); - for( int i=0; i < fCount; i++ ){ - File f = snapFilesArr.get(i); + // ------------------------------------------------------------- + // Step 2 -- Load Edges and properties onto the empty vertices + // ------------------------------------------------------------- + LOGGER.debug("\n\n\n -- Now load the edges/properties ----------------------"); + executor = Executors.newFixedThreadPool(fCount); + + fileNo = 0; + for( int passNo = 1; passNo <= threadPassesNeeded; passNo++ ){ + ArrayList<Future<ArrayList<String>>> listFutEdg = new ArrayList<Future<ArrayList<String>>>(); + + int thisPassCount = 0; + while( (thisPassCount < filesPerPass) && (fileNo < fCount) ){ + File f = snapFilesArr.get(fileNo); String fname = f.getName(); String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname; Thread.sleep(cArgs.staggerThreadDelay); // Stagger the threads a bit LOGGER.debug(" -- Read file: [" + fullSnapName + "]"); - LOGGER.debug(" -- Call the PartialPropAndEdgeLoader4HistInit for Properties and EDGEs ----"); - LOGGER.debug(" -- edgeAddDelayMs = " + vertAddDelayMs - + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs - + ", maxErrorsPerThread = " + maxErrorsPerThread ); - - - Callable eLoader = new PartialPropAndEdgeLoader4HistInit(graph1, fullSnapName, + Callable eLoader = new PartialPropAndEdgeLoader4HistInit(graph1, fullSnapName, edgeAddDelayMs, failureDelayMs, retryDelayMs, old2NewVertIdMap, maxErrorsPerThread, LOGGER, scriptStartTime, nodeKeyNames); + Future <ArrayList<String>> future = (Future<ArrayList<String>>) executor.submit(eLoader); - //add Future to the list, we can get return value using Future - listEdg.add(future); - LOGGER.debug(" -- Starting PartialPropAndEdge thread # "+ i ); + // add future to the list, we can wait for it below + listFutEdg.add(future); + LOGGER.debug(" -- Starting PartialPropAndEdgeLoader4HistInit file # " + + fileNo + " (pass # " + passNo + ", passIndex " + + thisPassCount + ")" ); + + thisPassCount++; + fileNo++; } - threadCount4Reload = 0; - for(Future<ArrayList<String>> fut : listEdg){ - threadCount4Reload++; - try{ - fut.get(); // DEBUG -- should be doing something with the return value if it's not empty - ie. errors - LOGGER.debug(" -- back from PartialPropAndEdgeLoader. thread # " + threadCount4Reload ); - } + + int threadCount4Reload = 0; + for( Future<ArrayList<String>> fut : listFutEdg ){ + threadCount4Reload++; + try{ + fut.get(); + LOGGER.debug(" -- back from PartialPropAndEdgeLoader4HistInit. pass # " + + passNo + ", thread # " + threadCount4Reload ); + } catch (InterruptedException e) { threadFailCount++; AAIException ae = new AAIException("AAI_6128", e , "InterruptedException"); - ErrorLogHelper.logException(ae); + ErrorLogHelper.logException(ae); } catch (ExecutionException e) { threadFailCount++; AAIException ae = new AAIException("AAI_6128", e , "ExecutionException"); - ErrorLogHelper.logException(ae); + ErrorLogHelper.logException(ae); } } + + } // end of passes for reloading edges and properties - executor.shutdown(); - if( threadFailCount > 0 ) { - String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully. 
"; - LOGGER.debug(emsg); - throw new Exception( emsg ); - } + executor.shutdown(); - // This is needed so we can see the data committed by the called threads - graph1.tx().commit(); + if( threadFailCount > 0 ) { + String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully. "; + LOGGER.debug(emsg); + throw new Exception( emsg ); + } - long timeEnd = System.nanoTime(); - diffTime = timeEnd - timeX; - minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); - secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); - LOGGER.debug(" -- To reload the edges and properties from snapshot files, it took: " + - minCount + " minutes, " + secCount + " seconds " ); + // This is needed so we can see the data committed by the called threads + graph1.tx().commit(); - long totalDiffTime = timeEnd - timeStart; - long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime); - long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount); - LOGGER.debug(" -- TOTAL multi-threaded reload time: " + - totalMinCount + " minutes, " + totalSecCount + " seconds " ); + long timeEnd = System.nanoTime(); + diffTime = timeEnd - timeX; + minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime); + secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount); + LOGGER.debug(" -- To reload the edges and properties from snapshot files, it took: " + + minCount + " minutes, " + secCount + " seconds " ); + + long totalDiffTime = timeEnd - timeStart; + long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime); + long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount); + LOGGER.debug(" -- TOTAL multi-threaded reload time: " + + totalMinCount + " minutes, " + totalSecCount + " seconds " ); + } else if (command.equals("CLEAR_ENTIRE_DATABASE")) { // ------------------------------------------------------------------ // They are calling this to clear the db before re-loading it @@ -1075,4 +1101,4 @@ public class DataSnapshot4HistInit { } -} +}
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/db/schema/ScriptDriver.java b/src/main/java/org/onap/aai/db/schema/ScriptDriver.java index a634e0b..3d9ec69 100644 --- a/src/main/java/org/onap/aai/db/schema/ScriptDriver.java +++ b/src/main/java/org/onap/aai/db/schema/ScriptDriver.java @@ -27,7 +27,7 @@ import org.apache.commons.configuration.PropertiesConfiguration; import org.codehaus.jackson.JsonGenerationException; import org.janusgraph.core.JanusGraph; import org.janusgraph.core.JanusGraphFactory; -import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.restclient.PropertyPasswordConfiguration; import org.onap.aai.dbmap.AAIGraphConfig; import org.onap.aai.edges.EdgeIngestor; import org.onap.aai.exceptions.AAIException; diff --git a/src/main/java/org/onap/aai/dbgen/DupeTool.java b/src/main/java/org/onap/aai/dbgen/DupeTool.java index d8d3ce0..0532e2e 100644 --- a/src/main/java/org/onap/aai/dbgen/DupeTool.java +++ b/src/main/java/org/onap/aai/dbgen/DupeTool.java @@ -28,7 +28,7 @@ import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; import org.apache.tinkerpop.gremlin.structure.*; import org.janusgraph.core.JanusGraph; import org.janusgraph.core.JanusGraphFactory; -import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.restclient.PropertyPasswordConfiguration; import org.onap.aai.dbmap.AAIGraph; import org.onap.aai.dbmap.AAIGraphConfig; import org.onap.aai.edges.enums.AAIDirection; diff --git a/src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java b/src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java index 2e1bc4b..ba5fad4 100644 --- a/src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java +++ b/src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java @@ -30,7 +30,7 @@ import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.node.ObjectNode; import org.codehaus.jackson.type.TypeReference; -import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.restclient.PropertyPasswordConfiguration; import org.onap.aai.db.props.AAIProperties; import org.onap.aai.dbmap.InMemoryGraph; import org.onap.aai.edges.EdgeIngestor; diff --git a/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java b/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java index 7bb0555..c12f3f3 100644 --- a/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java +++ b/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java @@ -100,29 +100,24 @@ public class ForceDeleteTool { for (int i = 0; i < args.length; i++) { String thisArg = args[i]; argStr4Msg = argStr4Msg + " " + thisArg; + if (thisArg.equals("-action")) { i++; if (i >= args.length) { logger.error(" No value passed with -action option. "); - System.out.println(" No value passed with -action option. "); exit(0); } - else { - actionVal = args[i]; - argStr4Msg = argStr4Msg + " " + actionVal; - } + actionVal = args[i]; + argStr4Msg = argStr4Msg + " " + actionVal; } else if (thisArg.equals("-userId")) { i++; if (i >= args.length) { logger.error(" No value passed with -userId option. "); - System.out.println(" No value passed with -userId option. 
"); exit(0); } - else { - userIdVal = args[i]; - argStr4Msg = argStr4Msg + " " + userIdVal; - } + userIdVal = args[i]; + argStr4Msg = argStr4Msg + " " + userIdVal; } else if (thisArg.equals("-overRideProtection")) { overRideProtection = true; @@ -134,54 +129,40 @@ public class ForceDeleteTool { i++; if (i >= args.length) { logger.error(" No value passed with -vertexId option. "); - System.out.println(" No value passed with -vertexId option. "); exit(0); } - else { - String nextArg = args[i]; - argStr4Msg = argStr4Msg + " " + nextArg; - - try { - vertexIdLong = Long.parseLong(nextArg); - } catch (Exception e) { - logger.error("Bad value passed with -vertexId option: [" + String nextArg = args[i]; + argStr4Msg = argStr4Msg + " " + nextArg; + try { + vertexIdLong = Long.parseLong(nextArg); + } catch (Exception e) { + logger.error("Bad value passed with -vertexId option: [" + nextArg + "]"); - System.out.println("Bad value passed with -vertexId option: [" - + nextArg + "]"); - exit(0); - } + exit(0); } } else if (thisArg.equals("-params4Collect")) { i++; if (i >= args.length) { logger.error(" No value passed with -params4Collect option. "); - System.out.println(" No value passed with -params4Collect option. "); exit(0); } - else { - dataString = args[i]; - argStr4Msg = argStr4Msg + " " + dataString; - } + dataString = args[i]; + argStr4Msg = argStr4Msg + " " + dataString; } else if (thisArg.equals("-edgeId")) { i++; if (i >= args.length) { logger.error(" No value passed with -edgeId option. "); - System.out.println(" No value passed with -edgeId option. "); exit(0); } - else { - String nextArg = args[i]; - argStr4Msg = argStr4Msg + " " + nextArg; - edgeIdStr = nextArg; - } + String nextArg = args[i]; + argStr4Msg = argStr4Msg + " " + nextArg; + edgeIdStr = nextArg; } else { logger.error(" Unrecognized argument passed to ForceDeleteTool: [" + thisArg + "]. "); - System.out.println(" Unrecognized argument passed to ForceDeleteTool: [" - + thisArg + "]"); logger.error(" Valid values are: -action -userId -vertexId -edgeId -overRideProtection -params4Collect -DISPLAY_ALL_VIDS"); exit(0); } @@ -368,19 +349,18 @@ public class ForceDeleteTool { System.out.println(infMsg); exit(0); } - else { - if( fd.getEdgeDelConfirmation(logger, userIdVal, thisEdge, overRideProtection) ){ - thisEdge.remove(); - graph.tx().commit(); - String infMsg = ">>>>>>>>>> Removed edge with edgeId = " + edgeIdStr; - logger.debug( infMsg ); - System.out.println(infMsg); - } - else { - String infMsg = " Delete Cancelled. "; - System.out.println(infMsg); - logger.debug( infMsg ); - } + + if( fd.getEdgeDelConfirmation(logger, userIdVal, thisEdge, overRideProtection) ){ + thisEdge.remove(); + graph.tx().commit(); + String infMsg = ">>>>>>>>>> Removed edge with edgeId = " + edgeIdStr; + logger.debug( infMsg ); + System.out.println(infMsg); + } + else { + String infMsg = " Delete Cancelled. 
"; + System.out.println(infMsg); + logger.debug( infMsg ); } exit(0); } diff --git a/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java b/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java index 15b4c81..6e48d4b 100644 --- a/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java +++ b/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java @@ -20,7 +20,7 @@ package org.onap.aai.dbgen.schemamod; import com.att.eelf.configuration.Configuration; -import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.restclient.PropertyPasswordConfiguration; import org.onap.aai.exceptions.AAIException; import org.onap.aai.introspection.Loader; import org.onap.aai.introspection.LoaderFactory; diff --git a/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod4Hist.java b/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod4Hist.java index 837f8a8..4c393eb 100644 --- a/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod4Hist.java +++ b/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod4Hist.java @@ -20,7 +20,7 @@ package org.onap.aai.dbgen.schemamod; import com.att.eelf.configuration.Configuration; -import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.restclient.PropertyPasswordConfiguration; import org.onap.aai.exceptions.AAIException; import org.onap.aai.introspection.Loader; import org.onap.aai.introspection.LoaderFactory; diff --git a/src/main/java/org/onap/aai/historytruncate/HistoryTruncate.java b/src/main/java/org/onap/aai/historytruncate/HistoryTruncate.java index 45b5d04..e0adec0 100644 --- a/src/main/java/org/onap/aai/historytruncate/HistoryTruncate.java +++ b/src/main/java/org/onap/aai/historytruncate/HistoryTruncate.java @@ -22,7 +22,7 @@ package org.onap.aai.historytruncate; import java.util.*; import java.util.concurrent.TimeUnit; -import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.restclient.PropertyPasswordConfiguration; import org.apache.tinkerpop.gremlin.process.traversal.P; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Edge; diff --git a/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java b/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java index f591120..5d32649 100644 --- a/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java +++ b/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java @@ -62,7 +62,7 @@ public class VersionInterceptor extends AAIContainerFilter implements ContainerR String uri = requestContext.getUriInfo().getPath(); - if (uri.startsWith("search") || uri.startsWith("util/echo") || uri.startsWith("tools")) { + if (uri.startsWith("search") || uri.startsWith("util/echo") || uri.startsWith("tools") || uri.endsWith("audit-sql-db")) { return; } diff --git a/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java b/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java index 3f25119..d2a336b 100644 --- a/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java +++ b/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java @@ -140,7 +140,7 @@ public abstract class EdgeSwingMigrator extends Migrator { // If the nodeTypes don't match, throw an error if( !oldNodeType.equals(newNodeType) ){ - logger.debug ( "Can not swing edge from a [" + oldNodeType + "] node to a [" + + logger.info ( "Can not swing edge from a [" + oldNodeType + "] node to a [" + newNodeType + "] node. 
"); success = false; return; @@ -182,7 +182,7 @@ public abstract class EdgeSwingMigrator extends Migrator { } String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent() ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; - logger.debug ( "\nSwinging [" + eLabel + "] OUT edge. \n >> Unchanged side is [" + logger.info ( "\nSwinging [" + eLabel + "] OUT edge. \n >> Unchanged side is [" + otherSideNodeType + "][" + otherSideUri + "] \n >> Edge used to go to [" + oldNodeType + "][" + oldUri + "],\n >> now swung to [" + newNodeType + "][" + newUri + "]. "); // remove the old edge @@ -203,8 +203,9 @@ public abstract class EdgeSwingMigrator extends Migrator { Map.Entry pair = (Map.Entry)it.next(); newOutE.property(pair.getKey().toString(), pair.getValue().toString() ); } + logger.info("\n Edge swing of [" + eLabel + "] OUT edge successful"); }else { - logger.debug("\n Edge was not swung due to Multiplicity Rule Violation..."); + logger.info("\n Edge was not swung due to Multiplicity Rule Violation..."); } } } @@ -245,7 +246,7 @@ public abstract class EdgeSwingMigrator extends Migrator { } String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent() ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; - logger.debug ( "\nSwinging [" + eLabel + "] IN edge. \n >> Unchanged side is [" + logger.info ( "\nSwinging [" + eLabel + "] IN edge. \n >> Unchanged side is [" + otherSideNodeType + "][" + otherSideUri + "] \n >> Edge used to go to [" + oldNodeType + "][" + oldUri + "],\n >> now swung to [" + newNodeType + "][" + newUri + "]. "); @@ -267,8 +268,9 @@ public abstract class EdgeSwingMigrator extends Migrator { Map.Entry pair = (Map.Entry)it.next(); newInE.property(pair.getKey().toString(), pair.getValue().toString() ); } + logger.info("\n Edge swing of [" + eLabel + "] IN edge successful"); } else { - logger.debug("\t Edge was not swung due to Multiplicity Rule Violation..."); + logger.info("\t Edge was not swung due to Multiplicity Rule Violation..."); } } } @@ -276,7 +278,7 @@ public abstract class EdgeSwingMigrator extends Migrator { } } catch (Exception e) { - logger.error("error encountered", e); + logger.info("error encountered", e); success = false; } } diff --git a/src/main/java/org/onap/aai/migration/MigrationController.java b/src/main/java/org/onap/aai/migration/MigrationController.java index 8d758e3..5e969bf 100644 --- a/src/main/java/org/onap/aai/migration/MigrationController.java +++ b/src/main/java/org/onap/aai/migration/MigrationController.java @@ -19,7 +19,7 @@ */ package org.onap.aai.migration; -import org.onap.aai.config.PropertyPasswordConfiguration; +import org.onap.aai.restclient.PropertyPasswordConfiguration; import org.onap.aai.dbmap.AAIGraph; import org.onap.aai.edges.EdgeIngestor; import org.onap.aai.exceptions.AAIException; diff --git a/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java b/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java index 7cb71ca..97ed045 100644 --- a/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java +++ b/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java @@ -99,8 +99,8 @@ public class MigrationControllerInternal { props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml"); props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES); - logger = 
LoggerFactory.getLogger(MigrationControllerInternal.class.getSimpleName()); MDC.put("logFilenameAppender", MigrationController.class.getSimpleName()); + logger = LoggerFactory.getLogger(MigrationControllerInternal.class.getSimpleName()); CommandLineArgs cArgs = new CommandLineArgs(); diff --git a/src/main/java/org/onap/aai/migration/Migrator.java b/src/main/java/org/onap/aai/migration/Migrator.java index 7d6a7c1..9498cd1 100644 --- a/src/main/java/org/onap/aai/migration/Migrator.java +++ b/src/main/java/org/onap/aai/migration/Migrator.java @@ -21,7 +21,6 @@ package org.onap.aai.migration; import java.io.File; import java.io.IOException; -import java.io.UnsupportedEncodingException; import java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; @@ -39,6 +38,7 @@ import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; import org.json.JSONException; import org.json.JSONObject; +import org.onap.aai.aailog.logs.AaiDebugLog; import org.onap.aai.edges.EdgeIngestor; import org.onap.aai.edges.enums.EdgeType; import org.onap.aai.edges.exceptions.AmbiguousRuleChoiceException; @@ -54,9 +54,9 @@ import org.onap.aai.serialization.db.exceptions.NoEdgeRuleFoundException; import org.onap.aai.serialization.engines.TransactionalGraphEngine; import org.onap.aai.setup.SchemaVersion; import org.onap.aai.setup.SchemaVersions; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.slf4j.MDC; /** * This class defines an A&AI Migration @@ -81,6 +81,12 @@ public abstract class Migrator implements Runnable { protected static final String MIGRATION_ERROR = "Migration Error: "; protected static final String MIGRATION_SUMMARY_COUNT = "Migration Summary Count: "; + + private static AaiDebugLog debugLog = new AaiDebugLog(); + static { + debugLog.setupMDC(); + } + /** * Instantiates a new migrator. 
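(editor's note) The Migrator, NotificationHelper, and VertexMerge changes in this diff all switch to stamping the SLF4J MDC before creating their logger. A minimal sketch of that pattern follows, under the assumption that a logback SiftingAppender keyed on "logFilenameAppender" routes each distinct value to its own log file; the logback configuration itself is not part of this change.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class PerMigrationLoggingSketch {
    private final Logger logger;

    public PerMigrationLoggingSketch() {
        // Stamp the MDC first so a sifting appender keyed on
        // "logFilenameAppender" can route this instance's output to its
        // own file, one per migration class (assumed appender setup).
        MDC.put("logFilenameAppender", this.getClass().getSimpleName());
        logger = LoggerFactory.getLogger(this.getClass().getSimpleName());
    }

    public void doWork() {
        logger.info("running {}", this.getClass().getSimpleName());
    }

    public static void main(String[] args) {
        new PerMigrationLoggingSketch().doWork();
    }
}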
@@ -96,8 +102,9 @@ public abstract class Migrator implements Runnable { this.schemaVersions = schemaVersions; initDBSerializer(); this.notificationHelper = new NotificationHelper(loader, serializer, loaderFactory, schemaVersions, engine, "AAI-MIGRATION", this.getMigrationName()); - logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); - logger.debug("\tInitilization of " + this.getClass().getSimpleName() + " migration script complete."); + MDC.put("logFilenameAppender", this.getClass().getSimpleName()); + logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + logAndPrint(logger,"\tInitilization of " + this.getClass().getSimpleName() + " migration script complete."); } /** @@ -127,7 +134,7 @@ public abstract class Migrator implements Runnable { */ public void createDmaapFiles(List<String> dmaapMsgList) { String fileName = getMigrationName() + "-" + UUID.randomUUID(); - String logDirectory = System.getProperty("AJSC_HOME") + "/logs/migration/dmaapEvents"; + String logDirectory = System.getProperty("AJSC_HOME") + "/logs/data/dmaapEvents"; File f = new File(logDirectory); f.mkdirs(); @@ -136,10 +143,12 @@ public abstract class Migrator implements Runnable { try { Files.write(Paths.get(logDirectory+"/"+fileName), (Iterable<String>)dmaapMsgList.stream()::iterator); } catch (IOException e) { + System.out.println("Unable to generate file with dmaap msgs for " + getMigrationName() + + " Exception is: " + e.getMessage()); logger.error("Unable to generate file with dmaap msgs for " + getMigrationName(), e); } } else { - logger.debug("No dmaap msgs detected for " + getMigrationName()); + logAndPrint(logger,"No dmaap msgs detected for " + getMigrationName()); } } @@ -150,7 +159,7 @@ public abstract class Migrator implements Runnable { public void createDmaapFilesForDelete(List<Introspector> dmaapDeleteIntrospectorList) {try { System.out.println("dmaapDeleteIntrospectorList :: " + dmaapDeleteIntrospectorList.size()); String fileName = "DELETE-"+ getMigrationName() + "-" + UUID.randomUUID(); - String logDirectory = System.getProperty("AJSC_HOME") + "/logs/migration/dmaapEvents/"; + String logDirectory = System.getProperty("AJSC_HOME") + "/logs/data/dmaapEvents/"; File f = new File(logDirectory); f.mkdirs(); @@ -169,6 +178,8 @@ public abstract class Migrator implements Runnable { finalStr=svIntr.getName() + "#@#" + svIntr.getURI() + "#@#" + str+"\n"; Files.write(Paths.get(logDirectory + "/" + fileName),finalStr.getBytes(),StandardOpenOption.APPEND); } catch (IOException e) { + System.out.println("Unable to generate file with dmaap msgs for " + getMigrationName() + + " Exception is: " + e.getMessage()); logger.error("Unable to generate file with dmaap msgs for "+getMigrationName(), e); } @@ -201,6 +212,7 @@ public abstract class Migrator implements Runnable { result.put(pk.key(), pk.value()); } } catch (JSONException e) { + System.out.println("Warning error reading vertex: " + e.getMessage()); logger.error("Warning error reading vertex: " + e); } @@ -223,6 +235,7 @@ public abstract class Migrator implements Runnable { result.put(pk.key(), pk.value()); } } catch (JSONException e) { + System.out.println("Warning error reading edge: " + e.getMessage()); logger.error("Warning error reading edge: " + e); } @@ -404,4 +417,15 @@ public abstract class Migrator implements Runnable { public NotificationHelper getNotificationHelper() { return this.notificationHelper; } + + /** + * Log and print. 
+ * + * @param logger the logger + * @param msg the msg + */ + protected void logAndPrint(Logger logger, String msg) { + System.out.println(msg); + logger.info(msg); + } } diff --git a/src/main/java/org/onap/aai/migration/NotificationHelper.java b/src/main/java/org/onap/aai/migration/NotificationHelper.java index f10a824..f08c56d 100644 --- a/src/main/java/org/onap/aai/migration/NotificationHelper.java +++ b/src/main/java/org/onap/aai/migration/NotificationHelper.java @@ -39,6 +39,7 @@ import org.onap.aai.serialization.engines.query.QueryEngine; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.slf4j.MDC; import org.onap.aai.setup.SchemaVersions; /** @@ -46,7 +47,7 @@ import org.onap.aai.setup.SchemaVersions; */ public class NotificationHelper { - private static final Logger LOGGER = LoggerFactory.getLogger(NotificationHelper.class); + protected Logger LOGGER = null; protected final DBSerializer serializer; protected final Loader loader; protected final TransactionalGraphEngine engine; @@ -61,6 +62,9 @@ public class NotificationHelper { this.transactionId = transactionId; this.sourceOfTruth = sourceOfTruth; this.notification = new UEBNotification(loader, loaderFactory, schemaVersions); + MDC.put("logFilenameAppender", this.getClass().getSimpleName()); + LOGGER = LoggerFactory.getLogger(this.getClass().getSimpleName()); + } public void addEvent(Vertex v, Introspector obj, EventAction action, URI uri, String basePath) throws UnsupportedEncodingException, AAIException { diff --git a/src/main/java/org/onap/aai/migration/ValueMigrator.java b/src/main/java/org/onap/aai/migration/ValueMigrator.java index f45b20b..685855c 100644 --- a/src/main/java/org/onap/aai/migration/ValueMigrator.java +++ b/src/main/java/org/onap/aai/migration/ValueMigrator.java @@ -23,7 +23,6 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Vertex; @@ -113,6 +112,8 @@ public abstract class ValueMigrator extends Migrator { } } } catch (Exception e) { + System.out.println(String.format("caught exception updating aai-node-type %s's property %s's value to " + + "%s: %s", nodeType, property, newValue.toString(), e.getMessage())); logger.error(String.format("caught exception updating aai-node-type %s's property %s's value to " + "%s: %s", nodeType, property, newValue.toString(), e.getMessage())); logger.error(e.getMessage()); @@ -121,12 +122,12 @@ public abstract class ValueMigrator extends Migrator { this.nodeTotalSuccess.put(nodeType, Integer.toString(this.subTotal)); } - logger.debug ("\n \n ******* Final Summary for " + " " + getMigrationName() +" ********* \n"); + logAndPrint(logger, " ******* Final Summary for " + " " + getMigrationName() +" ********* "); for (Map.Entry<String, String> migratedNode: nodeTotalSuccess.entrySet()) { - logger.debug("Total Migrated Records for " + migratedNode.getKey() +": " + migratedNode.getValue()); + logAndPrint(logger,"Total Migrated Records for " + migratedNode.getKey() +": " + migratedNode.getValue()); } - logger.debug(this.MIGRATION_SUMMARY_COUNT + "Total Migrated Records: "+ migrationSuccess); + logAndPrint(logger,this.MIGRATION_SUMMARY_COUNT + "Total Migrated Records: "+ migrationSuccess); } @@ -136,18 +137,18 @@ public abstract class ValueMigrator extends Migrator { String propertyValue = v.property(property).value().toString(); if (propertyValue.isEmpty()) { 
v.property(property, newValue); - logger.debug(String.format("Node Type %s: Property %s is empty, adding value %s", + logAndPrint(logger,String.format("Node Type %s: Property %s is empty, adding value %s", nodeType, property, newValue.toString())); this.touchVertexProperties(v, false); updateDmaapList(v); this.migrationSuccess++; this.subTotal++; } else { - logger.debug(String.format("Node Type %s: Property %s value already exists - skipping", + logAndPrint(logger,String.format("Node Type %s: Property %s value already exists - skipping", nodeType, property)); } } else { - logger.debug(String.format("Node Type %s: Property %s does not exist or " + + logAndPrint(logger,String.format("Node Type %s: Property %s does not exist or " + "updateExistingValues flag is set to True - adding the property with value %s", nodeType, property, newValue.toString())); v.property(property, newValue); @@ -178,7 +179,7 @@ public abstract class ValueMigrator extends Migrator { private void updateDmaapList(Vertex v){ String dmaapMsg = System.nanoTime() + "_" + v.id().toString() + "_" + v.value("resource-version").toString(); dmaapMsgList.add(dmaapMsg); - logger.debug("\tAdding Updated Vertex " + v.id().toString() + " to dmaapMsgList...."); + logAndPrint(logger,"\tAdding Updated Vertex " + v.id().toString() + " to dmaapMsgList...."); } public boolean isUpdateDmaap(){ diff --git a/src/main/java/org/onap/aai/migration/VertexMerge.java b/src/main/java/org/onap/aai/migration/VertexMerge.java index a2c814e..6480e2a 100644 --- a/src/main/java/org/onap/aai/migration/VertexMerge.java +++ b/src/main/java/org/onap/aai/migration/VertexMerge.java @@ -43,6 +43,7 @@ import org.onap.aai.serialization.db.EdgeSerializer; import org.onap.aai.serialization.engines.TransactionalGraphEngine; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.slf4j.MDC; /** * This class recursively merges two vertices passed in. @@ -52,8 +53,7 @@ import org.slf4j.LoggerFactory; */ public class VertexMerge { - private final Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); - + protected Logger logger = null; private final GraphTraversalSource g; private final TransactionalGraphEngine engine; private final DBSerializer serializer; @@ -70,6 +70,9 @@ public class VertexMerge { this.loader = builder.getLoader(); this.notificationHelper = builder.getHelper(); this.hasNotifications = builder.isHasNotifications(); + MDC.put("logFilenameAppender", this.getClass().getSimpleName()); + logger = LoggerFactory.getLogger(this.getClass().getSimpleName()); + } /** diff --git a/src/main/java/org/onap/aai/migration/v20/MigrateL2DefaultToFalse.java b/src/main/java/org/onap/aai/migration/v20/MigrateL2DefaultToFalse.java new file mode 100644 index 0000000..cbe15de --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v20/MigrateL2DefaultToFalse.java @@ -0,0 +1,79 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v20; + +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.migration.Enabled; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Status; +import org.onap.aai.migration.ValueMigrator; +import org.onap.aai.setup.SchemaVersions; + + +@MigrationPriority(20) +@MigrationDangerRating(2) +@Enabled +public class MigrateL2DefaultToFalse extends ValueMigrator { + + protected static final String L_INTERFACE_NODE_TYPE = "l-interface"; + protected static final String L2_MULTI_PROPERTY = "l2-multicasting"; + + private static Map<String, Map<String, Boolean>> map; + private static Map<String, Boolean> pair; + + public MigrateL2DefaultToFalse(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setL2ToFalse(), false); + } + + private static Map<String, Map<String, Boolean>> setL2ToFalse(){ + map = new HashMap<>(); + pair = new HashMap<>(); + + pair.put(L2_MULTI_PROPERTY, false); + + map.put(L_INTERFACE_NODE_TYPE, pair); + + return map; + } + + @Override + public Status getStatus() { + return Status.SUCCESS; + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[]{L_INTERFACE_NODE_TYPE}); + } + + @Override + public String getMigrationName() { + return "MigrateL2DefaultToFalse"; + } + +}
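(editor's note) MigrateL2DefaultToFalse is the simplest consumer of the ValueMigrator contract shown earlier in this diff: a nested map of aai-node-type to property-name/default pairs is handed to super(), and the trailing false means existing non-empty values are left untouched. A short sketch of building that structure; "some-new-flag" is a hypothetical property added only for illustration.

import java.util.HashMap;
import java.util.Map;

public class ValueMigratorConfigSketch {
    // Builds the nested map ValueMigrator consumes:
    //   aai-node-type -> (property name -> default value).
    static Map<String, Map<String, Boolean>> defaults() {
        Map<String, Boolean> pair = new HashMap<>();
        pair.put("l2-multicasting", false);
        pair.put("some-new-flag", false); // hypothetical, for illustration only
        Map<String, Map<String, Boolean>> map = new HashMap<>();
        map.put("l-interface", pair);
        return map;
    }

    public static void main(String[] args) {
        System.out.println(defaults()); // e.g. {l-interface={l2-multicasting=false, some-new-flag=false}}
    }
}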
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/migration/v20/MigrateVlanTag.java b/src/main/java/org/onap/aai/migration/v20/MigrateVlanTag.java new file mode 100644 index 0000000..0519cad --- /dev/null +++ b/src/main/java/org/onap/aai/migration/v20/MigrateVlanTag.java @@ -0,0 +1,391 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.migration.v20; + +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.javatuples.Pair; +import org.json.simple.JSONArray; +import org.json.simple.JSONObject; +import org.json.simple.parser.JSONParser; +import org.onap.aai.db.props.AAIProperties; +import org.onap.aai.edges.EdgeIngestor; +import org.onap.aai.introspection.Introspector; +import org.onap.aai.introspection.LoaderFactory; +import org.onap.aai.migration.EdgeSwingMigrator; +import org.onap.aai.migration.Enabled; +import org.onap.aai.migration.MigrationDangerRating; +import org.onap.aai.migration.MigrationPriority; +import org.onap.aai.migration.Status; +import org.onap.aai.serialization.db.EdgeSerializer; +import org.onap.aai.serialization.engines.TransactionalGraphEngine; +import org.onap.aai.setup.SchemaVersions; +import org.onap.aai.util.AAIConstants; + +@MigrationPriority(20) +@MigrationDangerRating(2) +@Enabled +public class MigrateVlanTag extends EdgeSwingMigrator { + + protected static final String CLOUD_REGION_NODE_TYPE = "cloud-region"; + protected static final String CLOUD_OWNER = "cloud-owner"; + protected static final String CLOUD_REGION_ID = "cloud-region-id"; + protected static final String VLAN_RANGE = "vlan-range"; + protected static final String VLAN_RANGE_ID = "vlan-range-id"; + protected static final String VLAN_TAG = "vlan-tag"; + protected static final String VLAN_TAG_ID = "vlan-tag-id"; + protected static final String UPGRADE_CYCLE = "upgrade-cycle"; + + protected final AtomicInteger skippedRowsCount = new AtomicInteger(0); + protected final AtomicInteger processedRowsCount = new AtomicInteger(0); + private static List<String> dmaapMsgList = new ArrayList<String>(); + private static List<Introspector> dmaapDeleteIntrospectorList = new ArrayList<Introspector>(); + private static int 
vlanRangeCount = 0; + private static int vlanRangeFailureCount = 0; + private static int vlanTagCount = 0; + private static int vlanTagFailureCount = 0; + + private boolean success = true; + protected int headerLength; + + protected final AtomicInteger falloutRowsCount = new AtomicInteger(0); + + public MigrateVlanTag(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, + EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) { + super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions); + } + + @Override + public void run() { + logger.info("---------- Start Updating vlan-tags under cloud-region ----------"); + + String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs"; + String feedDir = logDir + AAIConstants.AAI_FILESEP + "data" + + AAIConstants.AAI_FILESEP + "migration-input-files" + + AAIConstants.AAI_FILESEP; + String fileName = feedDir + "dslResults.json"; + logger.info(fileName); + logger.info("---------- Processing vlan-tags from file ----------"); + + Map cloudOwnerRegionIdWiseVertexMap = new HashMap(); + Map<String, Vertex> vlanTagIdWiseVertexMap = new HashMap<>(); + + try { + List<Vertex> cloudRegionList = this.engine.asAdmin().getTraversalSource().V() + .has(AAIProperties.NODE_TYPE, CLOUD_REGION_NODE_TYPE).toList(); + + List<Vertex> vlantagIdList = this.engine.asAdmin().getTraversalSource().V() + .has(AAIProperties.NODE_TYPE, VLAN_TAG).toList(); + + String cloudOwner, cloudRegionId; + for (Vertex vertex : cloudRegionList) { + cloudOwner = getCloudOwnerNodeValue(vertex); + cloudRegionId = getCloudRegionIdNodeValue(vertex); + if (!cloudOwner.isEmpty() && !cloudRegionId.isEmpty()) { + cloudOwnerRegionIdWiseVertexMap.put(cloudOwner + "/" + cloudRegionId, vertex); + } + } + + String vlantagId; + for (Vertex vertex : vlantagIdList) { + vlantagId = getVlanTagIdNodeValue(vertex); + if (!vlantagId.isEmpty() && + !(vertex.property(AAIProperties.AAI_URI).isPresent() && + vertex.property(AAIProperties.AAI_URI).toString().contains(CLOUD_REGION_NODE_TYPE))) { + vlanTagIdWiseVertexMap.put(vlantagId, vertex); + } + } + + JSONParser jsonParser = new JSONParser(); + Object obj = jsonParser + .parse(new FileReader(fileName)); + JSONObject vlanRangeResultJsonObj = (JSONObject) obj; + JSONArray vlanRangeResultJsonArray = (JSONArray) vlanRangeResultJsonObj.get("results"); + + JSONObject vlanRangeJsonObject, vlanTagsJsonObject, vlanTagJsonObject; + JSONArray vlanTagJsonArray; + String vlanTagId; + boolean isFirstTime; + Vertex vlanRangeVtx; + Set<String> matchedVlanTagIdSet = new HashSet<String>(); + for (int i = 0; i < vlanRangeResultJsonArray.size(); i++) { + isFirstTime = true; + vlanRangeVtx = null; + vlanRangeJsonObject = (JSONObject) vlanRangeResultJsonArray.get(i); + cloudOwner = getCloudOwnerValueFromUrl((String) vlanRangeJsonObject.get("url")); + cloudRegionId = getCloudRegionIdValueFromUrl((String) vlanRangeJsonObject.get("url")); + if (cloudOwnerRegionIdWiseVertexMap.containsKey(cloudOwner + "/" + cloudRegionId)) {// ap1 key contains + // api2 key + JSONObject vlanRangeInnerObject = (JSONObject) vlanRangeJsonObject.get("vlan-range"); + String vlanRangeId = (String) vlanRangeInnerObject.get("vlan-range-id"); + vlanTagsJsonObject = (JSONObject) vlanRangeInnerObject.get("vlan-tags"); + vlanTagJsonArray = (JSONArray) vlanTagsJsonObject.get("vlan-tag"); + + for (int j = 0; j < vlanTagJsonArray.size(); j++) { + vlanTagJsonObject = (JSONObject) vlanTagJsonArray.get(j); + vlanTagId = (String) 
vlanTagJsonObject.get("vlan-tag-id"); + if (vlanTagIdWiseVertexMap.containsKey(vlanTagId)) {// ap1 key contains api2 key + matchedVlanTagIdSet.add(vlanTagId); + if (isFirstTime) { + isFirstTime = false; + vlanRangeVtx = createNewVlanRangeFromCloudRegion(vlanRangeInnerObject, cloudOwner, + cloudRegionId, vlanRangeId); + } + Vertex vertex = vlanTagIdWiseVertexMap.get(vlanTagId); + createNewVlanTagFromVlanRange(vlanTagJsonObject, vlanRangeVtx, cloudOwner, cloudRegionId, + vlanRangeId, vertex); + vlanTagIdWiseVertexMap.remove(vlanTagId); + } + } + } + } + logger.info("******* Final Summary of Migrate vlan-tag Migration *********"); + if(!vlanTagIdWiseVertexMap.isEmpty()) { + logger.info("The following vlan-ids in A&AI graph were not found in the Narad input" + + "file and not migrated: "); + for(Vertex vlanTagIdWiseVertex : vlanTagIdWiseVertexMap.values()) { + logger.info(vlanTagIdWiseVertex.property("vlan-tag-id").toString()); + } + } + logger.info(MIGRATION_SUMMARY_COUNT+"Total Vlan Ranges Updated Count:" + vlanRangeCount); + logger.info(MIGRATION_ERROR+"Total Vlan Ranges Not Updated Count:" + vlanRangeFailureCount); + logger.info(MIGRATION_SUMMARY_COUNT+"Total Vlan Tags Updated Sucessfully Count: " + vlanTagCount); + logger.info(MIGRATION_ERROR+"Total Vlan Tags Not Updated Count: " + vlanTagFailureCount); + } catch (FileNotFoundException e) { + logger.info("ERROR: Could not file file " + fileName, e.getMessage()); + logger.error("ERROR: Could not file file " + fileName, e); + success = false; + } catch (IOException e) { + logger.info("ERROR: Issue reading file " + fileName, e.getMessage()); + logger.error("ERROR: Issue reading file " + fileName, e); + success = false; + } catch (Exception e) { + logger.info("encountered exception", e.getMessage()); + logger.error("encountered exception", e); + e.printStackTrace(); + success = false; + } + } + + private Vertex createNewVlanRangeFromCloudRegion(JSONObject vlanRangeJsonObject, String cloudOwner, + String cloudRegionId, String vlanRangeId) { + + Vertex vlanRangeVtx = null; + try { + + GraphTraversal<Vertex, Vertex> cloudRegionVtxList = this.engine.asAdmin().getTraversalSource().V() + .has(AAIProperties.NODE_TYPE, CLOUD_REGION_NODE_TYPE).has("cloud-owner", cloudOwner) + .has("cloud-region-id", cloudRegionId); + + if (cloudRegionVtxList != null && cloudRegionVtxList.hasNext()) { + Vertex cloudRegionVtx = cloudRegionVtxList.next(); + if (cloudRegionVtx != null) { + Introspector vlanRange = loader.introspectorFromName("vlan-range"); + vlanRangeVtx = serializer.createNewVertex(vlanRange); + vlanRange.setValue("vlan-range-id", vlanRangeId);// required + vlanRange.setValue("vlan-id-lower", (Long) vlanRangeJsonObject.get("vlan-id-lower"));// required + vlanRange.setValue("vlan-id-upper", (Long) vlanRangeJsonObject.get("vlan-id-upper"));// required + vlanRange.setValue("vlan-type", (String) vlanRangeJsonObject.get("vlan-type"));// required + this.createTreeEdge(cloudRegionVtx, vlanRangeVtx); + vlanRangeVtx.property(AAIProperties.AAI_URI, + "/cloud-infrastructure/" + "cloud-regions/cloud-region/" + cloudOwner + "/" + cloudRegionId + + "/vlan-ranges/vlan-range/" + vlanRangeId); + serializer.serializeSingleVertex(vlanRangeVtx, vlanRange, "migrations"); + + logger.info("\t Created new vlan-range " + vlanRangeVtx + " with vlan-range-id = " + + (String) vlanRangeJsonObject.get("vlan-range-id")); + + String dmaapMsg = System.nanoTime() + "_" + vlanRangeVtx.id().toString() + "_" + + vlanRangeVtx.value("resource-version").toString(); + 
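+ // (editor's note) dmaap audit entries follow the pattern
+ // <System.nanoTime()>_<vertexId>_<resource-version>; commit() later hands
+ // dmaapMsgList to createDmaapFiles(), which writes them under
+ // $AJSC_HOME/logs/data/dmaapEvents (see the Migrator change above).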
dmaapMsgList.add(dmaapMsg); + vlanRangeCount++; + } + } + + } catch (Exception e) { + vlanRangeFailureCount++; + logger.error("vlan-range failure: ", e); + logger.info("vlan-range with id : " + vlanRangeId + " failed: ", e.getMessage()); + + } + return vlanRangeVtx; + + } + + private Vertex createNewVlanTagFromVlanRange(JSONObject vlanTagJsonObject, Vertex vlanRangeVtx, String cloudOwner, + String cloudRegionId, String vlanRangeId, Vertex oldVlanTagVtx) { + + Vertex vlanTagVtx = null; + try { + Introspector vlanTag = loader.introspectorFromName("vlan-tag"); + vlanTagVtx = serializer.createNewVertex(vlanTag); + String vlanTagId = (String) vlanTagJsonObject.get("vlan-tag-id"); + vlanTag.setValue("vlan-tag-id", vlanTagId);// required + vlanTag.setValue("vlan-tag-role", (String) vlanTagJsonObject.get("vlan-tag-role"));// required + vlanTag.setValue("is-private", (Boolean) vlanTagJsonObject.get("is-private"));// required + if (vlanTagJsonObject.containsKey("vlan-id-inner")) + vlanTag.setValue("vlan-id-inner", (Long) vlanTagJsonObject.get("vlan-id-inner")); + if (vlanTagJsonObject.containsKey("vlan-id-outer")) + vlanTag.setValue("vlan-id-outer", (Long) vlanTagJsonObject.get("vlan-id-outer")); + if (vlanTagJsonObject.containsKey("vlan-tag-function")) + vlanTag.setValue("vlan-tag-function", (String) vlanTagJsonObject.get("vlan-tag-function")); + if (vlanTagJsonObject.containsKey("config-phase")) + vlanTag.setValue("config-phase", (String) vlanTagJsonObject.get("config-phase")); + if (vlanTagJsonObject.containsKey("vlan-tag-type")) + vlanTag.setValue("vlan-tag-type", (String) vlanTagJsonObject.get("vlan-tag-type")); + this.createTreeEdge(vlanRangeVtx, vlanTagVtx); + vlanTagVtx.property(AAIProperties.AAI_URI, + "/cloud-infrastructure/" + "cloud-regions/cloud-region/" + cloudOwner + "/" + cloudRegionId + + "/vlan-ranges/vlan-range/" + vlanRangeId + "/vlan-tags/vlan-tag/" + vlanTagId); + executeModifyOperation(oldVlanTagVtx, vlanTagVtx); + Introspector deletedVlanEvent = serializer.getLatestVersionView(oldVlanTagVtx); + dmaapDeleteIntrospectorList.add(deletedVlanEvent); + oldVlanTagVtx.remove(); + serializer.serializeSingleVertex(vlanTagVtx, vlanTag, "migrations"); + + logger.info("\t Created new vlan-tag " + vlanTagVtx + " with vlan-tag-id = " + + (String) vlanTagJsonObject.get("vlan-tag-id")); + + String dmaapMsg = System.nanoTime() + "_" + vlanTagVtx.id().toString() + "_" + + vlanRangeVtx.value("resource-version").toString(); + dmaapMsgList.add(dmaapMsg); + vlanTagCount++; + } catch (Exception e) { + vlanTagFailureCount++; + logger.error("vlan-tag failure: ", e); + if(vlanTagJsonObject != null && vlanTagJsonObject.get("vlan-tag-id") != null){ + logger.info("vlan-tag with id : " + vlanTagJsonObject.get("vlan-tag-id") + " failed: ", e.getMessage()); + } + else { + logger.info("vlan-tag failure: ", e.getMessage()); + } + } + return vlanRangeVtx; + + } + + private String getCloudRegionIdNodeValue(Vertex vertex) { + String propertyValue = ""; + if (vertex != null && vertex.property(CLOUD_REGION_ID).isPresent()) { + propertyValue = vertex.property(CLOUD_REGION_ID).value().toString(); + } + return propertyValue; + } + + private String getCloudOwnerNodeValue(Vertex vertex) { + String propertyValue = ""; + if (vertex != null && vertex.property(CLOUD_OWNER).isPresent()) { + propertyValue = vertex.property(CLOUD_OWNER).value().toString(); + } + return propertyValue; + } + + private String getVlanTagIdNodeValue(Vertex vertex) { + String propertyValue = ""; + if (vertex != null && 
vertex.property(VLAN_TAG_ID).isPresent()) { + propertyValue = vertex.property(VLAN_TAG_ID).value().toString(); + } + return propertyValue; + } + + private String getCloudOwnerValueFromUrl(String url) { + String arr[] = url.split("/"); + return arr[6]; + } + + private String getCloudRegionIdValueFromUrl(String url) { + String arr[] = url.split("/"); + return arr[7]; + } + + protected void executeModifyOperation(Vertex fromNode, Vertex toNode) { + try { + + this.swingEdges(fromNode, toNode, "l3-network", "none", "BOTH"); + this.swingEdges(fromNode, toNode, "cp", "none", "BOTH"); + } catch (Exception e) { + logger.error("error encountered", e); + success = false; + } + } + + @Override + public void commit() { + engine.commit(); + createDmaapFiles(dmaapMsgList); + createDmaapFilesForDelete(dmaapDeleteIntrospectorList); + } + + @Override + public Status getStatus() { + if (success) { + return Status.SUCCESS; + } else { + return Status.FAILURE; + } + } + + @Override + public Optional<String[]> getAffectedNodeTypes() { + return Optional.of(new String[] { CLOUD_REGION_NODE_TYPE }); + } + + @Override + public String getMigrationName() { + return "MigrateVlanTag"; + } + + @Override + public List<Pair<Vertex, Vertex>> getAffectedNodePairs() { + return null; + } + + @Override + public String getNodeTypeRestriction() { + return null; + } + + @Override + public String getEdgeLabelRestriction() { + return null; + } + + @Override + public String getEdgeDirRestriction() { + return null; + } + + @Override + public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) { + } + +}
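(editor's note) MigrateVlanTag drives everything off logs/data/migration-input-files/dslResults.json, parsed with json-simple. The file's layout is not shown in this change; the sketch below walks the shape that the accessors in run() imply, so treat the structure as inferred rather than documented.

import java.io.FileReader;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;

public class DslResultsWalkSketch {
    // Shape inferred from MigrateVlanTag's accessors:
    // { "results": [ { "url": "...",
    //     "vlan-range": { "vlan-range-id": "...",
    //       "vlan-tags": { "vlan-tag": [ { "vlan-tag-id": "...", ... } ] } } } ] }
    public static void main(String[] args) throws Exception {
        JSONObject root = (JSONObject) new JSONParser().parse(new FileReader("dslResults.json"));
        JSONArray results = (JSONArray) root.get("results");
        for (Object r : results) {
            JSONObject entry = (JSONObject) r;
            JSONObject range = (JSONObject) entry.get("vlan-range");
            JSONObject tags = (JSONObject) range.get("vlan-tags");
            JSONArray tagArr = (JSONArray) tags.get("vlan-tag");
            System.out.println(entry.get("url") + " -> " + tagArr.size() + " vlan-tag(s)");
        }
    }
}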
\ No newline at end of file diff --git a/src/main/java/org/onap/aai/rest/AuditSqlDbConsumer.java b/src/main/java/org/onap/aai/rest/AuditSqlDbConsumer.java new file mode 100644 index 0000000..aec7224 --- /dev/null +++ b/src/main/java/org/onap/aai/rest/AuditSqlDbConsumer.java @@ -0,0 +1,120 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.aai.rest; + +import org.onap.aai.audit.AuditGraphson2Sql; +import org.onap.aai.concurrent.AaiCallable; +import org.onap.aai.exceptions.AAIException; +import org.onap.aai.restcore.HttpMethod; +import org.onap.aai.restcore.RESTAPI; +import org.onap.aai.util.AAIConstants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +import javax.ws.rs.*; +import javax.ws.rs.core.*; +import javax.ws.rs.core.Response.Status; + +@Component +@Path("{version: v1}/audit-sql-db") +public class AuditSqlDbConsumer extends RESTAPI { + + private static String SOURCE_FILE_NAME = "sourceFileName"; + private static String SOURCE_FILE_DIR_DEFAULT = "logs/data/dataSnapshots/"; + + private static final Logger LOGGER = LoggerFactory.getLogger(AuditSqlDbConsumer.class); + + private String rdbmsDbName; + private AuditGraphson2Sql auditGraphson2Sql; + + @Autowired + public AuditSqlDbConsumer( + AuditGraphson2Sql auditGraphson2Sql, + @Value("${aperture.rdbmsname}") String rdbmsDbName + ){ + this.auditGraphson2Sql = auditGraphson2Sql; + this.rdbmsDbName = rdbmsDbName; + } + + + @GET + @Consumes({ MediaType.APPLICATION_JSON}) + @Produces({ MediaType.APPLICATION_JSON}) + public Response executeAudit(String content, + @PathParam("uri") @Encoded String uri, + @Context HttpHeaders headers, + @Context UriInfo info){ + return runner(AAIConstants.AAI_GRAPHADMIN_TIMEOUT_ENABLED, + AAIConstants.AAI_GRAPHADMIN_TIMEOUT_APP, + AAIConstants.AAI_GRAPHADMIN_TIMEOUT_LIMIT, + headers, + info, + HttpMethod.GET, + new AaiCallable<Response>() { + @Override + public Response process() { + return processExecuteAudit(content, uri, headers, info); + } + } + ); + } + + + public Response processExecuteAudit(String content, + @PathParam("uri") @Encoded String uri, + @Context HttpHeaders headers, + @Context UriInfo info) { + + Response response = null; + try { + this.checkParams(info.getQueryParameters()); + + String resStr = auditGraphson2Sql.runAudit( rdbmsDbName, + info.getQueryParameters().getFirst(SOURCE_FILE_NAME), + SOURCE_FILE_DIR_DEFAULT ); + + LOGGER.info ("Completed"); + + 
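+ // (editor's note) the audit report string from AuditGraphson2Sql.runAudit(..)
+ // is returned verbatim as the JSON entity; sourceFileName is a required
+ // query parameter, and checkParams(..) below raises AAI_6120 when it is
+ // missing.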
response = Response.status(Status.OK) + .type(MediaType.APPLICATION_JSON) + .entity(resStr).build(); + + } catch (AAIException e) { + response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, e); + } catch (Exception e ) { + AAIException ex = new AAIException("AAI_4000", e); + response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, ex); + } + + return response; + } + + + public void checkParams(MultivaluedMap<String, String> params) throws AAIException { + + if (!params.containsKey(SOURCE_FILE_NAME)) { + throw new AAIException("AAI_6120", "parameter: sourceFileName (of snapshot file) is required. "); + } + } + +} diff --git a/src/main/java/org/onap/aai/rest/client/ApertureConfiguration.java b/src/main/java/org/onap/aai/rest/client/ApertureConfiguration.java new file mode 100644 index 0000000..34fdbec --- /dev/null +++ b/src/main/java/org/onap/aai/rest/client/ApertureConfiguration.java @@ -0,0 +1,49 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ +package org.onap.aai.rest.client; + +import org.onap.aai.restclient.RestClient; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Profile; + + +@Configuration +public class ApertureConfiguration { + + @Bean(name = "apertureRestClient") + @ConditionalOnProperty(name = "aperture.service.client", havingValue = "two-way-ssl", matchIfMissing = true) + public RestClient apertureRestClientTwoWaySSL() { + return new ApertureServiceRestClient(); + } + + @Bean(name = "apertureRestClient") + @ConditionalOnProperty(name = "aperture.service.client", havingValue = "no-auth") + public RestClient apertureRestClientNoAuth() { + return new ApertureServiceNoAuthClient(); + } + + @Bean(name = "apertureRestClient") + @ConditionalOnProperty(name = "aperture.service.client", havingValue = "one-way-ssl") + public RestClient apertureRestClientOneWaySSL() { + return new ApertureServiceOneWayClient(); + } +} diff --git a/src/main/java/org/onap/aai/rest/client/ApertureService.java b/src/main/java/org/onap/aai/rest/client/ApertureService.java new file mode 100644 index 0000000..493a4cd --- /dev/null +++ b/src/main/java/org/onap/aai/rest/client/ApertureService.java @@ -0,0 +1,142 @@ +/** + * ============LICENSE_START======================================================= + * org.onap.aai + * ================================================================================ + * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ +package org.onap.aai.rest.client; + + +import com.google.gson.JsonObject; +import org.apache.http.conn.ConnectTimeoutException; +import org.onap.aai.exceptions.AAIException; +import org.onap.aai.restclient.RestClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Profile; +import org.springframework.http.HttpMethod; +import org.springframework.http.ResponseEntity; +import org.springframework.stereotype.Service; +import javax.annotation.PostConstruct; +import java.net.ConnectException; +import java.net.SocketTimeoutException; +import java.util.*; + +@Service +public class ApertureService { + + /** + * Error indicating that the service trying to connect is down + */ + static final String CONNECTION_REFUSED_STRING = + "Connection refused to the Aperture microservice due to service unreachable"; + + /** + * Error indicating that the server is unable to reach the port + * Could be server related connectivity issue + */ + static final String CONNECTION_TIMEOUT_STRING = + "Connection timeout to the Aperture microservice as this could " + + "indicate the server is unable to reach port, " + + "please check on server by running: nc -w10 -z -v ${APERTURE_HOST} ${APERTURE_PORT}"; + + /** + * Error indicating that the request exceeded the allowed time + * + * Note: This means that the service could be active its + * just taking some time to process our request + */ + static final String REQUEST_TIMEOUT_STRING = + "Request to Aperture service took longer than the currently set timeout"; + + static final String APERTURE_ENDPOINT = "/v1/audit/"; + + private static final Logger LOGGER = LoggerFactory.getLogger(ApertureService.class); + private final RestClient apertureRestClient; + private final String appName; + + + @Autowired + public ApertureService( + @Qualifier("apertureRestClient") RestClient apertureRestClient, + @Value("${spring.application.name}") String appName + ){ + this.apertureRestClient = apertureRestClient; + this.appName = appName; + + LOGGER.info("Successfully initialized the aperture service"); + } + + + public JsonObject runAudit(Long tStamp, String dbName) throws AAIException { + + Map<String, String> httpHeaders = new HashMap<>(); + + httpHeaders.put("X-FromAppId", appName); + httpHeaders.put("X-TransactionID", UUID.randomUUID().toString()); + httpHeaders.put("Accept", "application/json"); + + String queryParams = "?timestamp=" + tStamp + "&dbname=" + dbName; + + ResponseEntity responseEntity = null; + try { + responseEntity = apertureRestClient.execute( + APERTURE_ENDPOINT + queryParams, + HttpMethod.GET, + httpHeaders + ); + + if(isSuccess(responseEntity)){ + LOGGER.debug("Audit returned following response status code {} and body {}", responseEntity.getStatusCodeValue(), responseEntity.getBody()); + } + + } catch(Exception e){ + // If the exception cause is client side timeout + // then proceed as if it passed validation + // resources microservice shouldn't be blocked because of validation service + // is taking too long or if the validation service is down + // Any other exception it should block the request from passing? 
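+ // (editor's note) as written, every failure mode below is logged and
+ // swallowed: responseEntity stays null, and runAudit falls through to
+ // return an empty JsonObject rather than propagating the error.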
diff --git a/src/main/java/org/onap/aai/rest/client/ApertureServiceNoAuthClient.java b/src/main/java/org/onap/aai/rest/client/ApertureServiceNoAuthClient.java
new file mode 100644
index 0000000..778a821
--- /dev/null
+++ b/src/main/java/org/onap/aai/rest/client/ApertureServiceNoAuthClient.java
@@ -0,0 +1,76 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.rest.client;
+
+import org.onap.aai.restclient.NoAuthRestClient;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.util.MultiValueMap;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.UUID;
+
+public class ApertureServiceNoAuthClient extends NoAuthRestClient {
+
+    @Value("${aperture.service.base.url}")
+    private String baseUrl;
+
+    @Value("${aperture.service.timeout-in-milliseconds}")
+    private Integer timeout;
+
+    @Override
+    protected HttpComponentsClientHttpRequestFactory getHttpRequestFactory() throws Exception {
+        HttpComponentsClientHttpRequestFactory requestFactory = super.getHttpRequestFactory();
+        requestFactory.setConnectionRequestTimeout(timeout);
+        requestFactory.setReadTimeout(timeout);
+        requestFactory.setConnectTimeout(timeout);
+        return requestFactory;
+    }
+
+    @Override
+    public String getBaseUrl() {
+        return baseUrl;
+    }
+
+    @Override
+    public MultiValueMap<String, String> getHeaders(Map<String, String> headers) {
+        HttpHeaders httpHeaders = new HttpHeaders();
+
+        String defaultAccept = headers.getOrDefault(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON.toString());
+        String defaultContentType =
+            headers.getOrDefault(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON.toString());
+
+        if (headers.isEmpty()) {
+            httpHeaders.setAccept(Collections.singletonList(MediaType.parseMediaType(defaultAccept)));
+            httpHeaders.setContentType(MediaType.parseMediaType(defaultContentType));
+        }
+
+        httpHeaders.add("X-FromAppId", appName);
+        httpHeaders.add("X-TransactionId", UUID.randomUUID().toString());
+        headers.forEach(httpHeaders::add);
+        return httpHeaders;
+    }
+
+}
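Each Aperture client pins all three Apache HttpClient timeout knobs to the single aperture.service.timeout-in-milliseconds value, as in getHttpRequestFactory() above. A short sketch of what each setter governs; the millisecond value is illustrative:

// Illustrative only: the three knobs pinned to the one configured timeout.
HttpComponentsClientHttpRequestFactory factory = new HttpComponentsClientHttpRequestFactory();
factory.setConnectTimeout(60000);           // time allowed to establish the TCP connection
factory.setReadTimeout(60000);              // time allowed waiting for data once connected
factory.setConnectionRequestTimeout(60000); // time allowed to lease a connection from the pool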
diff --git a/src/main/java/org/onap/aai/rest/client/ApertureServiceOneWayClient.java b/src/main/java/org/onap/aai/rest/client/ApertureServiceOneWayClient.java
new file mode 100644
index 0000000..a888536
--- /dev/null
+++ b/src/main/java/org/onap/aai/rest/client/ApertureServiceOneWayClient.java
@@ -0,0 +1,91 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.rest.client;
+
+import org.onap.aai.restclient.OneWaySSLRestClient;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.util.MultiValueMap;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.UUID;
+
+public class ApertureServiceOneWayClient extends OneWaySSLRestClient {
+
+    @Value("${aperture.service.base.url}")
+    private String baseUrl;
+
+    @Value("${aperture.service.ssl.trust-store}")
+    private String truststorePath;
+
+    @Value("${aperture.service.ssl.trust-store-password}")
+    private String truststorePassword;
+
+    @Value("${aperture.service.timeout-in-milliseconds}")
+    private Integer timeout;
+
+    @Override
+    public String getBaseUrl() {
+        return baseUrl;
+    }
+
+    @Override
+    protected String getTruststorePath() {
+        return truststorePath;
+    }
+
+    @Override
+    protected char[] getTruststorePassword() {
+        return truststorePassword.toCharArray();
+    }
+
+    @Override
+    protected HttpComponentsClientHttpRequestFactory getHttpRequestFactory() throws Exception {
+        HttpComponentsClientHttpRequestFactory requestFactory = super.getHttpRequestFactory();
+        requestFactory.setConnectionRequestTimeout(timeout);
+        requestFactory.setReadTimeout(timeout);
+        requestFactory.setConnectTimeout(timeout);
+        return requestFactory;
+    }
+
+    @Override
+    public MultiValueMap<String, String> getHeaders(Map<String, String> headers) {
+        HttpHeaders httpHeaders = new HttpHeaders();
+
+        String defaultAccept = headers.getOrDefault(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON.toString());
+        String defaultContentType =
+            headers.getOrDefault(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON.toString());
+
+        if (headers.isEmpty()) {
+            httpHeaders.setAccept(Collections.singletonList(MediaType.parseMediaType(defaultAccept)));
+            httpHeaders.setContentType(MediaType.parseMediaType(defaultContentType));
+        }
+
+        httpHeaders.add("X-FromAppId", appName);
+        httpHeaders.add("X-TransactionId", UUID.randomUUID().toString());
+        headers.forEach(httpHeaders::add);
+        return httpHeaders;
+    }
+
+}
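Note the defaulting quirk in getHeaders(), shared by all three clients: the JSON Accept/Content-Type defaults are applied only when the caller passes an empty map, while a non-empty map is copied through verbatim on top of the two generated identity headers. A sketch of both cases, assuming client is an instance of one of the Aperture clients:

// Case 1: empty caller map, JSON defaults are applied.
MultiValueMap<String, String> defaulted = client.getHeaders(Collections.emptyMap());
// defaulted: Accept=application/json, Content-Type=application/json,
//            X-FromAppId=<appName>, X-TransactionId=<random UUID>

// Case 2: any caller entry suppresses both defaults.
MultiValueMap<String, String> custom =
    client.getHeaders(Collections.singletonMap("Accept", "application/xml"));
// custom: Accept=application/xml plus the identity headers; no Content-Type set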
diff --git a/src/main/java/org/onap/aai/rest/client/ApertureServiceRestClient.java b/src/main/java/org/onap/aai/rest/client/ApertureServiceRestClient.java
new file mode 100644
index 0000000..da32fbe
--- /dev/null
+++ b/src/main/java/org/onap/aai/rest/client/ApertureServiceRestClient.java
@@ -0,0 +1,107 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.rest.client;
+
+import org.onap.aai.restclient.TwoWaySSLRestClient;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.util.MultiValueMap;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.UUID;
+
+public class ApertureServiceRestClient extends TwoWaySSLRestClient {
+
+    @Value("${aperture.service.base.url}")
+    private String baseUrl;
+
+    @Value("${aperture.service.ssl.key-store}")
+    private String keystorePath;
+
+    @Value("${aperture.service.ssl.trust-store}")
+    private String truststorePath;
+
+    @Value("${aperture.service.ssl.key-store-password}")
+    private String keystorePassword;
+
+    @Value("${aperture.service.ssl.trust-store-password}")
+    private String truststorePassword;
+
+    @Value("${aperture.service.timeout-in-milliseconds}")
+    private Integer timeout;
+
+    @Override
+    public String getBaseUrl() {
+        return baseUrl;
+    }
+
+    @Override
+    protected String getKeystorePath() {
+        return keystorePath;
+    }
+
+    @Override
+    protected String getTruststorePath() {
+        return truststorePath;
+    }
+
+    @Override
+    protected char[] getKeystorePassword() {
+        return keystorePassword.toCharArray();
+    }
+
+    @Override
+    protected char[] getTruststorePassword() {
+        return truststorePassword.toCharArray();
+    }
+
+    @Override
+    protected HttpComponentsClientHttpRequestFactory getHttpRequestFactory() throws Exception {
+        HttpComponentsClientHttpRequestFactory requestFactory = super.getHttpRequestFactory();
+        requestFactory.setConnectionRequestTimeout(timeout);
+        requestFactory.setReadTimeout(timeout);
+        requestFactory.setConnectTimeout(timeout);
+        return requestFactory;
+    }
+
+    @Override
+    public MultiValueMap<String, String> getHeaders(Map<String, String> headers) {
+        HttpHeaders httpHeaders = new HttpHeaders();
+
+        String defaultAccept = headers.getOrDefault(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON.toString());
+        String defaultContentType =
+            headers.getOrDefault(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON.toString());
+
+        if (headers.isEmpty()) {
+            httpHeaders.setAccept(Collections.singletonList(MediaType.parseMediaType(defaultAccept)));
+            httpHeaders.setContentType(MediaType.parseMediaType(defaultContentType));
+        }
+
+        httpHeaders.add("X-FromAppId", appName);
+        httpHeaders.add("X-TransactionId", UUID.randomUUID().toString());
+        headers.forEach(httpHeaders::add);
+        return httpHeaders;
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/schema/GenTester.java b/src/main/java/org/onap/aai/schema/GenTester.java
index 84ecf19..160ec22 100644
--- a/src/main/java/org/onap/aai/schema/GenTester.java
+++ b/src/main/java/org/onap/aai/schema/GenTester.java
@@ -24,7 +24,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.schema.JanusGraphManagement;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbgen.SchemaGenerator;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
diff --git a/src/main/java/org/onap/aai/schema/GenTester4Hist.java b/src/main/java/org/onap/aai/schema/GenTester4Hist.java
index eefb7b0..05478e2 100644
--- a/src/main/java/org/onap/aai/schema/GenTester4Hist.java
+++ b/src/main/java/org/onap/aai/schema/GenTester4Hist.java
@@ -24,7 +24,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.schema.JanusGraphManagement;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbgen.SchemaGenerator4Hist;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
diff --git a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
index 0bd254f..4297886 100644
--- a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
+++ b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
@@ -21,7 +21,7 @@ package org.onap.aai.util;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.Parameter;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
diff --git a/src/main/java/org/onap/aai/util/SendMigrationNotifications.java b/src/main/java/org/onap/aai/util/SendMigrationNotifications.java
index dfe2649..c51de1e 100644
--- a/src/main/java/org/onap/aai/util/SendMigrationNotifications.java
+++ b/src/main/java/org/onap/aai/util/SendMigrationNotifications.java
@@ -54,8 +54,8 @@ public class SendMigrationNotifications {
     private String config;
     private String path;
     private Set<String> notifyOn;
-    private long sleepInMilliSecs;
-    private int numToBatch;
+    long sleepInMilliSecs;
+    int numToBatch;
     private String requestId;
     private EventAction eventAction;
     private String eventSource;
@@ -108,22 +108,25 @@
             vertexes = g.V(entry.getKey()).toList();
             if (vertexes == null || vertexes.isEmpty()) {
                 logAndPrint("Vertex " + entry.getKey() + " no longer exists." );
+                continue;
             } else if (vertexes.size() > 1) {
                 logAndPrint("Vertex " + entry.getKey() + " query returned " + vertexes.size() + " vertexes." );
+                continue;
             } else {
                 logger.debug("Processing " + entry.getKey() + " resource-version " + entry.getValue());
                 v = vertexes.get(0);
-                if ((notifyOn.isEmpty() || notifyOn.contains(v.value(AAIProperties.NODE_TYPE).toString()))
-                        && entry.getValue().equals(v.value(AAIProperties.RESOURCE_VERSION).toString())) {
-                    Introspector introspector = serializer.getLatestVersionView(v);
-                    uri = this.serializer.getURIForVertex(v, false);
-                    this.notificationHelper.addEvent(v, introspector, eventAction, uri, basePath);
-                    count++;
-                    if (count >= this.numToBatch) {
-                        trigger();
-                        logger.debug("Triggered " + entry.getKey());
-                        count = 0;
-                        Thread.sleep(this.sleepInMilliSecs);
+                if (notifyOn.isEmpty() || notifyOn.contains(v.value(AAIProperties.NODE_TYPE).toString())) {
+                    if (entry.getValue().equals(v.value(AAIProperties.RESOURCE_VERSION).toString())) {
+                        Introspector introspector = serializer.getLatestVersionView(v);
+                        uri = this.serializer.getURIForVertex(v, false);
+                        this.notificationHelper.addEvent(v, introspector, eventAction, uri, basePath);
+                        count++;
+                        if (count >= this.numToBatch) {
+                            trigger();
+                            logger.debug("Triggered " + entry.getKey());
+                            count = 0;
+                            Thread.sleep(this.sleepInMilliSecs);
+                        }
                     }
                 }
             }
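The reworked hunk above swaps the old compound condition for guard clauses: a missing or ambiguous vertex now logs and continues instead of falling through, and the node-type and resource-version checks are split so the batching block runs only when both pass. A condensed sketch of the resulting loop shape; names are taken from the diff, the surrounding class and event-building details are elided:

// Condensed from the patched loop; not a drop-in copy of SendMigrationNotifications.
for (Map.Entry<String, String> entry : entries.entrySet()) {  // map name assumed
    List<Vertex> vertexes = g.V(entry.getKey()).toList();
    if (vertexes == null || vertexes.isEmpty()) {
        continue;   // vertex is gone: log and skip, never notify
    }
    if (vertexes.size() > 1) {
        continue;   // ambiguous key: log and skip, never notify
    }
    Vertex v = vertexes.get(0);
    if (notifyOn.isEmpty() || notifyOn.contains(v.value(AAIProperties.NODE_TYPE).toString())) {
        if (entry.getValue().equals(v.value(AAIProperties.RESOURCE_VERSION).toString())) {
            // add the event; after numToBatch events call trigger(), reset the
            // counter, then sleep sleepInMilliSecs before the next batch
        }
    }
}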
diff --git a/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java b/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java
index d3670f2..0eeaecd 100644
--- a/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java
+++ b/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java
@@ -21,7 +21,7 @@ package org.onap.aai.util;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.Parameter;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
diff --git a/src/main/java/org/onap/aai/web/JerseyConfiguration.java b/src/main/java/org/onap/aai/web/JerseyConfiguration.java
index 629d11e..f193b8c 100644
--- a/src/main/java/org/onap/aai/web/JerseyConfiguration.java
+++ b/src/main/java/org/onap/aai/web/JerseyConfiguration.java
@@ -32,6 +32,7 @@ import org.apache.tinkerpop.gremlin.structure.T;
 import org.glassfish.jersey.filter.LoggingFilter;
 import org.glassfish.jersey.server.ResourceConfig;
 import org.glassfish.jersey.servlet.ServletProperties;
+import org.onap.aai.rest.AuditSqlDbConsumer;
 import org.onap.aai.rest.QueryConsumer;
 import org.onap.aai.rest.util.EchoResponse;
 import org.onap.logging.filter.base.AuditLogContainerFilter;
@@ -54,7 +55,7 @@ public class JerseyConfiguration extends ResourceConfig {
         this.env = env;
 
         register(QueryConsumer.class);
-
+        register(AuditSqlDbConsumer.class);
         register(EchoResponse.class);
 
         //Filters
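Registering AuditSqlDbConsumer alongside QueryConsumer is what publishes the new audit resource through the existing Jersey servlet. A hypothetical smoke test, assuming a Spring test context able to construct JerseyConfiguration (runner and context setup not shown):

import static org.junit.Assert.assertTrue;

import org.junit.Test;
import org.onap.aai.rest.AuditSqlDbConsumer;
import org.onap.aai.web.JerseyConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

// Hypothetical smoke test, not part of this change.
public class JerseyConfigurationTest {

    @Autowired
    private JerseyConfiguration jerseyConfiguration;

    @Test
    public void auditConsumerIsRegistered() {
        // ResourceConfig#getClasses lists everything added via register(...)
        assertTrue(jerseyConfiguration.getClasses().contains(AuditSqlDbConsumer.class));
    }
}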