aboutsummaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rwxr-xr-xsrc/main/docker/Dockerfile25
-rw-r--r--src/main/docker/aai.sh9
-rw-r--r--src/main/docker/docker-entrypoint.sh10
-rw-r--r--src/main/java/org/onap/aai/dataexport/DataExportTasks.java56
-rw-r--r--src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java18
-rw-r--r--src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java10
-rw-r--r--src/main/java/org/onap/aai/historytruncate/HistoryTruncateTasks.java12
-rw-r--r--src/main/scripts/add_vertex_label.sh30
-rw-r--r--src/main/scripts/audit_schema.sh1
-rw-r--r--src/main/scripts/common_functions.sh15
-rw-r--r--src/main/scripts/createDBSchema.sh8
-rw-r--r--src/main/scripts/dataGrooming.sh41
-rw-r--r--src/main/scripts/dataRestoreFromSnapshot.sh13
-rw-r--r--src/main/scripts/dataRestoreFromSnapshotMulti.sh13
-rw-r--r--src/main/scripts/dataSnapshot.sh3
-rw-r--r--src/main/scripts/dupeTool.sh21
-rw-r--r--src/main/scripts/dynamicPayloadArchive.sh10
-rw-r--r--src/main/scripts/dynamicPayloadGenerator.sh29
-rw-r--r--src/main/scripts/dynamicPayloadPartial.sh4
-rw-r--r--src/main/scripts/extract-events.sh6
-rw-r--r--src/main/scripts/forceDeleteTool.sh29
-rw-r--r--src/main/scripts/getDslResult.sh10
-rw-r--r--src/main/scripts/historyCreateDBSchema.sh7
-rw-r--r--src/main/scripts/historyDbInitialLoad.sh7
-rw-r--r--src/main/scripts/historySchemaMod.sh15
-rw-r--r--src/main/scripts/historyTruncateDb.sh19
-rw-r--r--src/main/scripts/migration_verification.sh8
-rw-r--r--src/main/scripts/preDataRestore.sh13
-rw-r--r--src/main/scripts/resend-dmaap-events.sh46
-rw-r--r--src/main/scripts/run_Migrations.sh7
-rw-r--r--src/main/scripts/run_SendDeleteMigrationNotification.sh9
-rw-r--r--src/main/scripts/run_SendMigrationNotification.sh9
-rw-r--r--src/main/scripts/schemaMod.sh23
-rw-r--r--src/main/scripts/uniquePropertyCheck.sh7
-rw-r--r--src/main/scripts/updatePem.sh5
-rw-r--r--src/main/scripts/updatePropertyTool.sh3
36 files changed, 250 insertions, 301 deletions
diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile
index 731da9a..6a1a05c 100755
--- a/src/main/docker/Dockerfile
+++ b/src/main/docker/Dockerfile
@@ -1,25 +1,24 @@
-FROM @aai.docker.namespace@/aai-common-@aai.base.image@:@aai.base.image.version@
+FROM eclipse-temurin:8-jre-alpine
USER root
+ENV SERVER_PORT=8449
+EXPOSE ${SERVER_PORT}
-RUN mkdir -p /opt/aaihome/aaiadmin /opt/aai/logroot/AAI-GA /opt/app/aai-graphadmin/logs/gc
+# curl is used in the putTool script
+# (PUT's can't be done using the busybox wget)
+RUN apk --no-cache add curl
+
+RUN mkdir -p /opt/aaihome/aaiadmin /opt/aai/logroot/AAI-GA /opt/app/aai-graphadmin/logs/gc
VOLUME /opt/aai/logroot/AAI-GA
VOLUME /opt/data
VOLUME /opt/tools
-HEALTHCHECK --interval=40s --timeout=10s --retries=3 CMD nc -z -v localhost 8449 || exit 1
-
# Add the proper files into the docker image from your build
WORKDIR /opt/app/aai-graphadmin
-RUN chown -R aaiadmin:aaiadmin /opt/app/aai-graphadmin /etc/profile.d /opt/aai/logroot/AAI-GA /opt/app /opt/aai/logroot /opt/app/aai-graphadmin/logs/gc
-
-COPY --chown=aaiadmin:aaiadmin /maven/aai-graphadmin/ .
+RUN chown -R nobody:nobody /opt/app/aai-graphadmin /etc/profile.d /opt/aai/logroot/AAI-GA /opt/app /opt/aai/logroot /opt/app/aai-graphadmin/logs/gc
-USER aaiadmin
+COPY --chown=nobody:nobody /maven/aai-graphadmin/ .
-ENV AAI_BUILD_VERSION @aai.docker.version@
-# Expose the ports for outside linux to use
-# 8449 is the important one to be used
-EXPOSE 8449
-ENTRYPOINT ["/bin/bash", "/opt/app/aai-graphadmin/docker-entrypoint.sh"] \ No newline at end of file
+USER nobody
+ENTRYPOINT ["/bin/sh", "/opt/app/aai-graphadmin/docker-entrypoint.sh"]
diff --git a/src/main/docker/aai.sh b/src/main/docker/aai.sh
index 4d8b6ce..c1c0398 100644
--- a/src/main/docker/aai.sh
+++ b/src/main/docker/aai.sh
@@ -20,15 +20,6 @@
# ECOMP is a trademark and service mark of AT&T Intellectual Property.
#
-# set system related env
-# and make script compatible both with ubuntu and alpine base images
-# jre-alpine image has $JAVA_HOME set and added to $PATH
-# ubuntu image requires to set $JAVA_HOME and add java to $PATH manually
-if [ -z $JAVA_HOME ] && [ $(grep -i "ID=ubuntu" /etc/os-release | wc -w) -eq 1 ] ; then
- export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-`dpkg --print-architecture | awk -F- '{ print $NF }'`
- export PATH=$PATH:${JAVA_HOME}/jre/bin:${JAVA_HOME}/bin
-fi
-
# set app related env
export PROJECT_HOME=/opt/app/aai-graphadmin
export AAIENV=dev
diff --git a/src/main/docker/docker-entrypoint.sh b/src/main/docker/docker-entrypoint.sh
index 22da6cf..78bb479 100644
--- a/src/main/docker/docker-entrypoint.sh
+++ b/src/main/docker/docker-entrypoint.sh
@@ -7,9 +7,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -23,8 +23,6 @@ RESOURCES_HOME=${APP_HOME}/resources/;
export SERVER_PORT=${SERVER_PORT:-8449};
-echo "Project Build Version: ${AAI_BUILD_VERSION}";
-
find /opt/app/ -name "*.sh" -exec chmod +x {} +
if [ -f ${APP_HOME}/aai.sh ]; then
@@ -98,10 +96,6 @@ fi
JAVA_OPTS="${JAVA_OPTS} -Dserver.port=${SERVER_PORT}";
JAVA_OPTS="${JAVA_OPTS} -DBUNDLECONFIG_DIR=./resources";
JAVA_OPTS="${JAVA_OPTS} -Dserver.local.startpath=${RESOURCES_HOME}";
-JAVA_OPTS="${JAVA_OPTS} -DAAI_CHEF_ENV=${AAI_CHEF_ENV}";
-JAVA_OPTS="${JAVA_OPTS} -DSCLD_ENV=${SCLD_ENV}";
-JAVA_OPTS="${JAVA_OPTS} -DAFT_ENVIRONMENT=${AFT_ENVIRONMENT}";
-JAVA_OPTS="${JAVA_OPTS} -DlrmName=com.att.ajsc.aai-graphadmin";
JAVA_OPTS="${JAVA_OPTS} -DAAI_BUILD_VERSION=${AAI_BUILD_VERSION}";
JAVA_OPTS="${JAVA_OPTS} -Djava.security.egd=file:/dev/./urandom";
JAVA_OPTS="${JAVA_OPTS} -Dlogback.configurationFile=./resources/logback.xml";
diff --git a/src/main/java/org/onap/aai/dataexport/DataExportTasks.java b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
index 143312b..8d4ebb0 100644
--- a/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
+++ b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
@@ -78,7 +78,7 @@ public class DataExportTasks {
private static final Logger LOGGER;
private final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
-
+
static {
System.setProperty("aai.service.name", DataExportTasks.class.getSimpleName());
Properties props = System.getProperties();
@@ -103,10 +103,10 @@ public class DataExportTasks {
*/
@Scheduled(cron = "${dataexporttask.cron}" )
public void export() {
-
+
try {
exportTask();
- }
+ }
catch (Exception e) {
ErrorLogHelper.logError("AAI_8002", "Exception while running export "+ LogFormatTools.getStackTop(e));
}
@@ -138,14 +138,14 @@ public class DataExportTasks {
}
LOGGER.debug("Started exportTask: " + dateFormat.format(new Date()));
-
+
String enableSchemaValidation = AAIConfig.get("aai.dataexport.enable.schema.validation", "false");
String outputLocation = AAIConstants.AAI_HOME_BUNDLECONFIG + AAIConfig.get("aai.dataexport.output.location");
String enableMultipleSnapshots = AAIConfig.get("aai.dataexport.enable.multiple.snapshots", "false");
String nodeConfigurationLocation = AAIConstants.AAI_HOME_BUNDLECONFIG + AAIConfig.get("aai.dataexport.node.config.location");
String inputFilterConfigurationLocation = AAIConstants.AAI_HOME_BUNDLECONFIG + AAIConfig.get("aai.dataexport.input.filter.config.location");
String enablePartialGraph = AAIConfig.get("aai.dataexport.enable.partial.graph", "true");
-
+
// Check that the output location exist
File targetDirFile = new File(outputLocation);
if ( !targetDirFile.exists() ) {
@@ -155,7 +155,7 @@ public class DataExportTasks {
//Delete any existing payload files
deletePayload(targetDirFile);
}
-
+
File snapshot = null;
String snapshotFilePath = null;
if ( "false".equalsIgnoreCase(enableMultipleSnapshots)){
@@ -172,7 +172,7 @@ public class DataExportTasks {
else {
snapshotFilePath = findMultipleSnapshots();
}
-
+
List<String> paramsList = new ArrayList<>();
paramsList.add("-s");
paramsList.add(enableSchemaValidation);
@@ -188,10 +188,10 @@ public class DataExportTasks {
paramsList.add(enablePartialGraph);
paramsList.add("-d");
paramsList.add(snapshotFilePath);
-
+
LOGGER.debug("paramsList is : " + paramsList);
-
- String[] paramsArray = paramsList.toArray(new String[0]);
+
+ String[] paramsArray = paramsList.toArray(new String[0]);
try {
DynamicPayloadGenerator.run(loaderFactory, edgeIngestor, schemaVersions, paramsArray, false);
LOGGER.debug("DynamicPaylodGenerator completed");
@@ -208,7 +208,7 @@ public class DataExportTasks {
}
LOGGER.info("Ended exportTask: " + dateFormat.format(new Date()));
auditLog.logAfter();
-
+
}
/**
* The isDataExportRunning method, checks if the data export task was started separately via command line
@@ -220,7 +220,7 @@ public class DataExportTasks {
int count = 0;
try {
- process = new ProcessBuilder().command("bash", "-c", "ps -ef | grep '[D]ynamicPayloadGenerator'").start();
+ process = new ProcessBuilder().command("sh", "-c", "ps -ef | grep '[D]ynamicPayloadGenerator'").start();
InputStream is = process.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
@@ -244,11 +244,11 @@ public class DataExportTasks {
* @return a single snapshot File
*/
private static File findSnapshot() {
- String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" +
+ String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" +
AAIConstants.AAI_FILESEP + "dataSnapshots";
File snapshot = null;
File targetDirFile = new File(targetDir);
-
+
File[] allFilesArr = targetDirFile.listFiles((FileFilter) FileFileFilter.FILE);
if ( allFilesArr == null || allFilesArr.length == 0 ) {
ErrorLogHelper.logError("AAI_8001", "Unable to find data snapshots at " + targetDir);
@@ -265,18 +265,18 @@ public class DataExportTasks {
}
return (snapshot);
}
-
+
/**
* The method findMultipleSnapshots looks in the data snapshots directory for a set of snapshot files that match the pattern.
* @return the file name prefix corresponding to the second to last set of snapshots
*/
private static String findMultipleSnapshots() {
- String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" +
+ String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" +
AAIConstants.AAI_FILESEP + "dataSnapshots";
String snapshotName = null;
File targetDirFile = new File(targetDir);
TreeMap<String,List<File>> fileMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
-
+
/*dataSnapshot.graphSON.201804022009.P0
dataSnapshot.graphSON.201804022009.P1
dataSnapshot.graphSON.201804022009.P2
@@ -284,16 +284,16 @@ public class DataExportTasks {
dataSnapshot.graphSON.201804022009.P4*/
String snapshotPattern = "^.*dataSnapshot\\.graphSON\\.(\\d+)\\.P.*$";
Pattern p = Pattern.compile (snapshotPattern);
-
+
FileFilter fileFilter = new RegexFileFilter("^.*dataSnapshot\\.graphSON\\.(\\d+)\\.P.*$");
File[] allFilesArr = targetDirFile.listFiles(fileFilter);
-
+
if ( allFilesArr == null || allFilesArr.length == 0 ) {
ErrorLogHelper.logError("AAI_8001", "Unable to find data snapshots at " + targetDir);
LOGGER.debug("Unable to find data snapshots at " + targetDir);
return (null);
}
-
+
if ( allFilesArr.length > 1 ) {
Arrays.sort(allFilesArr, LastModifiedFileComparator.LASTMODIFIED_REVERSE);
for ( File f : allFilesArr ) {
@@ -317,13 +317,13 @@ public class DataExportTasks {
}
if ( fileMap.size() > 1 ) {
NavigableMap<String,List<File>> dmap = fileMap.descendingMap();
-
+
Map.Entry<String,List<File>> fentry = dmap.firstEntry();
LOGGER.debug ("First key in descending map " + fentry.getKey());
-
+
Map.Entry<String,List<File>> lentry = dmap.higherEntry(fentry.getKey());
LOGGER.debug ("Next key in descending map " + lentry.getKey());
-
+
List<File> l = lentry.getValue();
snapshotName = l.get(0).getAbsolutePath();
// Remove the .P* extension
@@ -348,7 +348,7 @@ public class DataExportTasks {
* @throws AAIException
*/
private static void deletePayload(File targetDirFile) {
-
+
File[] allFilesArr = targetDirFile.listFiles((FileFilter)DirectoryFileFilter.DIRECTORY);
if ( allFilesArr == null || allFilesArr.length == 0 ) {
LOGGER.debug("No payload files found at " + targetDirFile.getPath());
@@ -359,12 +359,12 @@ public class DataExportTasks {
FileUtils.deleteDirectory(f);
}
catch (IOException e) {
-
+
LOGGER.debug("Unable to delete directory " + f.getAbsolutePath() + " " + e.getMessage());
}
-
+
}
-
+
}
/**
* The runScript method runs a shell script/command with a variable number of arguments
@@ -380,6 +380,6 @@ public class DataExportTasks {
ErrorLogHelper.logError("AAI_8002", "Exception while running dynamicPayloadArchive.sh "+ LogFormatTools.getStackTop(e));
LOGGER.debug("Exception while running dynamicPayloadArchive.sh" + LogFormatTools.getStackTop(e));
}
-
+
}
}
diff --git a/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java b/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java
index 21f3172..4e162ca 100644
--- a/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java
+++ b/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java
@@ -49,7 +49,7 @@ import org.springframework.stereotype.Component;
public class DataGroomingTasks {
private AaiScheduledTaskAuditLog auditLog;
-
+
private static final Logger LOGGER = LoggerFactory.getLogger(DataGroomingTasks.class);
private final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
@@ -70,7 +70,7 @@ public class DataGroomingTasks {
return;
}
}
-
+
LOGGER.debug("Started cron job dataGrooming @ " + dateFormat.format(new Date()));
Map<String, String> dataGroomingFlagMap = new HashMap<>();
@@ -111,7 +111,7 @@ public class DataGroomingTasks {
paramsArray.add("-dontFixOrphans");
}
if("true".equals(dataGroomingFlagMap.get("enabletimewindowminutes"))){
- paramsArray.add("-timeWindowMinutes");
+ paramsArray.add("-timeWindowMinutes");
paramsArray.add(dataGroomingFlagMap.get("timewindowminutesvalue"));
}
if("true".equals(dataGroomingFlagMap.get("enableskiphostcheck"))){
@@ -119,19 +119,19 @@ public class DataGroomingTasks {
}
if("true".equals(dataGroomingFlagMap.get("enablesleepminutes"))) {
- paramsArray.add("-sleepMinutes");
+ paramsArray.add("-sleepMinutes");
paramsArray.add(dataGroomingFlagMap.get("sleepminutesvalue"));
}
-
+
if("true".equals(dataGroomingFlagMap.get("enableedgesonly"))){
paramsArray.add("-edgesOnly");
}
if("true".equals(dataGroomingFlagMap.get("enableskipedgechecks"))) {
paramsArray.add("-skipEdgeChecks");
}
-
+
if("true".equals(dataGroomingFlagMap.get("enablemaxfix"))) {
- paramsArray.add("-maxFix");
+ paramsArray.add("-maxFix");
paramsArray.add(dataGroomingFlagMap.get("maxfixvalue"));
}
if("true".equals(dataGroomingFlagMap.get("enabledupecheckoff"))){
@@ -151,7 +151,7 @@ public class DataGroomingTasks {
paramsArray.add("-f");
paramsArray.add(dataGroomingFlagMap.get("fvalue"));
}
-
+
DataGrooming dataGrooming = new DataGrooming(loaderFactory, schemaVersions);
String[] paramsList = paramsArray.toArray(new String[0]);
if (AAIConfig.get("aai.cron.enable.dataGrooming").equals("true")) {
@@ -174,7 +174,7 @@ public class DataGroomingTasks {
int count = 0;
try {
- process = new ProcessBuilder().command("bash", "-c", "ps -ef | grep '[D]ataGrooming'").start();
+ process = new ProcessBuilder().command("sh", "-c", "ps -ef | grep '[D]ataGrooming'").start();
InputStream is = process.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
diff --git a/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java b/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java
index 15fff4b..d8cb65a 100644
--- a/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java
+++ b/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java
@@ -45,13 +45,13 @@ import org.slf4j.MDC;
@Component
@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
public class DataSnapshotTasks {
-
+
private AaiScheduledTaskAuditLog auditLog;
-
+
private static final Logger LOGGER = LoggerFactory.getLogger(DataSnapshotTasks.class);
private final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
-
+
@Scheduled(cron = "${datasnapshottasks.cron}" )
public void snapshotScheduleTask() throws AAIException, Exception {
auditLog = new AaiScheduledTaskAuditLog();
@@ -86,7 +86,7 @@ public class DataSnapshotTasks {
int count = 0;
try {
- process = new ProcessBuilder().command("bash", "-c", "ps -ef | grep '[D]ataSnapshot'").start();
+ process = new ProcessBuilder().command("sh", "-c", "ps -ef | grep '[D]ataSnapshot'").start();
InputStream is = process.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
@@ -103,5 +103,3 @@ public class DataSnapshotTasks {
return count > 0;
}
}
-
-
diff --git a/src/main/java/org/onap/aai/historytruncate/HistoryTruncateTasks.java b/src/main/java/org/onap/aai/historytruncate/HistoryTruncateTasks.java
index ec6fac3..a2ff86c 100644
--- a/src/main/java/org/onap/aai/historytruncate/HistoryTruncateTasks.java
+++ b/src/main/java/org/onap/aai/historytruncate/HistoryTruncateTasks.java
@@ -52,7 +52,7 @@ public class HistoryTruncateTasks {
@Scheduled(cron = "${historytruncatetasks.cron}" )
public void historyTruncateScheduleTask() throws AAIException, Exception {
-
+
if(!"true".equals(AAIConfig.get("aai.disable.check.historytruncate.running", "false"))){
if(checkIfHistoryTruncateIsRunning()){
LOGGER.debug("History Truncate is already running on the system");
@@ -65,9 +65,9 @@ public class HistoryTruncateTasks {
try {
if (AAIConfig.get("aai.cron.enable.historytruncate").equals("true")) {
// Until we're comfortable with how it is working, we will keep it in "LOG_ONLY" mode
- String defaultTruncMode = "LOG_ONLY";
- String defaultTruncWindowDays = "999";
- String [] params = {"-truncateMode",defaultTruncMode,"-truncateWindowDays",defaultTruncWindowDays};
+ String defaultTruncMode = "LOG_ONLY";
+ String defaultTruncWindowDays = "999";
+ String [] params = {"-truncateMode",defaultTruncMode,"-truncateWindowDays",defaultTruncWindowDays};
HistoryTruncate.main(params);
}
}
@@ -87,7 +87,7 @@ public class HistoryTruncateTasks {
int count = 0;
try {
- process = new ProcessBuilder().command("bash", "-c", "ps -ef | grep '[H]istoryTruncate'").start();
+ process = new ProcessBuilder().command("sh", "-c", "ps -ef | grep '[H]istoryTruncate'").start();
InputStream is = process.getInputStream();
InputStreamReader isr = new InputStreamReader(is);
BufferedReader br = new BufferedReader(isr);
@@ -104,5 +104,3 @@ public class HistoryTruncateTasks {
return count > 0;
}
}
-
-
diff --git a/src/main/scripts/add_vertex_label.sh b/src/main/scripts/add_vertex_label.sh
index f026bd0..198de68 100644
--- a/src/main/scripts/add_vertex_label.sh
+++ b/src/main/scripts/add_vertex_label.sh
@@ -1,15 +1,15 @@
-#!/bin/bash
-
-filename=$1;
-
-if [ -z "$filename" ]; then
- echo "Please provide a graphson file";
- exit 1;
-fi;
-
-if [ ! -f "$filename" ]; then
- echo "Unable to find the graphson file $filename";
- exit 1;
-fi;
-
-sed 's/"label":"vertex"\(.*\)"aai-node-type":\[{"id":"\([^"]*\)","value":"\([^"]*\)"/"label":"\3"\1"aai-node-type":[{"id":"\2","value":"\3"/g' $filename > "with_label_$filename"; \ No newline at end of file
+#!/bin/sh
+
+filename=$1;
+
+if [ -z "$filename" ]; then
+ echo "Please provide a graphson file";
+ exit 1;
+fi;
+
+if [ ! -f "$filename" ]; then
+ echo "Unable to find the graphson file $filename";
+ exit 1;
+fi;
+
+sed 's/"label":"vertex"\(.*\)"aai-node-type":\[{"id":"\([^"]*\)","value":"\([^"]*\)"/"label":"\3"\1"aai-node-type":[{"id":"\2","value":"\3"/g' $filename > "with_label_$filename";
diff --git a/src/main/scripts/audit_schema.sh b/src/main/scripts/audit_schema.sh
index 686dd49..0818e20 100644
--- a/src/main/scripts/audit_schema.sh
+++ b/src/main/scripts/audit_schema.sh
@@ -25,7 +25,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
source_profile;
execute_spring_jar org.onap.aai.db.schema.ScriptDriver "/opt/app/aai-graphadmin/resources/logback.xml" "$@"
end_date;
diff --git a/src/main/scripts/common_functions.sh b/src/main/scripts/common_functions.sh
index 1fac7ce..e132798 100644
--- a/src/main/scripts/common_functions.sh
+++ b/src/main/scripts/common_functions.sh
@@ -1,19 +1,8 @@
-#!/bin/ksh
+#!/bin/sh
#
# Common functions that can be used throughout multiple scripts
# In order to call these functions, this file needs to be sourced
-# Checks if the user that is currently running is aaiadmin
-check_user(){
-
- userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
-
- if [ "${userid}" != "aaiadmin" ]; then
- echo "You must be aaiadmin to run $0. The id used $userid."
- exit 1
- fi
-}
-
# Sources the profile and sets the project home
source_profile(){
PROJECT_HOME=/opt/app/aai-graphadmin
@@ -49,7 +38,7 @@ execute_spring_jar(){
JAVA_OPTS="${JAVA_OPTS} ${JAVA_POST_OPTS}";
"${JAVA_HOME}/bin/java" ${JVM_OPTS} ${JAVA_OPTS} -jar ${EXECUTABLE_JAR} "$@" || {
- echo "Failed to run the tool $0 successfully";
+ echo "Failed to run the tool $0";
exit 1;
}
}
diff --git a/src/main/scripts/createDBSchema.sh b/src/main/scripts/createDBSchema.sh
index 01fef07..b9b8aeb 100644
--- a/src/main/scripts/createDBSchema.sh
+++ b/src/main/scripts/createDBSchema.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
# ============LICENSE_START=======================================================
# org.onap.aai
@@ -30,10 +30,10 @@
#
#
-COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+set -x;
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
source_profile;
if [ -z "$1" ]; then
execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml
@@ -41,4 +41,4 @@ else
execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "$1"
fi;
end_date;
-exit 0 \ No newline at end of file
+exit 0
diff --git a/src/main/scripts/dataGrooming.sh b/src/main/scripts/dataGrooming.sh
index a6b5f4f..89fd32a 100644
--- a/src/main/scripts/dataGrooming.sh
+++ b/src/main/scripts/dataGrooming.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
# The script invokes the dataGrooming java class to run some tests and generate a report and
# potentially do some auto-deleteing.
@@ -6,7 +6,7 @@
# Here are the allowed Parameters. Note - they are all optional and can be mixed and matched.
#
# -f oldFileName (see note below)
-# -autoFix
+# -autoFix
# -sleepMinutes nn
# -edgesOnly
# -skipEdges
@@ -24,40 +24,40 @@
#
#
# NOTES:
-# -f The name of a previous report can optionally be passed in with the "-f" option.
-# Just the filename -- ie. "dataGrooming.sh -f dataGrooming.201504272106.out"
+# -f The name of a previous report can optionally be passed in with the "-f" option.
+# Just the filename -- ie. "dataGrooming.sh -f dataGrooming.201504272106.out"
# The file will be assumed to be in the directory that it was created in.
# If a filename is passed, then the "deleteCandidate" vertex-id's and bad edges
# listed inside that report file will be deleted on this run if they are encountered as
# bad nodes/edges again.
-#
+#
# -autoFix If you don't use the "-f" option, you could choose to use "-autofix" which will
-# automatically run the script twice: once to look for problems, then after
+# automatically run the script twice: once to look for problems, then after
# sleeping for a few minutes, it will re-run with the inital-run's output as
-# an input file.
+# an input file.
#
# -maxFix When using autoFix, you might want to limit how many 'bad' records get fixed.
# This is a safeguard against accidently deleting too many records automatically.
# It has a default value set in AAIConstants: AAI_GROOMING_DEFAULT_MAX_FIX = 15;
-# If there are more than maxFix candidates found -- then none will be deleted (ie.
+# If there are more than maxFix candidates found -- then none will be deleted (ie.
# someone needs to look into it)
-#
+#
# -sleepMinutes When using autoFix, this defines how many minutes we sleep before the second run.
# It has a default value set in AAIConstants: AAI_GROOMING_DEFAULT_SLEEP_MINUTES = 7;
# The reason we sleep at all between runs is that our DB is "eventually consistant", so
# we want to give it time to resolve itself if possible.
#
# -edgesOnly Can be used any time you want to limit this tool so it only looks at edges.
-# Note - as of 1710, we have not been seeing many purely bad edges,
+# Note - as of 1710, we have not been seeing many purely bad edges,
# (ie. not associated with a phantom node) so this option is not used often.
-#
+#
# -skipEdgeChecks Use it to bypass checks for bad Edges (which are pretty rare).
#
# -timeWindowMinutes Use it to limit the nodes looked at to ones whose update-timestamp tells us that it was last updated less than this many minutes ago. Note this is usually used along with the skipEdgeChecks option.
#
-# -dontFixOrphans Since there can sometimes be a lot of orphan nodes, and they don't
+# -dontFixOrphans Since there can sometimes be a lot of orphan nodes, and they don't
# harm processing as much as phantom-nodes or bad-edges, it is useful to be
-# able to ignore them when fixing things.
+# able to ignore them when fixing things.
#
# -skipHostCheck By default, the grooming tool will check to see that it is running
# on the host that is the first one in the list found in:
@@ -68,22 +68,22 @@
# -singleCommits By default, the grooming tool will do all of its processing and then do
# a commit of all the changes at once. This option (maybe could have been named better)
# is letting the user override the default behavior and do a commit for each
-# individual 'remove" one by one as they are encountered by the grooming logic.
-# NOTE - this only applies when using either the "-f" or "-autoFix" options since
+# individual 'remove" one by one as they are encountered by the grooming logic.
+# NOTE - this only applies when using either the "-f" or "-autoFix" options since
# those are the only two that make changes to the database.
#
# -dupeCheckOff By default, we will check all of our nodes for duplicates. This parameter lets
# us turn this check off if we don't want to do it for some reason.
#
-# -dupeFixOn When we're fixing data, by default we will NOT fix duplicates This parameter lets us turn
-# that fixing ON when we are comfortable that it can pick the correct duplicate to preserve.
+# -dupeFixOn When we're fixing data, by default we will NOT fix duplicates This parameter lets us turn
+# that fixing ON when we are comfortable that it can pick the correct duplicate to preserve.
#
# -ghost2CheckOff By default, we will check for the "new" kind of ghost that we saw on
-# Production in early February 2016. This parameter lets us turn this check off if we
+# Production in early February 2016. This parameter lets us turn this check off if we
# don't want to do it for some reason.
#
-# -ghost2FixOn When we're fixing data, by default we will NOT try to fix the "new" ghost nodes.
-# This parameter lets us turn that fixing ON if we want to try to fix them.
+# -ghost2FixOn When we're fixing data, by default we will NOT try to fix the "new" ghost nodes.
+# This parameter lets us turn that fixing ON if we want to try to fix them.
#
#
COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
@@ -93,7 +93,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
# and then let the common functions check if the function exist and invoke it
# So this all can be templated out
start_date;
-check_user;
processStat=$(ps -ef | grep '[D]ataGrooming');
if [ "$processStat" != "" ]
diff --git a/src/main/scripts/dataRestoreFromSnapshot.sh b/src/main/scripts/dataRestoreFromSnapshot.sh
index 20bd0a8..d7b1a83 100644
--- a/src/main/scripts/dataRestoreFromSnapshot.sh
+++ b/src/main/scripts/dataRestoreFromSnapshot.sh
@@ -1,8 +1,8 @@
-#!/bin/ksh
+#!/bin/sh
#
-# This script uses the dataSnapshot and SchemaGenerator (via GenTester) java classes to restore
-# data to a database by doing three things:
-# 1) clear out whatever data and schema are currently in the db
+# This script uses the dataSnapshot and SchemaGenerator (via GenTester) java classes to restore
+# data to a database by doing three things:
+# 1) clear out whatever data and schema are currently in the db
# 2) rebuild the schema (using the SchemaGenerator)
# 3) reload data from the passed-in datafile (which must found in the dataSnapShots directory and
# contain an xml view of the db data).
@@ -12,7 +12,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
if [ "$#" -lt 1 ]; then
echo "Illegal number of parameters"
@@ -29,7 +28,7 @@ if [ "$?" -ne "0" ]; then
echo "Problem clearing out database."
exit 1
fi
-
+
#### Step 2) rebuild the db-schema
execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "GEN_DB_WITH_NO_DEFAULT_CR"
if [ "$?" -ne "0" ]; then
@@ -45,6 +44,6 @@ if [ "$?" -ne "0" ]; then
end_date;
exit 1
fi
-
+
end_date;
exit 0
diff --git a/src/main/scripts/dataRestoreFromSnapshotMulti.sh b/src/main/scripts/dataRestoreFromSnapshotMulti.sh
index 1e322dc..b2f74d1 100644
--- a/src/main/scripts/dataRestoreFromSnapshotMulti.sh
+++ b/src/main/scripts/dataRestoreFromSnapshotMulti.sh
@@ -1,10 +1,10 @@
-#!/bin/ksh
+#!/bin/sh
#
# NOTE - this is the updated version of this script which uses multi-threaded reload code
#
-# This script uses the dataSnapshot and SchemaGenerator (via GenTester) java classes to restore
-# data to a database by doing three things:
-# 1) clear out whatever data and schema are currently in the db
+# This script uses the dataSnapshot and SchemaGenerator (via GenTester) java classes to restore
+# data to a database by doing three things:
+# 1) clear out whatever data and schema are currently in the db
# 2) rebuild the schema (using the SchemaGenerator)
# 3) reload data from the passed-in datafile (which must found in the dataSnapShots directory and
# contain an xml view of the db data).
@@ -14,7 +14,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
if [ "$#" -lt 1 ]; then
echo "Illegal number of parameters"
@@ -31,7 +30,7 @@ if [ "$?" -ne "0" ]; then
echo "Problem clearing out database."
exit 1
fi
-
+
#### Step 2) rebuild the db-schema
execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "GEN_DB_WITH_NO_DEFAULT_CR"
if [ "$?" -ne "0" ]; then
@@ -47,6 +46,6 @@ if [ "$?" -ne "0" ]; then
end_date;
exit 1
fi
-
+
end_date;
exit 0
diff --git a/src/main/scripts/dataSnapshot.sh b/src/main/scripts/dataSnapshot.sh
index ca3b033..11b3964 100644
--- a/src/main/scripts/dataSnapshot.sh
+++ b/src/main/scripts/dataSnapshot.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
# This script invokes the dataSnapshot java class passing an option to tell it to take
# a snapshot of the database and store it as a single-line XML file.
@@ -21,7 +21,6 @@ fi
# and then let the common functions check if the function exist and invoke it
# So this all can be templated out
start_date;
-check_user;
source_profile;
# Only sourcing the file aai-graphadmin-tools-vars for dataSnapshot
diff --git a/src/main/scripts/dupeTool.sh b/src/main/scripts/dupeTool.sh
index d4cdb9c..bad366b 100644
--- a/src/main/scripts/dupeTool.sh
+++ b/src/main/scripts/dupeTool.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
###
# ============LICENSE_START=======================================================
@@ -9,9 +9,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -38,35 +38,34 @@
# nodes created that many (or fewer) minutes ago.
# -autoFix (optional) use this if you want duplicates fixed automatically (if we
# can figure out which to delete)
-# -maxFix (optional) like with dataGrooming lets you override the default maximum
+# -maxFix (optional) like with dataGrooming lets you override the default maximum
# number of dupes that can be processed at one time
# -skipHostCheck (optional) By default, the dupe tool will check to see that it is running
# on the host that is the first one in the list found in:
# aaiconfig.properties aai.primary.filetransfer.serverlist
# This is so that when run from the cron, it only runs on one machine.
# This option lets you turn that checking off.
-# -sleepMinutes (optional) like with DataGrooming, you can override the
+# -sleepMinutes (optional) like with DataGrooming, you can override the
# sleep time done when doing autoFix between first and second checks of the data.
-# -params4Collect (optional) followed by a string to tell what properties/values to use
+# -params4Collect (optional) followed by a string to tell what properties/values to use
# to limit the nodes being looked at. Must be in the format
# of “propertName|propValue” use commas to separate if there
# are more than one name/value being passed.
# -specialTenantRule (optional) turns on logic which will use extra logic to figure
# out which tenant node can be deleted in a common scenario.
-#
-#
+#
+#
# For example (there are many valid ways to use it):
-#
+#
# dupeTool.sh -userId am8383 -nodeType tenant -timeWindowMinutes 60 -autoFix
# or
# dupeTool.sh -userId am8383 -nodeType tenant -specialTenantRule -autoFix -maxFix 100
-#
+#
COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
source_profile;
export JAVA_PRE_OPTS=${JAVA_PRE_OPTS:--Xms6g -Xmx6g};
diff --git a/src/main/scripts/dynamicPayloadArchive.sh b/src/main/scripts/dynamicPayloadArchive.sh
index 89cf990..abac76e 100644
--- a/src/main/scripts/dynamicPayloadArchive.sh
+++ b/src/main/scripts/dynamicPayloadArchive.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
###
# ============LICENSE_START=======================================================
@@ -9,9 +9,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -21,7 +21,7 @@
###
#
-# The script is called to tar and gzip the files under /opt/app/aai-graphadmin/data/scriptdata/addmanualdata/tenant_isolation/payload
+# The script is called to tar and gzip the files under /opt/app/aai-graphadmin/data/scriptdata/addmanualdata/tenant_isolation/payload
# which contains the payload files created by the dynamicPayloadGenerator.sh tool.
# /opt/app/aai-graphadmin/data/scriptdata/addmanualdata/tenant_isolation is mounted to the docker container
#
@@ -64,7 +64,7 @@ then
fi
cd ${ARCHIVE_DIRECTORY}
-gzip ${ARCHIVE_DIRECTORY}/dynamicPayloadArchive_${TS}.tar
+gzip ${ARCHIVE_DIRECTORY}/dynamicPayloadArchive_${TS}.tar
if [ $? -ne 0 ]
then
diff --git a/src/main/scripts/dynamicPayloadGenerator.sh b/src/main/scripts/dynamicPayloadGenerator.sh
index 323b161..a414e5f 100644
--- a/src/main/scripts/dynamicPayloadGenerator.sh
+++ b/src/main/scripts/dynamicPayloadGenerator.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
###
# ============LICENSE_START=======================================================
@@ -9,9 +9,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -23,27 +23,27 @@
#
# dynamicPayloadGenerator.sh -- This tool is used to dynamically load payloads from snapshots
# It is used to load a snapshot into memory and generate payloads for any input nodes
-#
+#
#
# Parameters:
#
# -d (required) name of the fully qualified Datasnapshot file that you need to load
-# -s (optional) true or false to enable or disable schema, By default it is true for production,
+# -s (optional) true or false to enable or disable schema, By default it is true for production,
# you can change to false if the snapshot has duplicates
# -c (optional) config file to use for loading snapshot into memory.
# -o (optional) output file to store the data files
# -f (optional) PAYLOAD or DMAAP-MR
-# -n (optional) input file for the script
-#
-#
+# -n (optional) input file for the script
+#
+#
# For example (there are many valid ways to use it):
-#
+#
# dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -o '/opt/app/aai-graphadmin/resources/etc/scriptdata/addmanualdata/payload_dir/'
-#
+#
# or
# dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -s false -c '/opt/app/aai-graphadmin/resources/etc/appprops/dynamic.properties'
# -o '/opt/app/aai-graphadmin/resources/etc/scriptdata/addmanualdata/payload_dir/' -f PAYLOAD -n '/opt/app/aai-graphadmin/resources/etc/scriptdata/nodes.json'
-#
+#
echo
@@ -58,7 +58,7 @@ display_usage() {
a. -d (required) Name of the fully qualified Datasnapshot file that you need to load
3. Optional Parameters:
- a. -s (optional) true or false to enable or disable schema, By default it is true for production,
+ a. -s (optional) true or false to enable or disable schema, By default it is true for production,
b. -c (optional) config file to use for loading snapshot into memory. By default it is set to /opt/app/aai-graphadmin/resources/etc/appprops/dynamic.properties
c. -f (optional) PAYLOAD or DMAAP-MR
d. -n (optional) input file specifying the nodes and relationships to export. Default: /opt/app/aai-graphadmin/scriptdata/tenant_isolation/nodes.json
@@ -66,11 +66,11 @@ display_usage() {
f. -o (optional) output directory to store the data files
4. For example (there are many valid ways to use it):
dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -o '/opt/app/aai-graphadmin/resources/etc/scriptdata/addmanualdata/tenant_isolation/'
-
+
dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -s false -c '/opt/app/aai-graphadmin/resources/etc/appprops/dynamic.properties'
-o '/opt/app/aai-graphadmin/resources/etc/scriptdata/addmanualdata/tenant_isolation/' -f PAYLOAD -n '/opt/app/aai-graphadmin/resources/etc/scriptdata/tenant_isolation/nodes.json'
-i '/opt/app/aai-graphadmin/resources/etc/scriptdata/tenant_isolation/inputFilters.json'
-
+
EOF
}
if [ $# -eq 0 ]; then
@@ -82,7 +82,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
source_profile;
export JVM_OPTS="-Xmx9000m -Xms9000m"
diff --git a/src/main/scripts/dynamicPayloadPartial.sh b/src/main/scripts/dynamicPayloadPartial.sh
index 77b2919..f9b08a0 100644
--- a/src/main/scripts/dynamicPayloadPartial.sh
+++ b/src/main/scripts/dynamicPayloadPartial.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#Create empty partial snapshot file
INPUT_DATASNAPSHOT_FILE=$1
@@ -10,4 +10,4 @@ for nodeType in ${nodes[@]}
do
grep "aai-node-type.*\"value\":\"$nodeType\"" $INPUT_DATASNAPSHOT_FILE >>$INPUT_DATASNAPSHOT_FILE'.partial'
done
-exit 0 \ No newline at end of file
+exit 0
diff --git a/src/main/scripts/extract-events.sh b/src/main/scripts/extract-events.sh
index 5a2bb3b..0aa4635 100644
--- a/src/main/scripts/extract-events.sh
+++ b/src/main/scripts/extract-events.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
usage(){
echo "Usage $0 input-file output-file event-type";
}
@@ -12,8 +12,8 @@ input_file=$1
output_file=$2
event_type=$3
-grep "|${event_type}|" ${input_file} > ${output_file}.1
-sed -i -e '/InvokeReturn/s/^.*$//g' ${output_file}.1
+grep "|${event_type}|" ${input_file} > ${output_file}.1
+sed -i -e '/InvokeReturn/s/^.*$//g' ${output_file}.1
sed -i '/^$/d' ${output_file}.1
cat ${output_file}.1 | awk -F '|' '{print $29}' > ${output_file}
rm ${output_file}.1
diff --git a/src/main/scripts/forceDeleteTool.sh b/src/main/scripts/forceDeleteTool.sh
index 2d42fda..f24bacb 100644
--- a/src/main/scripts/forceDeleteTool.sh
+++ b/src/main/scripts/forceDeleteTool.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
# ============LICENSE_START=======================================================
# org.onap.aai
@@ -25,12 +25,12 @@
# forceDeleteTool.sh -- This tool is used to delete nodes that cannot be deleted using
# the normal REST API because of internal DB problems. For example, Phantom nodes
# and duplicate nodes cause errors to happen in "normal" REST API codes and must
-# be deleted using this tool.
-# Since it is not using the "normal" REST logic, it is also not invoking the "normal"
-# edge rules that we use to cascade deletes to "child" nodes. So - this tool can be dangerous.
-# Ie. if you accidently delete a parent node (like a cloud-region) that has many dependent
+# be deleted using this tool.
+# Since it is not using the "normal" REST logic, it is also not invoking the "normal"
+# edge rules that we use to cascade deletes to "child" nodes. So - this tool can be dangerous.
+# Ie. if you accidently delete a parent node (like a cloud-region) that has many dependent
# child nodes, there will be no way to get to any of those child-nodes after the cloud-region
-# has been deleted.
+# has been deleted.
# There are several environment variables defined in aaiconfig.properties to help minimize errors like that.
# aai.forceDel.protected.nt.list=cloud-region
# aai.forceDel.protected.edge.count=10
@@ -40,7 +40,7 @@
#
# -action (required) valid values: COLLECT_DATA or DELETE_NODE or DELETE_EDGE
# -userId (required) must be followed by a userid
-# -params4Collect (followed by a string to tell what properties/values to use
+# -params4Collect (followed by a string to tell what properties/values to use
# as part of a COLLECT_DATA request. Must be in the format
# of ?propertName|propValue? use commas to separate if there
# are more than one name/value being passed.
@@ -49,22 +49,22 @@
# -overRideProtection --- WARNING ? This over-rides the protections we introduced!
# It will let you override a protected vertex or vertex that has more
# than the allowed number of edges or descendants.
-# -DISPLAY_ALL_VIDS (optional) - in the rare case when you want to see the
-# vertex-ids (vids) of all the CONNECTED vertices, you can use this. By
+# -DISPLAY_ALL_VIDS (optional) - in the rare case when you want to see the
+# vertex-ids (vids) of all the CONNECTED vertices, you can use this. By
# default, we do not show them.
-#
-#
+#
+#
# For example:
-#
+#
# forceDeleteTool.sh -action COLLECT_DATA -userId am8383 -params4Collect "tenant-id|junk tenant01 ID 0224"
-#
+#
# forceDeleteTool.sh -action COLLECT_DATA -userId am8383 -params4Collect "cloud-owner|junkTesterCloudOwner 0224,cloud-region-id|junkTesterCloud REgion ID 0224"
#
# forceDeleteTool.sh -action DELETE_NODE -userId am8383 -vertexId 1234567
#
# forceDeleteTool.sh -action DELETE_EDGE -userId am8383 -edgeId 9876543
#
-#
+#
COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
@@ -74,7 +74,6 @@ start_date;
echo " NOTE - if you are deleting data, please run the dataSnapshot.sh script first or "
echo " at least make a note the details of the node that you are deleting. "
-check_user;
source_profile;
execute_spring_jar org.onap.aai.dbgen.ForceDeleteTool ${PROJECT_HOME}/resources/forceDelete-logback.xml "$@"
diff --git a/src/main/scripts/getDslResult.sh b/src/main/scripts/getDslResult.sh
index 78286cd..9e6ece6 100644
--- a/src/main/scripts/getDslResult.sh
+++ b/src/main/scripts/getDslResult.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
###
# ============LICENSE_START=======================================================
@@ -25,7 +25,7 @@ display_usage() {
Usage: $0 [options]
1. Usage: getDslResult.sh <base-path or optional host url> <optional input-json-filepath> <optional -XFROMAPPID> <optional -XTRANSID>
- 2. This script requires one argument, a base-path
+ 2. This script requires one argument, a base-path
3. Example for basepath: aai/{version}\
4. Adding the optional input-json-payload replaces the default dsl payload with the contents of the input-file
5. The query result is returned in the file resources/migration-input-files/dslResults.json
@@ -139,7 +139,7 @@ if [ $MISSING_PROP = false ]; then
fi
curl --request PUT -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -H "Content-Type: application/json" -T $fname $RESTURL$RESOURCE | jq '.' > $RESULTPATH
RC=$?
-
+
else
echo "usage: $0 <base-path>"
RC=-1
@@ -147,5 +147,5 @@ fi
if [ "a$JSONFILE" = "a$TEMPFILE" ]; then
rm $TEMPFILE
fi
-echo `date` " Done $0, exit code is $RC, returning result in $RESULTPATH"
-exit $RC \ No newline at end of file
+echo `date` " Done $0, exit code is $RC, returning result in $RESULTPATH"
+exit $RC
diff --git a/src/main/scripts/historyCreateDBSchema.sh b/src/main/scripts/historyCreateDBSchema.sh
index 7a08a68..834c1fe 100644
--- a/src/main/scripts/historyCreateDBSchema.sh
+++ b/src/main/scripts/historyCreateDBSchema.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
# ============LICENSE_START=======================================================
# org.onap.aai
@@ -23,10 +23,9 @@
#
#
-COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
source_profile;
if [ -z "$1" ]; then
execute_spring_jar org.onap.aai.schema.GenTester4Hist ${PROJECT_HOME}/resources/logback.xml
@@ -34,4 +33,4 @@ else
execute_spring_jar org.onap.aai.schema.GenTester4Hist ${PROJECT_HOME}/resources/logback.xml "$1"
fi;
end_date;
-exit 0 \ No newline at end of file
+exit 0
diff --git a/src/main/scripts/historyDbInitialLoad.sh b/src/main/scripts/historyDbInitialLoad.sh
index 1341b86..aa85161 100644
--- a/src/main/scripts/historyDbInitialLoad.sh
+++ b/src/main/scripts/historyDbInitialLoad.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
# This script uses "history" versions of dataSnapshot and SchemaGenerator (via genTester)
# java classes to do the INITIAL load of a history database based on regular dataSnapShot
@@ -14,7 +14,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
if [ "$#" -lt 1 ]; then
echo "Illegal number of parameters"
@@ -32,7 +31,7 @@ if [ "$?" -ne "0" ]; then
echo "Problem clearing out database."
exit 1
fi
-
+
#### Step 2) rebuild the db-schema
echo "---- Second Step: rebuild the db schema ----"
execute_spring_jar org.onap.aai.schema.GenTester4Hist ${PROJECT_HOME}/resources/logback.xml "GEN_DB_WITH_NO_DEFAULT_CR"
@@ -49,6 +48,6 @@ if [ "$?" -ne "0" ]; then
end_date;
exit 1
fi
-
+
end_date;
exit 0
diff --git a/src/main/scripts/historySchemaMod.sh b/src/main/scripts/historySchemaMod.sh
index c098f0e..b3c481f 100644
--- a/src/main/scripts/historySchemaMod.sh
+++ b/src/main/scripts/historySchemaMod.sh
@@ -1,30 +1,29 @@
-#!/bin/ksh
+#!/bin/sh
#
-# This script is used to correct mistakes made in the database schema.
-# It currently just allows you to change either the dataType and/or indexType on properties used by nodes.
+# This script is used to correct mistakes made in the database schema.
+# It currently just allows you to change either the dataType and/or indexType on properties used by nodes.
#
# NOTE - This script is for the History db. That is different than the
# regular schemaMod in these two ways: 1) it will never create a unique index.
# Indexes can be created, but they will never be defined as unique.
# 2) the last parameter (preserveDataFlag) is ignored since for history, we do
# not want to 'migrate' old data. Old data should not disappear or change.
-#
+#
#
# To use this script, you need to pass four parameters:
# propertyName -- the name of the property that you need to change either the index or dataType on
# targetDataType -- whether it's changing or not, you need to give it: String, Integer, Boolean or Long
# targetIndexInfo -- whether it's changing or not, you need to give it: index, noIndex or uniqueIndex
# preserveDataFlag -- true or false. The only reason I can think of why you'd ever want to
-# set this to false would be maybe if you were changing to an incompatible dataType so didn't
+# set this to false would be maybe if you were changing to an incompatible dataType so didn't
# want it to try to use the old data (and fail). But 99% of the time this will just be 'true'.
#
# Ie. historySchemaMod flavor-id String index true
#
-COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
if [ "$#" -ne 4 ]; then
echo "Illegal number of parameters"
@@ -39,6 +38,6 @@ if [ "$?" -ne "0" ]; then
end_date;
exit 1
fi
-
+
end_date;
exit 0
diff --git a/src/main/scripts/historyTruncateDb.sh b/src/main/scripts/historyTruncateDb.sh
index b0ad39e..644eb29 100644
--- a/src/main/scripts/historyTruncateDb.sh
+++ b/src/main/scripts/historyTruncateDb.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
###
# ============LICENSE_START=======================================================
@@ -9,9 +9,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -21,21 +21,21 @@
###
#
#
-# historyTruncateDb.sh -- This tool is usually run from a cron.
-# It uses the application.property "history.truncate.window.days" to look for
+# historyTruncateDb.sh -- This tool is usually run from a cron.
+# It uses the application.property "history.truncate.window.days" to look for
# and delete nodes and edges that have an end-ts earlier than the truncate window.
# Or, that can be over-ridden using the command line param, "-truncateWindowDays".
# That is - they were deleted from the 'real' database before the window.
-# So, if the window is set to 90 days, we will delete all nodes and edges
+# So, if the window is set to 90 days, we will delete all nodes and edges
# from the history db that were deleted from the real db more than 90 days ago.
#
# It also uses the property, "history.truncate.mode". Can be over-ridden using
# the command line property "-truncateMode"
# "LOG_ONLY" - look for candidate nodes/edges, but just log them (no deleting)
-# "DELETE_AND_LOG" - like it says... does the deletes and logs what
+# "DELETE_AND_LOG" - like it says... does the deletes and logs what
# it deleted (node and edge properties)
# "SILENT_DELETE" - not entirely silent, but will pare the logs way back to
-# just recording vertex and edge ids that are deleted.
+# just recording vertex and edge ids that are deleted.
#
# Ie. historyTruncateDb.sh -truncateWindowDays 60 -truncateMode LOG_ONLY
#
@@ -45,9 +45,8 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
source_profile;
execute_spring_jar org.onap.aai.historytruncate.HistoryTruncate ${PROJECT_HOME}/resources/logback.xml "$@"
end_date;
-exit 0 \ No newline at end of file
+exit 0
diff --git a/src/main/scripts/migration_verification.sh b/src/main/scripts/migration_verification.sh
index 1e1b228..8935adb 100644
--- a/src/main/scripts/migration_verification.sh
+++ b/src/main/scripts/migration_verification.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
###
# ============LICENSE_START=======================================================
@@ -9,9 +9,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -28,7 +28,7 @@
display_usage() {
cat << EOF
Usage: $0 [options]
-
+
1. Usage: migration_verification.sh <last_modified> <logs_path>
2. The <logs_path> should be a directory containing all of the logs. If empty, default path is /opt/app/aai-graphadmin/logs/migration.
3. The <last_modified> parameter should be an integer for up to how many minutes ago a log file should be parsed.
diff --git a/src/main/scripts/preDataRestore.sh b/src/main/scripts/preDataRestore.sh
index c176a1a..ba58b26 100644
--- a/src/main/scripts/preDataRestore.sh
+++ b/src/main/scripts/preDataRestore.sh
@@ -1,19 +1,18 @@
-#!/bin/ksh
+#!/bin/sh
#
# This script does just the first two steps of our normal dataRestoreFromSnapshot script.
-# This should only be needed if we are trouble-shooting and need to run step 3 (the
+# This should only be needed if we are trouble-shooting and need to run step 3 (the
# actual call to dataSnapshot) separately with different input params.
#
# This script does these two steps:
-# 1) clear out whatever data and schema are currently in the db
+# 1) clear out whatever data and schema are currently in the db
# 2) rebuild the schema (using the SchemaGenerator)
-#
+#
COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
if [ "$#" -lt 1 ]; then
echo "Illegal number of parameters"
@@ -30,7 +29,7 @@ if [ "$?" -ne "0" ]; then
echo "Problem clearing out database."
exit 1
fi
-
+
#### Step 2) rebuild the db-schema
execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "GEN_DB_WITH_NO_DEFAULT_CR"
if [ "$?" -ne "0" ]; then
@@ -39,6 +38,6 @@ if [ "$?" -ne "0" ]; then
fi
-
+
end_date;
exit 0
diff --git a/src/main/scripts/resend-dmaap-events.sh b/src/main/scripts/resend-dmaap-events.sh
index b8ff73f..27ce69f 100644
--- a/src/main/scripts/resend-dmaap-events.sh
+++ b/src/main/scripts/resend-dmaap-events.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
###
# ============LICENSE_START=======================================================
@@ -9,9 +9,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -31,13 +31,13 @@
#
# Parameters:
#
-# -b, (required) <string> the base url for the dmaap server
+# -b, (required) <string> the base url for the dmaap server
# -e, (required) <file> filename containing the missed events
# -l, (optional) indicating that the script should be run in debug mode
-# it will not send the dmaap messages to dmaap server
+# it will not send the dmaap messages to dmaap server
# but it will write to a file named resend_dmaap_server.out
# -x (optional) skip resource version check
-# -p, (required) <string> the password for the dmaap server
+# -p, (required) <string> the password for the dmaap server
# -s, (required) <file> containing the data snapshot graphson file to compare the resource versions against
# partial snapshots should be concatenated into a full snapshot file
# before running the script
@@ -49,22 +49,22 @@
#
# ./resend-dmaap-events.sh -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905 -t AAI-EVENT
#
-# For each dmaap message in the example_events.txt, it will check
+# For each dmaap message in the example_events.txt, it will check
# against graphson and try to send it to the dmaap server
# If the example_events.txt contains two events one that wasn't sent to dmaap
# and the other that was already updated by another PUT/DELETE
# and the output of the run will look something like this:
-#
+#
# Output:
# Dmaap Event with Id 7f7d8a7b-4034-46f3-a969-d7e5cbcbf75f was sent
# Dmaap Event with Id 7f7d8a7b-4034-46f3-a969-d7e5cbcbf75f not sent
-#
+#
# If lets say, there was a username password issue, you will see something like this:
# Dmaap Event with Id 7f7d8a7b-4034-46f3-a969-d7e5cbcbf75f was not sent due to dmaap error, please check logs
# Dmaap Event with Id 7f7d8a7b-4034-46f3-a969-d7e5cbcbf75f not sent
-#
+#
# From the directory in which you are executing the script (not where the script is located)
-# You can have it be located and executed in the same place
+# You can have it be located and executed in the same place
# Check for a file called resend_dmaap_error.log as it will give you more details on the error
#
# For testing purposes, if you are trying to run this script and don't want to actually
@@ -81,10 +81,10 @@
# Output:
# Dmaap Event with Id 7f7d8a7b-4034-46f3-a969-d7e5cbcbf75f was sent
# Dmaap Event with Id 7f7d8a7b-4034-46f3-a969-d7e5cbcbf75f not sent
-#
-# Also it will write the dmaap events to a file called dmaap_messages.out that
+#
+# Also it will write the dmaap events to a file called dmaap_messages.out that
# would have been sent out in the current directory where you are executing this script
-#
+#
current_directory=$( cd "$(dirname "$0")" ; pwd -P );
resend_error_log=${current_directory}/resend_dmaap_error.log
@@ -188,14 +188,14 @@ resource_version_matches_snapshot_file(){
resource_version=$3;
action=$4;
topic=$5;
-
+
if [ -z ${resource_version} ]; then
echo "Missing the parameter resource version to be passed";
return 1;
fi
# Modify the entity link passed to remove the /aai/v[0-9]+
- if [ "${topic}" = "<other-dmaap-topic>" ]; then
+ if [ "${topic}" = "<other-dmaap-topic>" ]; then
aai_uri=$(echo $entity_link | sed 's/\/<other-base>\/v[0-9][0-9]*//g');
else
aai_uri=$(echo $entity_link | sed 's/\/aai\/v[0-9][0-9]*//g');
@@ -231,7 +231,7 @@ send_dmaap(){
baseurl=$5;
topic=$6;
resp_code=0;
-
+
generated_file=$(uuidgen);
json_file=/tmp/${generated_file}.json;
@@ -260,7 +260,7 @@ send_dmaap(){
resp_code=1;
fi;
fi;
-
+
if [ -f "${json_file}" ]; then
rm $json_file;
fi;
@@ -274,8 +274,8 @@ send_dmaap(){
# Validates the events file and the snapshot file
# Goes through each line in the missed events file
-# Gets all the resource versions there are
-# Finds the smallest resource version there
+# Gets all the resource versions there are
+# Finds the smallest resource version there
# checks if the smallest resource version for the aai uri
# is what is currently in the last snapshot file provided by user
# If it is, it will send an dmaap event out
@@ -322,10 +322,10 @@ main(){
b ) # Specify the baseurl to dmaap
hostname=$OPTARG
;;
- h )
+ h )
usage;
;;
- \? )
+ \? )
echo "Invalid option: -$OPTARG" >&2
usage;
;;
@@ -338,7 +338,7 @@ main(){
if [ "$local_mode" = true ]; then
> ${resend_output};
fi;
-
+
while read dmaap_event; do
entity_link=$(echo $dmaap_event | grep -o '"entity-link":"[^"]*"' | cut -d":" -f2- | sed 's/"//g');
id=$(echo $dmaap_event | grep -o '"id":"[^"]*"' | cut -d":" -f2- | sed 's/"//g');
diff --git a/src/main/scripts/run_Migrations.sh b/src/main/scripts/run_Migrations.sh
index cbfe335..c60e59e 100644
--- a/src/main/scripts/run_Migrations.sh
+++ b/src/main/scripts/run_Migrations.sh
@@ -9,9 +9,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -27,7 +27,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
# and then let the common functions check if the function exist and invoke it
# So this all can be templated out
start_date;
-check_user;
source_profile;
ARGS="-c ${PROJECT_HOME}/resources/etc/appprops/janusgraph-migration.properties";
@@ -46,4 +45,4 @@ fi;
execute_spring_jar org.onap.aai.migration.MigrationController ${PROJECT_HOME}/resources/migration-logback.xml ${ARGS} "$@"
end_date;
-exit 0 \ No newline at end of file
+exit 0
diff --git a/src/main/scripts/run_SendDeleteMigrationNotification.sh b/src/main/scripts/run_SendDeleteMigrationNotification.sh
index 0b2f3e1..13d85b1 100644
--- a/src/main/scripts/run_SendDeleteMigrationNotification.sh
+++ b/src/main/scripts/run_SendDeleteMigrationNotification.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
###
# ============LICENSE_START=======================================================
@@ -9,9 +9,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -25,7 +25,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
start_date;
-check_user;
source_profile;
INPUT_PATH=$1
@@ -62,4 +61,4 @@ fi;
execute_spring_jar org.onap.aai.util.SendDeleteMigrationNotificationsMain ${PROJECT_HOME}/resources/migration-logback.xml ${ARGS} "$@"
end_date;
-exit 0 \ No newline at end of file
+exit 0
diff --git a/src/main/scripts/run_SendMigrationNotification.sh b/src/main/scripts/run_SendMigrationNotification.sh
index 994485e..985c006 100644
--- a/src/main/scripts/run_SendMigrationNotification.sh
+++ b/src/main/scripts/run_SendMigrationNotification.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
###
# ============LICENSE_START=======================================================
@@ -9,9 +9,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -25,7 +25,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
start_date;
-check_user;
source_profile;
INPUT_PATH=$1
@@ -61,4 +60,4 @@ fi;
execute_spring_jar org.onap.aai.util.SendMigrationNotificationsMain ${PROJECT_HOME}/resources/migration-logback.xml ${ARGS} "$@"
end_date;
-exit 0 \ No newline at end of file
+exit 0
diff --git a/src/main/scripts/schemaMod.sh b/src/main/scripts/schemaMod.sh
index 8e6c0bb..c2e959d 100644
--- a/src/main/scripts/schemaMod.sh
+++ b/src/main/scripts/schemaMod.sh
@@ -1,16 +1,16 @@
-#!/bin/ksh
+#!/bin/sh
#
-# This script is used to correct mistakes made in the database schema.
-# It currently just allows you to change either the dataType and/or indexType on properties used by nodes.
+# This script is used to correct mistakes made in the database schema.
+# It currently just allows you to change either the dataType and/or indexType on properties used by nodes.
#
-# NOTE - JanusGraph is not particularly elegant in about making changes to the schema.
+# NOTE - JanusGraph is not particularly elegant in about making changes to the schema.
# So it is really worthwhile to get indexes and dataTypes correct the first time around.
# Note also - This script just makes changes to the schema that is currently live.
# If you were to create a new schema in a brandy-new environment, it would look like
-# whatever our OXM files told it to look like. So, part of making a
+# whatever our OXM files told it to look like. So, part of making a
# change to the live db schema should Always first be to make the change in the appropriate
-# OXM schema file so that future environments will have the change. This script is
-# just to change existing instances of the schema since schemaGenerator does not
+# OXM schema file so that future environments will have the change. This script is
+# just to change existing instances of the schema since schemaGenerator does not
# update things - it just does the initial creation.
#
# To use this script, there are 5 required parameters, and one optional:
@@ -18,21 +18,20 @@
# targetDataType -- whether it's changing or not, you need to give it: String, Integer, Boolean or Long
# targetIndexInfo -- whether it's changing or not, you need to give it: index, noIndex or uniqueIndex
# preserveDataFlag -- true or false. The only reason I can think of why you'd ever want to
-# set this to false would be maybe if you were changing to an incompatible dataType so didn't
+# set this to false would be maybe if you were changing to an incompatible dataType so didn't
# want it to try to use the old data (and fail). But 99% of the time this will just be 'true'.
# consistencyLock -- true or false. Whether to enable consistency lock on the property or not
#
-# commitBlockSize -- OPTIONAL -- how many updates to commit at once.
+# commitBlockSize -- OPTIONAL -- how many updates to commit at once.
# Default will be used if no value is passed.
#
# Ie. schemaMod flavor-id String index true true
# or, schemaMod flavor-id String noIndex true true 50000
#
-COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
if [ "$#" -ne 5 ] && [ "$#" -ne 6 ]; then
echo "Illegal number of parameters"
@@ -47,6 +46,6 @@ if [ "$?" -ne "0" ]; then
end_date;
exit 1
fi
-
+
end_date;
exit 0
diff --git a/src/main/scripts/uniquePropertyCheck.sh b/src/main/scripts/uniquePropertyCheck.sh
index c3c92bf..92da3cb 100644
--- a/src/main/scripts/uniquePropertyCheck.sh
+++ b/src/main/scripts/uniquePropertyCheck.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
# The script invokes UniqueProperty java class to see if the passed property is unique in the db and if
# not, to display where duplicate values are found.
@@ -6,10 +6,9 @@
# For example: uniquePropertyCheck.sh subscriber-name
#
-COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
source_profile;
#execute_spring_jar org.onap.aai.util.UniquePropertyCheck ${PROJECT_HOME}/resources/uniquePropertyCheck-logback.xml "$@"
@@ -21,4 +20,4 @@ if [ $ret_code != 0 ]; then
fi
end_date;
-exit 0 \ No newline at end of file
+exit 0
diff --git a/src/main/scripts/updatePem.sh b/src/main/scripts/updatePem.sh
index e43a2eb..dc3a494 100644
--- a/src/main/scripts/updatePem.sh
+++ b/src/main/scripts/updatePem.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
###
# ============LICENSE_START=======================================================
@@ -24,7 +24,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
. ${COMMON_ENV_PATH}/common_functions.sh
start_date;
-check_user;
source_profile;
CERTPATH=$PROJECT_HOME/resources/etc/auth/
@@ -35,4 +34,4 @@ pw=$(execute_spring_jar org.onap.aai.util.AAIConfigCommandLinePropGetter "" "aai
openssl pkcs12 -in ${CERTPATH}/aai-client-cert.p12 -out $CERTPATH$CERTNAME -clcerts -nokeys -passin pass:$pw
openssl pkcs12 -in ${CERTPATH}/aai-client-cert.p12 -out $CERTPATH$KEYNAME -nocerts -nodes -passin pass:$pw
end_date;
-exit 0 \ No newline at end of file
+exit 0
diff --git a/src/main/scripts/updatePropertyTool.sh b/src/main/scripts/updatePropertyTool.sh
index 7e53a3f..589e4a3 100644
--- a/src/main/scripts/updatePropertyTool.sh
+++ b/src/main/scripts/updatePropertyTool.sh
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/sh
#
# ============LICENSE_START=======================================================
# org.onap.aai
@@ -50,7 +50,6 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
start_date;
-check_user;
source_profile;
execute_spring_jar org.onap.aai.dbgen.UpdatePropertyTool ${PROJECT_HOME}/resources/updatePropertyTool-logback.xml "$@"
end_date;