Diffstat (limited to 'test')
-rw-r--r--  test/csit/plans/aai/resources/docker-compose.yml  2
-rw-r--r--  test/csit/plans/aai/resources/setup.sh  1
-rw-r--r--  test/csit/plans/aai/traversal/docker-compose.yml  2
-rw-r--r--  test/csit/plans/aai/traversal/setup.sh  1
-rw-r--r--  test/csit/plans/cli/sanity-check/setup.sh  2
-rw-r--r--  test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/setup.sh  22
-rw-r--r--  test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/teardown.sh  21
-rw-r--r--  test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/testplan.txt  3
-rw-r--r--  test/csit/plans/portal-sdk/testsuite/setup.sh  12
-rw-r--r--  test/csit/plans/portal/testsuite/setup.sh  8
-rw-r--r--  test/csit/plans/sdc/nightly/setup.sh  40
-rw-r--r--  test/csit/plans/sdc/nightly/teardown.sh  22
-rw-r--r--  test/csit/plans/sdc/nightly/testplan.txt  3
-rw-r--r--  test/csit/plans/sdnc/healthcheck/setup.sh  13
-rw-r--r--  test/csit/plans/sdnc/healthcheck/teardown.sh  1
-rw-r--r--  test/csit/plans/so/sanity-check/setup.sh  6
-rw-r--r--  test/csit/plans/so/sanity-check/teardown.sh  1
-rwxr-xr-x  test/csit/plans/vfc-gvnfm-vnflcm/sanity-check/setup.sh  2
-rwxr-xr-x  test/csit/plans/vfc-gvnfm-vnfmgr/sanity-check/setup.sh  42
-rwxr-xr-x  test/csit/plans/vfc-gvnfm-vnfres/sanity-check/setup.sh  4
-rw-r--r--  test/csit/plans/vfc-nfvo-catalog/sanity-check/setup.sh  2
-rwxr-xr-x  test/csit/plans/vfc-nfvo-driver-ems/sanity-check/setup.sh  2
-rwxr-xr-x  test/csit/plans/vfc-nfvo-driver-sfc/sanity-check/setup.sh  2
-rw-r--r--  test/csit/plans/vfc-nfvo-driver-vnfm-gvnfm/sanity-check/setup.sh  2
-rw-r--r--  test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh  6
-rwxr-xr-x  test/csit/plans/vfc-nfvo-lcm/sanity-check/setup.sh  2
-rw-r--r--  test/csit/plans/vfc-nfvo-resmanagement/sanity-check/setup.sh  2
-rw-r--r--  test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh  46
-rw-r--r--  test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh  4
-rw-r--r--  test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt  6
-rw-r--r--  test/csit/plans/vid/healthCheck/setup.sh  2
-rw-r--r--  test/csit/plans/vid/healthCheck/testplan.txt  1
-rw-r--r--  test/csit/plans/vnfsdk-marketplace/sanity-check/enterprise2DC.csar  bin 32098 -> 4622 bytes
-rw-r--r--  test/csit/plans/vnfsdk-refrepo/sanity-check/enterprise2DC.csar  bin 32098 -> 4622 bytes
-rw-r--r--  test/csit/scripts/modeling-toscaparsers-javatoscachecker/setup_containers.sh  49
-rw-r--r--  test/csit/scripts/modeling-toscaparsers-javatoscachecker/teardown_containers.sh  27
-rw-r--r--  test/csit/scripts/nfvo-wfengine/demo.bpmn20.xml  11
-rwxr-xr-x  test/csit/scripts/policy/script1.sh  1
-rw-r--r--  test/csit/scripts/sdc/clone_and_setup_sdc_data.sh  5
-rw-r--r--  test/csit/scripts/sdc/start_sdc_containers.sh  71
-rw-r--r--  test/csit/scripts/sdc/start_sdc_sanity.sh  5
-rw-r--r--  test/csit/scripts/so/chef-config/aai.crt  27
-rw-r--r--  test/csit/scripts/so/chef-config/mso-docker.json  220
-rw-r--r--  test/csit/scripts/so/mariadb/conf.d/mariadb1.cnf  193
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/01-load-default-sql-files.sh  25
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/02-load-additional-changes.sh  27
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/automated-tests/create_mso_db-tests.sql  49
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/default/create_mso_db-default.sql  128
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/demo-dns/create_mso_db-demo-dns.sql  77
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/demo-vfw/create_mso_db-demo-vfw.sql  59
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/camunda/mariadb_engine_7.7.3-ee.sql  1195
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/camunda/mysql_create_camunda_admin.sql  25
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/main-schemas/MySQL-Catalog-schema.sql  462
-rw-r--r--  test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/main-schemas/MySQL-Requests-schema.sql  84
-rw-r--r--  test/csit/tests/clamp/APIs/01__TCA.robot  11
-rw-r--r--  test/csit/tests/dcaegen2/testcases/resources/VES-4.27.2-dataformat.json  2337
-rw-r--r--  test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/__init__.robot  2
-rw-r--r--  test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/basics.robot  62
-rw-r--r--  test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/standalone.yaml  21
-rw-r--r--  test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/standalone_with_errors.yaml  23
-rw-r--r--  test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/test_schema.yaml  17
-rw-r--r--  test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/test_template.yaml  11
-rw-r--r--  test/csit/tests/policy/suite1/Policy-CSIT.robot  16
-rw-r--r--  test/csit/tests/policy/suite1/configpolicy_vCPE_R1.template  6
-rw-r--r--  test/csit/tests/policy/suite1/configpolicy_vDNS_R1.template  6
-rw-r--r--  test/csit/tests/policy/suite1/configpolicy_vFW_R1.template  6
-rw-r--r--  test/csit/tests/policy/suite1/opspolicy_VDNS_R1.template  4
-rw-r--r--  test/csit/tests/policy/suite1/opspolicy_VFW_R1.template  7
-rw-r--r--  test/csit/tests/policy/suite1/opspolicy_vCPE_R1.template  16
-rw-r--r--  test/csit/tests/policy/suite1/opspolicy_vOLTE_R1.template  16
-rw-r--r--  test/csit/tests/portal-sdk/testsuites/test1.robot  18
-rw-r--r--  test/csit/tests/portal/testsuites/test1.robot  43
-rw-r--r--  test/csit/tests/sdc/nightly/__init__.robot  2
-rw-r--r--  test/csit/tests/sdc/nightly/test1.robot  16
-rw-r--r--  test/csit/tests/sdnc/healthcheck/data/data.json  4
-rw-r--r--  test/csit/tests/sdnc/healthcheck/data/preload.json  41
-rw-r--r--  test/csit/tests/sdnc/healthcheck/test1.robot  45
-rw-r--r--  test/csit/tests/so/sanity-check/data/createE2eservice.json  126
-rw-r--r--  test/csit/tests/so/sanity-check/data/deleteE2eservice.json  30
-rw-r--r--  test/csit/tests/so/sanity-check/sanity_test_so.robot  2
-rw-r--r--  test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot  2
-rw-r--r--  test/csit/tests/vfc/nfvo-wfengine/workflow.robot  128
-rw-r--r--  test/csit/tests/vid/login/__init__.robot  2
-rw-r--r--  test/csit/tests/vid/login/test1.robot  78
-rw-r--r--  test/csit/tests/vnfsdk-marketplace/provision/enterprise2DC.csar  bin 4600 -> 4622 bytes
-rw-r--r--  test/ete/labs/windriver/Integration-Jenkins-openrc.sh (renamed from test/ete/labs/windriver/Integration-Stable-openrc.sh)  21
-rw-r--r--  test/ete/labs/windriver/Integration-SB-04-openrc.sh  49
-rw-r--r--  test/ete/labs/windriver/onap-openstack-template.env (renamed from test/ete/labs/windriver/onap.env)  100
-rwxr-xr-x  test/ete/scripts/deploy-onap.sh  86
-rwxr-xr-x  test/ete/scripts/get-floating-ip.sh  6
-rwxr-xr-x  test/ete/scripts/install_openstack_cli.sh  47
-rwxr-xr-x  test/ete/scripts/remote/run-robot.sh  20
-rwxr-xr-x  test/ete/scripts/run-healthcheck.sh  26
-rw-r--r--  test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java  2
-rw-r--r--  test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java  5
-rw-r--r--  test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java  54
96 files changed, 5037 insertions, 1502 deletions
diff --git a/test/csit/plans/aai/resources/docker-compose.yml b/test/csit/plans/aai/resources/docker-compose.yml
index 09d2a8127..e965e8cca 100644
--- a/test/csit/plans/aai/resources/docker-compose.yml
+++ b/test/csit/plans/aai/resources/docker-compose.yml
@@ -36,7 +36,7 @@ services:
max-size: "30m"
max-file: "5"
aai.api.simpledemo.openecomp.org:
- image: ${AAI_HAPROXY_IMAGE}
+ image: ${AAI_HAPROXY_IMAGE}:${HAPROXY_VERSION}
hostname: aai.api.simpledemo.openecomp.org
ports:
- 8443:8443
diff --git a/test/csit/plans/aai/resources/setup.sh b/test/csit/plans/aai/resources/setup.sh
index 19beb6365..42a766aa9 100644
--- a/test/csit/plans/aai/resources/setup.sh
+++ b/test/csit/plans/aai/resources/setup.sh
@@ -54,6 +54,7 @@ DOCKER_COMPOSE_CMD="docker-compose";
export MTU=$(/sbin/ifconfig | grep MTU | sed 's/.*MTU://' | sed 's/ .*//' | sort -n | head -1);
export DOCKER_REGISTRY="nexus3.onap.org:10001";
export AAI_HAPROXY_IMAGE="${AAI_HAPROXY_IMAGE:-aaionap/haproxy}";
+export HAPROXY_VERSION="${HAPROXY_VERSION:-1.1.0}";
export HBASE_IMAGE="${HBASE_IMAGE:-aaionap/hbase}";
export HBASE_VERSION="${HBASE_VERSION:-1.2.0}";
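Both AAI setup scripts now export HAPROXY_VERSION with a bash default (${HAPROXY_VERSION:-1.1.0}), so the compose file's new ${AAI_HAPROXY_IMAGE}:${HAPROXY_VERSION} reference always resolves to a pinned tag. A minimal sketch of overriding that default from the calling job; the variable names come from the patch, the invocation itself is illustrative:

# illustrative override of the pinned haproxy tag before sourcing the plan's setup.sh
export AAI_HAPROXY_IMAGE="aaionap/haproxy"
export HAPROXY_VERSION="1.2.0"    # takes precedence over the :-1.1.0 default in setup.sh
source ./setup.sh                 # docker-compose then pulls ${AAI_HAPROXY_IMAGE}:${HAPROXY_VERSION}
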
diff --git a/test/csit/plans/aai/traversal/docker-compose.yml b/test/csit/plans/aai/traversal/docker-compose.yml
index 01dd4b481..7de07a63c 100644
--- a/test/csit/plans/aai/traversal/docker-compose.yml
+++ b/test/csit/plans/aai/traversal/docker-compose.yml
@@ -35,7 +35,7 @@ services:
max-size: "30m"
max-file: "5"
aai.api.simpledemo.openecomp.org:
- image: ${AAI_HAPROXY_IMAGE}
+ image: ${AAI_HAPROXY_IMAGE}:${HAPROXY_VERSION}
hostname: aai.api.simpledemo.openecomp.org
ports:
- 8443:8443
diff --git a/test/csit/plans/aai/traversal/setup.sh b/test/csit/plans/aai/traversal/setup.sh
index 010b0352e..5d58e6b80 100644
--- a/test/csit/plans/aai/traversal/setup.sh
+++ b/test/csit/plans/aai/traversal/setup.sh
@@ -54,6 +54,7 @@ DOCKER_COMPOSE_CMD="docker-compose";
export MTU=$(/sbin/ifconfig | grep MTU | sed 's/.*MTU://' | sed 's/ .*//' | sort -n | head -1);
export DOCKER_REGISTRY="nexus3.onap.org:10001";
export AAI_HAPROXY_IMAGE="${AAI_HAPROXY_IMAGE:-aaionap/haproxy}";
+export HAPROXY_VERSION="${HAPROXY_VERSION:-1.1.0}";
export HBASE_IMAGE="${HBASE_IMAGE:-aaionap/hbase}";
export HBASE_VERSION="${HBASE_VERSION:-1.2.0}";
diff --git a/test/csit/plans/cli/sanity-check/setup.sh b/test/csit/plans/cli/sanity-check/setup.sh
index 17fb18c45..7945ff565 100644
--- a/test/csit/plans/cli/sanity-check/setup.sh
+++ b/test/csit/plans/cli/sanity-check/setup.sh
@@ -41,7 +41,7 @@ done
sleep 60
# Start cli
-docker run -d --name cli -e CLI_MODE=daemon nexus3.onap.org:10001/onap/cli:1.1-STAGING-latest
+docker run -d --name cli -e CLI_MODE=daemon nexus3.onap.org:10001/onap/cli:v1.1.0
# Wait for cli initialization
echo Wait for CLI initialization
diff --git a/test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/setup.sh b/test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/setup.sh
new file mode 100644
index 000000000..03d07defb
--- /dev/null
+++ b/test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/setup.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+#
+# Copyright 2017 AT&T
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+# Place the scripts in run order:
+
+source ${WORKSPACE}/test/csit/scripts/modeling-toscaparsers-javatoscachecker/setup_containers.sh
+
diff --git a/test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/teardown.sh b/test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/teardown.sh
new file mode 100644
index 000000000..1ecaad4d2
--- /dev/null
+++ b/test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/teardown.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+# Copyright 2016-2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+
+source ${WORKSPACE}/test/csit/scripts/modeling-toscaparsers-javatoscachecker/teardown_containers.sh
+
diff --git a/test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/testplan.txt b/test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/testplan.txt
new file mode 100644
index 000000000..f59237bd4
--- /dev/null
+++ b/test/csit/plans/modeling-toscaparsers-javatoscachecker/APIs/testplan.txt
@@ -0,0 +1,3 @@
+# Test suites are relative paths under [integration.git]/test/csit/tests/.
+# Place the suites in run order.
+modeling-toscaparsers-javatoscachecker/APIs
diff --git a/test/csit/plans/portal-sdk/testsuite/setup.sh b/test/csit/plans/portal-sdk/testsuite/setup.sh
index 2091d14e0..0c90dc66b 100644
--- a/test/csit/plans/portal-sdk/testsuite/setup.sh
+++ b/test/csit/plans/portal-sdk/testsuite/setup.sh
@@ -48,16 +48,16 @@ NEXUS_DOCKER_REPO=nexus3.onap.org:10003
CURR="$(pwd)"
-git clone http://gerrit.onap.org/r/portal
+git clone http://gerrit.onap.org/r/portal -b "release-1.3.0"
# Refresh configuration and scripts
cd portal
git pull
cd deliveries
rm .env
-rm docker-compose.yml
+#rm docker-compose.yml
cp $CURR/.env .
-cp $CURR/docker-compose.yml .
+#cp $CURR/docker-compose.yml .
#cd properties_simpledemo/ECOMPPORTALAPP
#rm system.properties
#cp $CURR/system.properties .
@@ -68,7 +68,7 @@ source $CURR/.env
# Make inter-app communication work in CSIT
export EXTRA_HOST_IP="-i ${HOST_IP}"
-export EXTRA_HOST_NAME="-n portal.api.simpledemo.openecomp.org"
+export EXTRA_HOST_NAME="-n portal.api.simpledemo.onap.org"
# Copy property files to new directory
@@ -107,7 +107,7 @@ docker-compose up -d
# insert/update hosts entry
ip_address=$HOST_IP
-host_name="portal.api.simpledemo.openecomp.org"
+host_name="portal.api.simpledemo.onap.org"
# find existing instances in the host file and save the line numbers
matches_in_hosts="$(grep -n $host_name /etc/hosts | cut -f1 -d:)"
host_entry="${ip_address} ${host_name}"
@@ -137,7 +137,7 @@ TIME_OUT=500
INTERVAL=20
TIME=0
while [ "$TIME" -lt "$TIME_OUT" ]; do
- response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://portal.api.simpledemo.openecomp.org:8989/ECOMPPORTAL/portalApi/healthCheck); echo $response
+ response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://portal.api.simpledemo.onap.org:8989/ONAPPORTAL/portalApi/healthCheck); echo $response
if [ "$response" == "200" ]; then
echo Portal and its database well started in $TIME seconds
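The health wait in both portal setup scripts polls the relocated /ONAPPORTAL/portalApi/healthCheck URL until curl reports HTTP 200 or TIME_OUT is exhausted. The same poll-until-200 pattern, factored into a reusable function as a hedged sketch (the function name and arguments are my own, not part of the patch):

# wait_for_200 <url> [timeout_seconds] [interval_seconds] -- illustrative helper
wait_for_200() {
    local url=$1 timeout=${2:-500} interval=${3:-20} elapsed=0 code
    while [ "$elapsed" -lt "$timeout" ]; do
        # --write-out prints only the status code; the response body is discarded
        code=$(curl --write-out '%{http_code}' --silent --output /dev/null "$url")
        if [ "$code" = "200" ]; then echo "up after ${elapsed}s"; return 0; fi
        sleep "$interval"
        elapsed=$((elapsed + interval))
    done
    echo "timed out after ${timeout}s" >&2
    return 1
}
# e.g. wait_for_200 http://portal.api.simpledemo.onap.org:8989/ONAPPORTAL/portalApi/healthCheck
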
diff --git a/test/csit/plans/portal/testsuite/setup.sh b/test/csit/plans/portal/testsuite/setup.sh
index ff21142af..0c90dc66b 100644
--- a/test/csit/plans/portal/testsuite/setup.sh
+++ b/test/csit/plans/portal/testsuite/setup.sh
@@ -48,7 +48,7 @@ NEXUS_DOCKER_REPO=nexus3.onap.org:10003
CURR="$(pwd)"
-git clone http://gerrit.onap.org/r/portal
+git clone http://gerrit.onap.org/r/portal -b "release-1.3.0"
# Refresh configuration and scripts
cd portal
@@ -68,7 +68,7 @@ source $CURR/.env
# Make inter-app communication work in CSIT
export EXTRA_HOST_IP="-i ${HOST_IP}"
-export EXTRA_HOST_NAME="-n portal.api.simpledemo.openecomp.org"
+export EXTRA_HOST_NAME="-n portal.api.simpledemo.onap.org"
# Copy property files to new directory
@@ -107,7 +107,7 @@ docker-compose up -d
# insert/update hosts entry
ip_address=$HOST_IP
-host_name="portal.api.simpledemo.openecomp.org"
+host_name="portal.api.simpledemo.onap.org"
# find existing instances in the host file and save the line numbers
matches_in_hosts="$(grep -n $host_name /etc/hosts | cut -f1 -d:)"
host_entry="${ip_address} ${host_name}"
@@ -137,7 +137,7 @@ TIME_OUT=500
INTERVAL=20
TIME=0
while [ "$TIME" -lt "$TIME_OUT" ]; do
- response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://portal.api.simpledemo.openecomp.org:8989/ECOMPPORTAL/portalApi/healthCheck); echo $response
+ response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://portal.api.simpledemo.onap.org:8989/ONAPPORTAL/portalApi/healthCheck); echo $response
if [ "$response" == "200" ]; then
echo Portal and its database well started in $TIME seconds
diff --git a/test/csit/plans/sdc/nightly/setup.sh b/test/csit/plans/sdc/nightly/setup.sh
new file mode 100644
index 000000000..ac7a7f3f4
--- /dev/null
+++ b/test/csit/plans/sdc/nightly/setup.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+# Copyright 2016-2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+# Place the scripts in run order:
+
+
+mkdir ${WORKSPACE}/archives
+chmod -R 777 ${WORKSPACE}/archives
+
+source ${WORKSPACE}/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
+
+source ${WORKSPACE}/test/csit/scripts/sdc/start_sdc_containers.sh
+
+source ${WORKSPACE}/test/csit/scripts/sdc/docker_health.sh
+
+source ${WORKSPACE}/test/csit/scripts/sdc/start_sdc_sanity.sh
+
+
+BE_IP=`get-instance-ip.sh sdc-BE`
+echo BE_IP=${BE_IP}
+
+
+# Pass any variables required by Robot test suites in ROBOT_VARIABLES
+ROBOT_VARIABLES="-v BE_IP:${BE_IP}"
+
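Like the other plans, this setup ends by filling ROBOT_VARIABLES, which the CSIT runner forwards to Robot Framework as -v NAME:VALUE pairs; inside the test suite the value is then available as ${BE_IP}. A hedged sketch of how the variable flows through (the runner command is illustrative; the real invocation lives in the shared CSIT scripts, not in this patch):

# in the plan's setup.sh: expose the backend address to the suites
ROBOT_VARIABLES="-v BE_IP:${BE_IP}"
# in the runner (illustrative): the string is passed verbatim to Robot Framework, e.g.
# pybot ${ROBOT_VARIABLES} --outputdir "${WORKSPACE}/archives" test/csit/tests/sdc/nightly
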
diff --git a/test/csit/plans/sdc/nightly/teardown.sh b/test/csit/plans/sdc/nightly/teardown.sh
new file mode 100644
index 000000000..a5f69819e
--- /dev/null
+++ b/test/csit/plans/sdc/nightly/teardown.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+#
+# Copyright 2016-2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+
+source ${WORKSPACE}/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh
+
+# $WORKSPACE/archives/clamp-clone deleted with archives folder when tests starts so we keep it at the end for debugging
diff --git a/test/csit/plans/sdc/nightly/testplan.txt b/test/csit/plans/sdc/nightly/testplan.txt
new file mode 100644
index 000000000..3011ad5cb
--- /dev/null
+++ b/test/csit/plans/sdc/nightly/testplan.txt
@@ -0,0 +1,3 @@
+# Test suites are relative paths under [integration.git]/test/csit/tests/.
+# Place the suites in run order.
+sdc/nightly
diff --git a/test/csit/plans/sdnc/healthcheck/setup.sh b/test/csit/plans/sdnc/healthcheck/setup.sh
index 53590264b..4fb66d8c3 100644
--- a/test/csit/plans/sdnc/healthcheck/setup.sh
+++ b/test/csit/plans/sdnc/healthcheck/setup.sh
@@ -37,7 +37,7 @@ fi
# Clone SDNC repo to get docker-compose for SDNC
mkdir -p $WORKSPACE/archives/sdnc
cd $WORKSPACE/archives
-git clone -b master --single-branch http://gerrit.onap.org/r/sdnc/oam.git sdnc
+git clone -b master --single-branch --depth=1 http://gerrit.onap.org/r/sdnc/oam.git sdnc
cd $WORKSPACE/archives/sdnc
git pull
unset http_proxy https_proxy
@@ -55,6 +55,14 @@ docker tag $NEXUS_DOCKER_REPO/onap/ccsdk-dgbuilder-image:$CCSDK_DOCKER_IMAGE_VER
docker pull $NEXUS_DOCKER_REPO/onap/admportal-sdnc-image:$DOCKER_IMAGE_VERSION
docker tag $NEXUS_DOCKER_REPO/onap/admportal-sdnc-image:$DOCKER_IMAGE_VERSION onap/admportal-sdnc-image:latest
+docker pull $NEXUS_DOCKER_REPO/onap/sdnc-ueb-listener-image:$DOCKER_IMAGE_VERSION
+docker tag $NEXUS_DOCKER_REPO/onap/sdnc-ueb-listener-image:$DOCKER_IMAGE_VERSION onap/sdnc-ueb-listener-image:latest
+
+docker pull $NEXUS_DOCKER_REPO/onap/sdnc-dmaap-listener-image:$DOCKER_IMAGE_VERSION
+
+docker tag $NEXUS_DOCKER_REPO/onap/sdnc-dmaap-listener-image:$DOCKER_IMAGE_VERSION onap/sdnc-dmaap-listener-image:latest
+
+
# start SDNC containers with docker compose and configuration from docker-compose.yml
curl -L https://github.com/docker/compose/releases/download/1.9.0/docker-compose-`uname -s`-`uname -m` > docker-compose
chmod +x docker-compose
@@ -120,6 +128,9 @@ if [ "$num_failed_bundles" -ge 1 ]; then
echo " $failed_bundles"
fi
+# Sleep additional 120 to give application time to finish
+sleep 120
+
# Pass any variables required by Robot test suites in ROBOT_VARIABLES
ROBOT_VARIABLES="-v SCRIPTS:${SCRIPTS}"
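The setup now also pre-pulls the ueb-listener and dmaap-listener images from the ONAP Nexus registry and re-tags them with the short local names the docker-compose file expects. The same pull-and-retag step written as a loop, as a hedged sketch (image names mirror the patch; the loop form is illustrative):

for img in sdnc-ueb-listener-image sdnc-dmaap-listener-image; do
    docker pull "$NEXUS_DOCKER_REPO/onap/$img:$DOCKER_IMAGE_VERSION"
    # compose references the unqualified name, so alias it as :latest locally
    docker tag "$NEXUS_DOCKER_REPO/onap/$img:$DOCKER_IMAGE_VERSION" "onap/$img:latest"
done
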
diff --git a/test/csit/plans/sdnc/healthcheck/teardown.sh b/test/csit/plans/sdnc/healthcheck/teardown.sh
index 4d99b9f31..925e7b732 100644
--- a/test/csit/plans/sdnc/healthcheck/teardown.sh
+++ b/test/csit/plans/sdnc/healthcheck/teardown.sh
@@ -21,5 +21,6 @@ kill-instance.sh sdnc_controller_container
kill-instance.sh sdnc_dgbuilder_container
kill-instance.sh sdnc_portal_container
kill-instance.sh sdnc_db_container
+kill-instance.sh sdnc_ueblistener_container
# $WORKSPACE/archives/appc deleted with archives folder when tests starts so we keep it at the end for debugging
diff --git a/test/csit/plans/so/sanity-check/setup.sh b/test/csit/plans/so/sanity-check/setup.sh
index 77a33bf65..cd0cbdc33 100644
--- a/test/csit/plans/so/sanity-check/setup.sh
+++ b/test/csit/plans/so/sanity-check/setup.sh
@@ -17,8 +17,12 @@
# Place the scripts in run order:
# Start all process required for executing test case
+#start mariadb
+docker run -d --name mariadb -h db.mso.testlab.openecomp.org -e MYSQL_ROOT_PASSWORD=password -p 3306:3306 -v ${WORKSPACE}/test/csit/scripts/mariadb/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d -v ${WORKSPACE}/test/csit/scripts/mariadb/conf.d:/etc/mysql/conf.d nexus3.onap.org:10001/mariadb
+
#start so
-docker run -d -i -t --name=so -p 8080:8080 nexus3.onap.org:10001/openecomp/mso
+docker run -d --name so -h mso.mso.testlab.openecomp.org -e MYSQL_ROOT_PASSWORD=password --link=mariadb:db.mso.testlab.openecomp.org -p 8080:8080 -v ${WORKSPACE}/test/csit/scripts/so/chef-config:/shared nexus3.onap.org:10001/openecomp/mso:1.1-STAGING-latest
+
SO_IP=`get-instance-ip.sh so`
# Wait for initialization
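The plan now brings up a MariaDB container first and mounts an init directory at /docker-entrypoint-initdb.d; the stock mariadb image runs any *.sh and *.sql files found there on first start, which is how the MSO schemas and Camunda tables are loaded before the SO container links to the database. A hedged way to confirm the bootstrap ran (purely illustrative; the root password matches the -e flag above, and the mysql client is assumed to be present in the image):

# illustrative check once the container has settled
docker exec mariadb mysql -uroot -ppassword -e "SHOW DATABASES;"
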
diff --git a/test/csit/plans/so/sanity-check/teardown.sh b/test/csit/plans/so/sanity-check/teardown.sh
index a924a074f..1696c745c 100644
--- a/test/csit/plans/so/sanity-check/teardown.sh
+++ b/test/csit/plans/so/sanity-check/teardown.sh
@@ -15,4 +15,5 @@
# limitations under the License.
#
+kill-instance.sh mariadb
kill-instance.sh so
diff --git a/test/csit/plans/vfc-gvnfm-vnflcm/sanity-check/setup.sh b/test/csit/plans/vfc-gvnfm-vnflcm/sanity-check/setup.sh
index 9db194663..63782a9f5 100755
--- a/test/csit/plans/vfc-gvnfm-vnflcm/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-gvnfm-vnflcm/sanity-check/setup.sh
@@ -20,7 +20,7 @@
source ${SCRIPTS}/common_functions.sh
#start msb
-docker run -d -p 8500:8500 --name msb_consul consul
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
diff --git a/test/csit/plans/vfc-gvnfm-vnfmgr/sanity-check/setup.sh b/test/csit/plans/vfc-gvnfm-vnfmgr/sanity-check/setup.sh
index e95d22002..8a3f9af97 100755
--- a/test/csit/plans/vfc-gvnfm-vnfmgr/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-gvnfm-vnfmgr/sanity-check/setup.sh
@@ -17,22 +17,34 @@
# Place the scripts in run order:
# Start all process required for executing test case
-#login to the onap nexus docker repo
-docker login -u docker -p docker nexus3.onap.org:10001
-
-# Start MSB
-docker run -d -p 8500:8500 --name msb_consul nexus3.onap.org:10001/onap/msb/msb_base
-CONSUL_IP=`get-instance-ip.sh msb_consul`
-echo CONSUL_IP=${CONSUL_IP}
-docker run -d -p 10081:10081 -e CONSUL_IP=$CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
-DISCOVERY_IP=`get-instance-ip.sh msb_discovery`
-echo DISCOVERY_IP=${DISCOVERY_IP}
-docker run -d -p 80:80 -e CONSUL_IP=$CONSUL_IP -e SDCLIENT_IP=$DISCOVERY_IP --name msb_internal_apigateway nexus3.onap.org:10001/onap/msb/msb_apigateway
-MSB_IP==`get-instance-ip.sh msb_internal_apigateway`
-echo MSB_IP=${MSB_IP}
+source ${SCRIPTS}/common_functions.sh
+
+#start msb
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
+MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
+echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
+
+docker run -d -p 10081:10081 -e CONSUL_IP=$MSB_CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
+MSB_DISCOVERY_IP=`get-instance-ip.sh msb_discovery`
+echo DISCOVERY_IP=${MSB_DISCOVERY_IP}
+
+docker run -d -p 80:80 -e CONSUL_IP=$MSB_CONSUL_IP -e SDCLIENT_IP=$MSB_DISCOVERY_IP --name msb_internal_apigateway nexus3.onap.org:10001/onap/msb/msb_apigateway
+MSB_IAG_IP=`get-instance-ip.sh msb_internal_apigateway`
+echo MSB_IAG_IP=${MSB_IAG_IP}
+
+# Wait for initialization(8500 Consul, 10081 Service Registration & Discovery, 80 api gateway)
+for i in {1..10}; do
+ curl -sS -m 1 ${MSB_CONSUL_IP}:8500 && curl -sS -m 1 ${MSB_DISCOVERY_IP}:10081 && curl -sS -m 1 ${MSB_IAG_IP}:80 && break
+ echo sleep $i
+ sleep $i
+done
+
+# wait for container initialization

+echo sleep 30
+sleep 30
# start vfc-vnfmgr
-docker run -d --name vfc-vnfmgr -e MSB_ADDR=${MSB_IP}:80 nexus3.onap.org:10001/onap/vfc/vnfmgr
+docker run -d --name vfc-vnfmgr -e MSB_ADDR=${MSB_IAG_IP}:80 nexus3.onap.org:10001/onap/vfc/vnfmgr
VNFMGR_IP=`get-instance-ip.sh vfc-vnfmgr`
for i in {1..10}; do
curl -sS -m 1 ${VNFMGR_IP}:8803 && break
@@ -41,4 +53,4 @@ for i in {1..10}; do
done
# Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v MSB_IP:${MSB_IP} -v VNFMGR_IP:${VNFMGR_IP}"
+ROBOT_VARIABLES="-v MSB_IAG_IP:${MSB_IAG_IP} -v VNFMGR_IP:${VNFMGR_IP} -v SCRIPTS:${SCRIPTS}"
diff --git a/test/csit/plans/vfc-gvnfm-vnfres/sanity-check/setup.sh b/test/csit/plans/vfc-gvnfm-vnfres/sanity-check/setup.sh
index 0139bcdcc..e976f9281 100755
--- a/test/csit/plans/vfc-gvnfm-vnfres/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-gvnfm-vnfres/sanity-check/setup.sh
@@ -21,7 +21,7 @@ source ${SCRIPTS}/common_functions.sh
#start msb
-docker run -d -p 8500:8500 --name msb_consul consul
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
docker run -d -p 10081:10081 -e CONSUL_IP=$MSB_CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
@@ -53,4 +53,4 @@ for i in {1..10}; do
done
# Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v MSB_IAG_IP:${MSB_IAG_IP} -v VNFRES_IP:${VNFRES_IP}"
+ROBOT_VARIABLES="-v MSB_IAG_IP:${MSB_IAG_IP} -v VNFRES_IP:${VNFRES_IP} -v SCRIPTS:${SCRIPTS}"
diff --git a/test/csit/plans/vfc-nfvo-catalog/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-catalog/sanity-check/setup.sh
index c4cdbdee5..3576e234d 100644
--- a/test/csit/plans/vfc-nfvo-catalog/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-catalog/sanity-check/setup.sh
@@ -21,7 +21,7 @@
docker login -u docker -p docker nexus3.onap.org:10001
# start msb
-docker run -d -p 8500:8500 --name msb_consul consul
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
CONSUL_IP=`get-instance-ip.sh msb_consul`
echo CONSUL_IP=${CONSUL_IP}
diff --git a/test/csit/plans/vfc-nfvo-driver-ems/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-driver-ems/sanity-check/setup.sh
index 09978ca6d..d381f8f38 100755
--- a/test/csit/plans/vfc-nfvo-driver-ems/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-driver-ems/sanity-check/setup.sh
@@ -20,7 +20,7 @@
source ${SCRIPTS}/common_functions.sh
#start msb
-docker run -d -p 8500:8500 --name msb_consul consul
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
diff --git a/test/csit/plans/vfc-nfvo-driver-sfc/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-driver-sfc/sanity-check/setup.sh
index 45ad0cbb1..85a3e69f1 100755
--- a/test/csit/plans/vfc-nfvo-driver-sfc/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-driver-sfc/sanity-check/setup.sh
@@ -20,7 +20,7 @@
source ${SCRIPTS}/common_functions.sh
#start msb
-docker run -d -p 8500:8500 --name msb_consul consul
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
diff --git a/test/csit/plans/vfc-nfvo-driver-vnfm-gvnfm/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-driver-vnfm-gvnfm/sanity-check/setup.sh
index fed23c5aa..bdc66d0e1 100644
--- a/test/csit/plans/vfc-nfvo-driver-vnfm-gvnfm/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-driver-vnfm-gvnfm/sanity-check/setup.sh
@@ -21,7 +21,7 @@ source ${SCRIPTS}/common_functions.sh
#start msb
-docker run -d -p 8500:8500 --name msb_consul consul
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
docker run -d -p 10081:10081 -e CONSUL_IP=$MSB_CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
diff --git a/test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh
index d18c4835b..fca54fae6 100644
--- a/test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh
@@ -21,7 +21,7 @@ source ${SCRIPTS}/common_functions.sh
#start msb
-docker run -d -p 8500:8500 --name msb_consul consul
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
docker run -d -p 10081:10081 -e CONSUL_IP=$MSB_CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
@@ -43,7 +43,7 @@ echo sleep 60
sleep 60
# start vfc-ztevmanagerdriver
-docker run -d --name vfc-ztevmanagerdriver -e MSB_ADDR=${MSB_IAG_IP}:80 nexus3.onap.org:10001/onap/vfc/ztevmanagerdriver
+docker run -d --name vfc-ztevmanagerdriver -p 8410:8410 -e MSB_ADDR=${MSB_IAG_IP}:80 nexus3.onap.org:10001/onap/vfc/ztevmanagerdriver
ZTEVMANAGERDRIVER_IP=`get-instance-ip.sh vfc-ztevmanagerdriver`
# Wait for initialization
@@ -55,7 +55,7 @@ done
# Start svnfm-huawei
-docker run -d --name vfc-svnfm-huawei -e MSB_ADDR=${MSB_IAG_IP}:80 nexus3.onap.org:10001/onap/vfc/nfvo/svnfm/huawei
+docker run -d --name vfc-svnfm-huawei -p 8482:8482 -p 8443:8443 -e MSB_ADDR=${MSB_IAG_IP}:80 nexus3.onap.org:10001/onap/vfc/nfvo/svnfm/huawei
SERVICE_IP=`get-instance-ip.sh vfc-svnfm-huawei`
for i in {1..20}; do
curl -sS ${SERVICE_IP}:8482 && break
diff --git a/test/csit/plans/vfc-nfvo-lcm/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-lcm/sanity-check/setup.sh
index 3cc53988c..74b0ab61d 100755
--- a/test/csit/plans/vfc-nfvo-lcm/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-lcm/sanity-check/setup.sh
@@ -20,7 +20,7 @@
source ${SCRIPTS}/common_functions.sh
#start msb
-docker run -d -p 8500:8500 --name msb_consul consul
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
diff --git a/test/csit/plans/vfc-nfvo-resmanagement/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-resmanagement/sanity-check/setup.sh
index b0cec31a1..68b784310 100644
--- a/test/csit/plans/vfc-nfvo-resmanagement/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-resmanagement/sanity-check/setup.sh
@@ -21,7 +21,7 @@
docker login -u docker -p docker nexus3.onap.org:10001
# Start MSB
-docker run -d -p 8500:8500 --name msb_consul nexus3.onap.org:10001/onap/msb/msb_base
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
CONSUL_IP=`get-instance-ip.sh msb_consul`
echo CONSUL_IP=${CONSUL_IP}
docker run -d -p 10081:10081 -e CONSUL_IP=$CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
diff --git a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh
index a9ff9ac13..5a578230b 100644
--- a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh
@@ -21,7 +21,7 @@ source ${SCRIPTS}/common_functions.sh
#start msb
-docker run -d -p 8500:8500 --name msb_consul consul
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
docker run -d -p 10081:10081 -e CONSUL_IP=$MSB_CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
@@ -48,30 +48,40 @@ DOCKER_REPOSITORY="nexus3.onap.org:10001"
IMAGE="wfengine-activiti"
IMAGE_ACTIVITI_NAME="${DOCKER_REPOSITORY}/${ORG}/${PROJECT}/${IMAGE}"
+#get current host IP address
+SERVICE_IP=$(ip route get 8.8.8.8 | awk '/8.8.8.8/ {print $NF}')
+
# start wfengine-activiti
-##docker run -d --name ${IMAGE} -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_ACTIVITI_NAME}
-##WFENGINEACTIVITIR_IP=`get-instance-ip.sh ${IMAGE}`
+docker run -d --name vfc_wfengine_activiti -p 8804:8080 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8804 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_ACTIVITI_NAME}
+WFENGINE_ACTIVITI_IP=`get-instance-ip.sh vfc_wfengine_activiti`
# Wait for initialization
-##for i in {1..10}; do
-## curl -sS ${WFENGINEACTIVITIR_IP}:8080 && break
-## echo sleep $i
-## sleep $i
-##done
+for i in {1..10}; do
+ curl -sS ${WFENGINE_ACTIVITI_IP}:8080 && break
+ echo sleep $i
+ sleep $i
+done
+for i in {1..10}; do
+ curl -sS ${SERVICE_IP}:8804 && break
+ echo sleep $i
+ sleep $i
+done
+docker logs vfc_wfengine_activiti
-##IMAGE="wfengine-mgrservice"
-##IMAGE_MGRSERVICE_NAME="${DOCKER_REPOSITORY}/${ORG}/${PROJECT}/${IMAGE}"
+IMAGE="wfengine-mgrservice"
+IMAGE_MGRSERVICE_NAME="${DOCKER_REPOSITORY}/${ORG}/${PROJECT}/${IMAGE}"
# Start wfengine-mgrservice
-#docker run -d --name ${IMAGE} -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_MGRSERVICE_NAME}
+docker run -d --name vfc_wfengine_mgrservice -p 8805:10550 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8805 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_MGRSERVICE_NAME}
##docker run -d --name ${IMAGE} -e OPENPALETTE_MSB_IP=${WFENGINEACTIVITIR_IP} -e OPENPALETTE_MSB_PORT=8080 ${IMAGE_MGRSERVICE_NAME}
-##WFENGINEMGRSERVICE_IP=`get-instance-ip.sh ${IMAGE}`
-##for i in {1..10}; do
-## curl -sS ${WFENGINEMGRSERVICE_IP}:10550 && break
-## echo sleep $i
-## sleep $i
-##done
+WFENGINE_MGRSERVICE_IP=`get-instance-ip.sh vfc_wfengine_mgrservice`
+for i in {1..10}; do
+ curl -sS ${WFENGINE_MGRSERVICE_IP}:10550 && break
+ echo sleep $i
+ sleep $i
+done
+docker logs vfc_wfengine_mgrservice
# Pass any variables required by Robot test suites in ROBOT_VARIABLES
-#ROBOT_VARIABLES="-v MSB_IAG_IP:${MSB_IAG_IP} -v MSB_IP:${MSB_IAG_IP} -v MSB_PORT:80 -v MSB_DISCOVERY_IP:${MSB_DISCOVERY_IP} -v WFENGINEACTIVITIR_IP:${WFENGINEACTIVITIR_IP} -v WFENGINEACTIVITIR_PORT:8080 -v WFENGINEMGRSERVICE_IP:${WFENGINEMGRSERVICE_IP} -v WFENGINEMGRSERVICE_PORT:10550 -v SCRIPTS:${SCRIPTS}"
+ROBOT_VARIABLES="-v MSB_IAG_IP:${MSB_IAG_IP} -v MSB_IP:${MSB_IAG_IP} -v MSB_PORT:80 -v MSB_DISCOVERY_IP:${MSB_DISCOVERY_IP} -v ACTIVITI_IP:${WFENGINE_ACTIVITI_IP} -v ACTIVITI_PORT:8080 -v MGRSERVICE_IP:${WFENGINE_MGRSERVICE_IP} -v MGRSERVICE_PORT:10550 -v SCRIPTS:${SCRIPTS}"
##ROBOT_VARIABLES="-v MSB_IAG_IP:${WFENGINEACTIVITIR_IP} -v MSB_IP:${WFENGINEMGRSERVICE_IP} -v MSB_PORT:10550 -v MSB_DISCOVERY_IP:${WFENGINEACTIVITIR_IP} -v MSB_DISCOVERY_PORT:8080 -v WFENGINEACTIVITIR_IP:${WFENGINEACTIVITIR_IP} -v WFENGINEACTIVITIR_PORT:8080 -v WFENGINEMGRSERVICE_IP:${WFENGINEMGRSERVICE_IP} -v WFENGINEMGRSERVICE_PORT:10550 -v SCRIPTS:${SCRIPTS}"
\ No newline at end of file
diff --git a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh
index caa506ecf..384bc3935 100644
--- a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh
+++ b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh
@@ -19,5 +19,5 @@
kill-instance.sh msb_internal_apigateway
kill-instance.sh msb_discovery
kill-instance.sh msb_consul
-##kill-instance.sh wfengine-mgrservice
-##kill-instance.sh wfengine-activiti
+kill-instance.sh vfc_wfengine_mgrservice
+kill-instance.sh vfc_wfengine_activiti
diff --git a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt
index ff9f4d5d6..5f6910bdd 100644
--- a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt
+++ b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt
@@ -1,4 +1,4 @@
-# Test suites are relative paths under [integration.git]/test/csit/tests/.
-# Place the suites in run order.
-
+# Test suites are relative paths under [integration.git]/test/csit/tests/.
+# Place the suites in run order.
+
vfc/nfvo-wfengine/workflow.robot
\ No newline at end of file
diff --git a/test/csit/plans/vid/healthCheck/setup.sh b/test/csit/plans/vid/healthCheck/setup.sh
index afe9ea7ba..a117a6c78 100644
--- a/test/csit/plans/vid/healthCheck/setup.sh
+++ b/test/csit/plans/vid/healthCheck/setup.sh
@@ -18,6 +18,8 @@
#
# Place the scripts in run order:
+/usr/bin/Xvfb :0 -screen 0 1024x768x24&
+export DISPLAY=:0
source ${WORKSPACE}/test/csit/scripts/vid/clone_and_setup_vid_data.sh
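The VID plan now starts a virtual framebuffer and points DISPLAY at it so the new vid/login browser suite can run on a headless CSIT node. A minimal sketch for sanity-checking the display before the tests start (xdpyinfo is a standard X utility, but its presence on the CSIT image is an assumption):

/usr/bin/Xvfb :0 -screen 0 1024x768x24 &
export DISPLAY=:0
# illustrative check, assuming xdpyinfo is installed on the node
xdpyinfo -display :0 >/dev/null && echo "Xvfb display :0 is up"
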
diff --git a/test/csit/plans/vid/healthCheck/testplan.txt b/test/csit/plans/vid/healthCheck/testplan.txt
index ffbb593ca..92d8b0584 100644
--- a/test/csit/plans/vid/healthCheck/testplan.txt
+++ b/test/csit/plans/vid/healthCheck/testplan.txt
@@ -1,3 +1,4 @@
# Test suites are relative paths under [integration.git]/test/csit/tests/.
# Place the suites in run order.
vid/healthCheck
+vid/login
diff --git a/test/csit/plans/vnfsdk-marketplace/sanity-check/enterprise2DC.csar b/test/csit/plans/vnfsdk-marketplace/sanity-check/enterprise2DC.csar
index 29e08c162..0960b20aa 100644
--- a/test/csit/plans/vnfsdk-marketplace/sanity-check/enterprise2DC.csar
+++ b/test/csit/plans/vnfsdk-marketplace/sanity-check/enterprise2DC.csar
Binary files differ
diff --git a/test/csit/plans/vnfsdk-refrepo/sanity-check/enterprise2DC.csar b/test/csit/plans/vnfsdk-refrepo/sanity-check/enterprise2DC.csar
index 29e08c162..0960b20aa 100644
--- a/test/csit/plans/vnfsdk-refrepo/sanity-check/enterprise2DC.csar
+++ b/test/csit/plans/vnfsdk-refrepo/sanity-check/enterprise2DC.csar
Binary files differ
diff --git a/test/csit/scripts/modeling-toscaparsers-javatoscachecker/setup_containers.sh b/test/csit/scripts/modeling-toscaparsers-javatoscachecker/setup_containers.sh
new file mode 100644
index 000000000..3521ff1cb
--- /dev/null
+++ b/test/csit/scripts/modeling-toscaparsers-javatoscachecker/setup_containers.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+#
+# ============LICENSE_START=======================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights
+# reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END============================================
+# ===================================================================
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+
+echo "This is ${WORKSPACE}/test/csit/scripts/modeling-toscaparsers-javatoscachecker/setup_containers.sh"
+
+#start docker image
+run-instance.sh nexus3.onap.org:10001/onap/modeling/javatoscachecker:latest modeling_javatoscachecker_1 "-p 8080:8080"
+
+#check docker image is accessible: picked from clamp, some common script would be good
+TIME_OUT=1200
+INTERVAL=5
+TIME=0
+while [ "$TIME" -lt "$TIME_OUT" ]; do
+ response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://localhost:8080/check_template/test_me); echo $response
+
+ if [ "$response" == "404" ]; then
+ echo javatoscachecker service started in $TIME seconds
+ break;
+ fi
+
+ echo Sleep: $INTERVAL seconds before testing if javatoscachecker service is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
+ sleep $INTERVAL
+ TIME=$(($TIME+$INTERVAL))
+done
+
+if [ "$TIME" -ge "$TIME_OUT" ]; then
+ echo TIME OUT: Docker containers not started in $TIME_OUT seconds... Could cause problems for tests...
+fi
+
+
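Note that this readiness loop treats an HTTP 404 from /check_template/test_me as "up": the checker answers 404 for a template name it has never seen, so any HTTP response at all proves the service is listening. A hedged one-off probe of the same endpoint (host port 8080 comes from the run-instance.sh mapping above; the template name is arbitrary):

# returns 404 once the javatoscachecker API is up; a connection error means it is still starting
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:8080/check_template/test_me
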
diff --git a/test/csit/scripts/modeling-toscaparsers-javatoscachecker/teardown_containers.sh b/test/csit/scripts/modeling-toscaparsers-javatoscachecker/teardown_containers.sh
new file mode 100644
index 000000000..cca4fe193
--- /dev/null
+++ b/test/csit/scripts/modeling-toscaparsers-javatoscachecker/teardown_containers.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+#
+# ============LICENSE_START=======================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights
+# reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END============================================
+# ===================================================================
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+
+echo "This is ${WORKSPACE}/test/csit/scripts/modeling-toscaparsers-javatoscachecker/teardown_containers.sh"
+
+kill-instance.sh modeling_javatoscachecker_1
+
+
diff --git a/test/csit/scripts/nfvo-wfengine/demo.bpmn20.xml b/test/csit/scripts/nfvo-wfengine/demo.bpmn20.xml
new file mode 100644
index 000000000..4f0c83a6e
--- /dev/null
+++ b/test/csit/scripts/nfvo-wfengine/demo.bpmn20.xml
@@ -0,0 +1,11 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<definitions xmlns="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:activiti="http://activiti.org/bpmn" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:omgdc="http://www.omg.org/spec/DD/20100524/DC" xmlns:omgdi="http://www.omg.org/spec/DD/20100524/DI" typeLanguage="http://www.w3.org/2001/XMLSchema" expressionLanguage="http://www.w3.org/1999/XPath" targetNamespace="http://www.activiti.org/processdef">
+ <process id="demo" name="demofirst" isExecutable="true">
+ <startEvent id="sid-6B31A927-8A0F-4A2F-BC58-A2E3D90327D2"/>
+ <userTask id="sid-FDC595C2-41CE-4DE8-86D4-562414F0A0F3"/>
+ <sequenceFlow id="sid-B7ABA5B4-FB87-486C-B7D7-CC0FE5A4CE31" sourceRef="sid-6B31A927-8A0F-4A2F-BC58-A2E3D90327D2" targetRef="sid-FDC595C2-41CE-4DE8-86D4-562414F0A0F3"/>
+ <endEvent id="sid-EEACC228-A53D-41E8-96A1-8F2E4BF38596"/>
+ <sequenceFlow id="sid-07534141-12BB-49F0-9288-137A4E0BC8D8" sourceRef="sid-FDC595C2-41CE-4DE8-86D4-562414F0A0F3" targetRef="sid-EEACC228-A53D-41E8-96A1-8F2E4BF38596"/>
+ </process>
+
+</definitions>
\ No newline at end of file
diff --git a/test/csit/scripts/policy/script1.sh b/test/csit/scripts/policy/script1.sh
index 30fb39586..db5b5155a 100755
--- a/test/csit/scripts/policy/script1.sh
+++ b/test/csit/scripts/policy/script1.sh
@@ -92,6 +92,7 @@ cat config/pe/ip_addr.txt
export MTU=9126
+export PRELOAD_POLICIES=false
docker-compose -f docker-compose-integration.yml up -d
if [ ! $? -eq 0 ]; then
diff --git a/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh b/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
index a88981299..5dbfb5fc2 100644
--- a/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
+++ b/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
@@ -30,12 +30,13 @@ mkdir -p ${WORKSPACE}/data/clone/
mkdir -p ${WORKSPACE}/data/logs/BE/SDC/SDC-BE
mkdir -p ${WORKSPACE}/data/logs/FE/SDC/SDC-FE
chmod -R 777 ${WORKSPACE}/data/logs
+ls -lR ${WORKSPACE}/data/logs/
cd ${WORKSPACE}/data/clone
-git clone --depth 1 http://gerrit.onap.org/r/sdc -b master
+git clone --depth 1 http://gerrit.onap.org/r/sdc -b ${GERRIT_BRANCH}
-chmod -R 775 ${WORKSPACE}/data/
+chmod -R 777 ${WORKSPACE}/data/clone
# set enviroment variables
diff --git a/test/csit/scripts/sdc/start_sdc_containers.sh b/test/csit/scripts/sdc/start_sdc_containers.sh
index a1ac28b14..0dd373256 100644
--- a/test/csit/scripts/sdc/start_sdc_containers.sh
+++ b/test/csit/scripts/sdc/start_sdc_containers.sh
@@ -35,6 +35,38 @@ export IP=$HOST_IP
#export PREFIX=${NEXUS_DOCKER_REPO}'/openecomp'
export PREFIX='nexus3.onap.org:10001/openecomp'
+
+function monitor_docker {
+
+echo monitor $1 Docker
+sleep 5
+TIME_OUT=800
+INTERVAL=20
+TIME=0
+while [ "$TIME" -lt "$TIME_OUT" ]; do
+
+MATCH=`docker logs --tail 30 $1 | grep "DOCKER STARTED"`
+echo MATCH is -- $MATCH
+
+if [ -n "$MATCH" ]
+ then
+ echo DOCKER start finished in $TIME seconds
+ break
+ fi
+
+ echo Sleep: $INTERVAL seconds before testing if $1 DOCKER is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
+ sleep $INTERVAL
+ TIME=$(($TIME+$INTERVAL))
+done
+
+if [ "$TIME" -ge "$TIME_OUT" ]
+ then
+ echo -e "\e[1;31mTIME OUT: DOCKER was NOT fully started in $TIME_OUT seconds... Could cause problems ...\e[0m"
+fi
+
+
+}
+
#start Elastic-Search
docker run --detach --name sdc-es --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --memory 1g --memory-swap=1g --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro -e ES_HEAP_SIZE=1024M --volume ${WORKSPACE}/data/ES:/usr/share/elasticsearch/data --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9200:9200 --publish 9300:9300 ${PREFIX}/sdc-elasticsearch:${RELEASE}
@@ -42,52 +74,27 @@ docker run --detach --name sdc-es --env ENVNAME="${DEP_ENV}" --log-driver=json-f
docker run --detach --name sdc-cs --env RELEASE="${RELEASE}" --env ENVNAME="${DEP_ENV}" --env HOST_IP=${IP} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/CS:/var/lib/cassandra --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9042:9042 --publish 9160:9160 ${PREFIX}/sdc-cassandra:${RELEASE}
echo "please wait while CS is starting..."
-echo ""
-c=120 # seconds to wait
-REWRITE="\e[25D\e[1A\e[K"
-while [ $c -gt 0 ]; do
- c=$((c-1))
- sleep 1
- echo -e "${REWRITE}$c"
-done
-echo -e ""
+monitor_docker sdc-cs
#start kibana
-docker run --detach --name sdc-kbn --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 5601:5601 ${PREFIX}/sdc-kibana:${RELEASE}
+#docker run --detach --name sdc-kbn --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 5601:5601 ${PREFIX}/sdc-kibana:${RELEASE}
#start sdc-backend
docker run --detach --name sdc-BE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 4g --memory-swap=4g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/BE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 8443:8443 --publish 8080:8080 ${PREFIX}/sdc-backend:${RELEASE}
echo "please wait while BE is starting..."
-echo ""
-c=180 # seconds to wait
-REWRITE="\e[45D\e[1A\e[K"
-while [ $c -gt 0 ]; do
- c=$((c-1))
- sleep 1
- echo -e "${REWRITE}$c"
-done
-echo -e ""
+monitor_docker sdc-BE
#start Front-End
docker run --detach --name sdc-FE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/FE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9443:9443 --publish 8181:8181 ${PREFIX}/sdc-frontend:${RELEASE}
-echo "please wait while FE is starting..."
-echo ""
-c=160 # seconds to wait
-REWRITE="\e[45D\e[1A\e[K"
-while [ $c -gt 0 ]; do
- c=$((c-1))
- sleep 1
- echo -e "${REWRITE}$c"
-done
-echo -e ""
+echo "docker run sdc-frontend..."
+monitor_docker sdc-FE
-# WAIT 5 minutes maximum and test every 5 seconds if SDC up using HealthCheck API
-echo " WAIT 5 minutes maximum and test every 5 seconds if SDC up using HealthCheck API...."
+echo " WAIT 1 minutes maximum and test every 5 seconds if SDC up using HealthCheck API...."
-TIME_OUT=600
+TIME_OUT=60
INTERVAL=5
TIME=0
while [ "$TIME" -lt "$TIME_OUT" ]; do
diff --git a/test/csit/scripts/sdc/start_sdc_sanity.sh b/test/csit/scripts/sdc/start_sdc_sanity.sh
index e9e92148b..2b553136b 100644
--- a/test/csit/scripts/sdc/start_sdc_sanity.sh
+++ b/test/csit/scripts/sdc/start_sdc_sanity.sh
@@ -26,7 +26,7 @@
#start Sanity docker
-docker run --detach --name sdc-sanity --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 1g --memory-swap=1g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/sdc-sanity/target:/var/lib/tests/target --volume ${WORKSPACE}/data/logs/sdc-sanity/ExtentReport:/var/lib/tests/ExtentReport --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9560:9560 ${PREFIX}/sdc-sanity:${RELEASE}
+docker run --detach --name sdc-sanity --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/sdc-sanity/target:/var/lib/tests/target --volume ${WORKSPACE}/data/logs/sdc-sanity/ExtentReport:/var/lib/tests/ExtentReport --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9560:9560 ${PREFIX}/sdc-sanity:${RELEASE}
#echo "please wait while Sanity Docker is starting..."
echo ""
@@ -65,6 +65,7 @@ if [ "$TIME" -ge "$TIME_OUT" ]
then
echo TIME OUT: Sany was NOT completed in $TIME_OUT seconds... Could cause problems for tests...
fi
-cp -rf ${WORKSPACE}/data/logs/sdc-sanity/ExtentReport/* ${WORKSPACE}/archives/
+cp -rf ${WORKSPACE}/data/logs/sdc-sanity/ExtentReport/* ${WORKSPACE}/archives/
cp -rf ${WORKSPACE}/data/logs/ ${WORKSPACE}/archives/
+cp -rf ${WORKSPACE}/data/logs/sdc-sanity/target/*.xml ${WORKSPACE}/archives/
diff --git a/test/csit/scripts/so/chef-config/aai.crt b/test/csit/scripts/so/chef-config/aai.crt
new file mode 100644
index 000000000..4ffa426c1
--- /dev/null
+++ b/test/csit/scripts/so/chef-config/aai.crt
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEiTCCA3GgAwIBAgIJAIPKfDLcn3MpMA0GCSqGSIb3DQEBCwUAMIGtMQswCQYD
+VQQGEwJVUzELMAkGA1UECAwCTkoxEzARBgNVBAcMCkJlZG1pbnN0ZXIxEjAQBgNV
+BAoMCU9wZW5FQ09NUDETMBEGA1UECwwKc2ltcGxlZGVtbzEqMCgGA1UEAwwhT3Bl
+bkVDT01QIHNpbXBsZWRlbW8gU2VydmVyIENBIFgxMScwJQYJKoZIhvcNAQkBFhhz
+aW1wbGVkZW1vQG9wZW5lY29tcC5vcmcwHhcNMTYxMTMwMTUzODM5WhcNMTcxMTMw
+MTUzODM5WjCBuTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAk5KMRMwEQYDVQQHDApC
+ZWRtaW5zdGVyMRIwEAYDVQQKDAlPcGVuRUNPTVAxEzARBgNVBAsMClNpbXBsZURl
+bW8xKTAnBgNVBAMMIGFhaS5hcGkuc2ltcGxlZGVtby5vcGVuZWNvbXAub3JnMTQw
+MgYJKoZIhvcNAQkBFiVhYWktaG9zdEBhcGkuc2ltcGxlZGVtby5vcGVuZWNvbXAu
+b3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwQrQl8A0rT0Jjlos
+Mr/7LEhT5UOif4GGPOk+3NCIxT3lOqAbUf+d9ZXyT2jWFRiKWua03vQ+Dxc8c2h2
+RRuH8LwEiOiWqPjWRxNqsARzZMI3ryHFCFBZh0FcpjH9kEeKVlLDYuV68k+ZucKd
+NiqUNn61lD7kbmEGwvzKwf91FrJ09+CBMx1OnWKm3gCNKDqAEFMZCOdn2MgesJYB
+/03lzPBS1jDfBXImXRcTBzpgA+wdCLn0cIQ1eLWUwS5tUqUJNh36nHdVyJ0P2Yjd
+JLuxhFcmBKOz1ShyyO+BBtKBO8EGbU6qKflOiwOw0Fsn8LjKcrHQ58NPui5y04BU
+Rypf3QIDAQABo4GdMIGaMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgO4MB0G
+A1UdDgQWBBQyMUOsE2J+CKzK0qd8KFBD2gaWyjBbBgNVHSAEVDBSMFAGBFUdIAAw
+SDBGBggrBgEFBQcCAjA6GjhLZWVwIGF3YXkgZnJvbSBjaGlsZHJlbi4gIFRoaXMg
+Y2VydGlmaWNhdGUgaXMgbm90IGEgdG95LjANBgkqhkiG9w0BAQsFAAOCAQEAnkoy
+2tWJOyyyIQwtVojUxv1GWQPnw3WCUcKpuX4CJhHXLxNErW1fBg7bmo08BNmBPPpq
+WrJsy5lbBgUo9kgpViux5Stfy1rRIRsRLfl/icgCvJmUAxkmRCZL7yUvwG4K7s+8
+DwT+nW/XuWNP6Hd/qHccexB6COJ8KwvTdVoxAkCdX8qw4MCb/f7Kb1yle/vwBM5Q
+UUONCJ4bEns1vnb9DGlNDUJNwCfwORAaVJpVS38Mv4UnSTmb2KMePtCWcx/dNsYR
+2XrSGqLDnTvHwOpyhbfFTmackysGoSuDytORXy8YbwEiF13BwEK8i3rgNN0Z2ojf
+cpmE2xxmaa+A2uuN6g==
+-----END CERTIFICATE-----
\ No newline at end of file
diff --git a/test/csit/scripts/so/chef-config/mso-docker.json b/test/csit/scripts/so/chef-config/mso-docker.json
new file mode 100644
index 000000000..13b0d22fc
--- /dev/null
+++ b/test/csit/scripts/so/chef-config/mso-docker.json
@@ -0,0 +1,220 @@
+{
+ "name": "mso-docker",
+ "description": "MSO Docker Images",
+ "chef_type": "environment",
+ "json_class": "Chef::Environment",
+
+ "default_attributes":
+ {
+ "mso_config_path": "/etc/mso/config.d/",
+
+ "mso-topology-config":
+ {
+ "msb-server-ip": "172.30.3.34",
+ "msb-server-port": "80"
+ },
+
+ "mso-api-handler-infra-config":
+ {
+ "bpelURL": "http://mso:8080",
+ "bpelAuth": "786864AA53D0DCD881AED1154230C0C3058D58B9339D2EFB6193A0F0D82530E1",
+ "camundaURL": "http://mso:8080",
+ "camundaAuth": "5119D1AF37F671FC01FFAD2151D93EFB2BBB503E879FD07104D024EDDF118FD1"
+ },
+
+ "asdc-connections":
+ {
+ "asdc-controller1":
+ {
+ "user": "mso",
+ "consumerGroup": "sdc-OpenSource-Env1",
+ "consumerId": "sdc-COpenSource-Env11",
+ "environmentName": "SDC-OpenSource-Env1",
+ "asdcAddress": "c2.vm1.sdc.simpledemo.openecomp.org:8443",
+ "password": "613AF3483E695524F9857643B697FA51C7A9A0951094F53791485BF3458F9EADA37DBACCCEBD0CB242B85B4062745247",
+ "pollingInterval": 60,
+ "pollingTimeout": 60,
+ "relevantArtifactTypes": "HEAT,HEAT_ENV,HEAT_VOL",
+ "activateServerTLSAuth": "false",
+ "keyStorePassword": "",
+ "keyStorePath": ""
+ },
+
+ "asdc-controller2":
+ {
+ "user": "user",
+ "consumerGroup": "mso",
+ "consumerId": "mso",
+ "environmentName": "PROD",
+ "asdcAddress": "asdc_hostname2:8443",
+ "password": "f3895035812addbf115bfaf7d2dc850e",
+ "pollingInterval": 60,
+ "pollingTimeout": 60,
+ "relevantArtifactTypes": "HEAT,HEAT_ENV,HEAT_VOL",
+ "activateServerTLSAuth": "false",
+ "keyStorePassword": "",
+ "keyStorePath": ""
+ }
+ },
+
+ "mso-sdnc-adapter-config":
+ {
+ "sdncurls":
+ [
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/restconf/operations/L3SDN-API:",
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/restconf/config/L3SDN-API:",
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/restconf/operations/Firewall-API:",
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/restconf/config",
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/restconf/operations/VNF-API:",
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/restconf/operations/NBNC-API:",
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/restconf/operations/NORTHBOUND-API:service-topology-operation",
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/restconf/operations/GENERIC-RESOURCE-API:",
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/restconf/operations/VNFTOPOLOGYAIC-API:",
+ "http://c1.vm1.sdnc.simpledemo.openecomp.org:8282/"
+ ],
+
+ "bpelurl": "http://c1.vm1.mso.simpledemo.openecomp.org:8080/mso/SDNCAdapterCallbackService",
+ "restbpelurl": "http://c1.vm1.mso.simpledemo.openecomp.org:8080/mso/WorkflowMessage",
+ "myurl": "http://c1.vm1.mso.simpledemo.openecomp.org:8080/adapters/rest/SDNCNotify",
+ "sdncauth": "263f7d5f944d4d0c76db74b4148bec67d0bc796a874bc0d2a2a12aae89a866aa69133f700f391f784719a37f6a68d29bf5a2fbae1dab0402db7788c800c5ba73",
+ "bpelauth": "5119D1AF37F671FC01FFAD2151D93EFB2BBB503E879FD07104D024EDDF118FD1",
+ "sdncconnecttime": "5000"
+ },
+
+ "mso-po-adapter-config":
+ {
+ "identity_services":
+ [
+ {
+ "dcp_clli": "RAX_KEYSTONE",
+ "identity_url": "https://identity.api.rackspacecloud.com/v2.0",
+ "mso_id": "RACKSPACE_ACCOUNT_ID",
+ "mso_pass": "RACKSPACE_ACCOUNT_APIKEY",
+ "admin_tenant": "service",
+ "member_role": "admin",
+ "tenant_metadata": "true",
+ "identity_server_type": "KEYSTONE",
+ "identity_authentication_type": "RACKSPACE_APIKEY"
+ }
+ ],
+
+ "cloud_sites":
+ [
+ {
+ "id": "Dallas",
+ "aic_version": "2.5",
+ "lcp_clli": "DFW",
+ "region_id": "DFW",
+ "identity_service_id": "RAX_KEYSTONE"
+ },
+
+ {
+ "id": "Northern Virginia",
+ "aic_version": "2.5",
+ "lcp_clli": "IAD",
+ "region_id": "IAD",
+ "identity_service_id": "RAX_KEYSTONE"
+ },
+
+ {
+ "id": "Chicago",
+ "aic_version": "2.5",
+ "lcp_clli": "ORD",
+ "region_id": "ORD",
+ "identity_service_id": "RAX_KEYSTONE"
+ }
+ ],
+
+ "vnfbpelauth": "5119D1AF37F671FC01FFAD2151D93EFB2BBB503E879FD07104D024EDDF118FD1",
+ "checkrequiredparameters": "true",
+ "nwbpelauth": "5119D1AF37F671FC01FFAD2151D93EFB2BBB503E879FD07104D024EDDF118FD1"
+ },
+
+ "mso-workflow-message-adapter-config":
+ {
+ "wmbpelurl": "http://mso:8080/mso/WorkflowMessage",
+ "wmbpelauth": "5119D1AF37F671FC01FFAD2151D93EFB2BBB503E879FD07104D024EDDF118FD1"
+ },
+
+ "mso-appc-adapter-config":
+ {
+ "appc_url": "http://localhost:18080",
+ "appc_stub": "/AppC-Simulator/action/execute",
+ "appc_auth": "786864AA53D0DCD881AED1154230C0C3058D58B9339D2EFB6193A0F0D82530E1",
+ "appc_timeout": "30",
+ "ueb_cluster_address": "http://localhost:18080",
+ "ueb_consumer_group": "testgroup",
+ "ueb_consumer_id": "testuser",
+ "ueb_topic": "queuetopic",
+ "ueb_polling_interval": "30",
+ "ueb_user": "user",
+ "ueb_password": "1ec0d74615d4e4639f991c0590c83b88",
+ "bpel_url": "http://localhost:18080",
+ "bpel_stub": "/AppC-Simulator/bpmn/appCResponse",
+ "bpel_auth": "786864AA53D0DCD881AED1154230C0C3058D58B9339D2EFB6193A0F0D82530E1",
+ "bpel_timeout": "30"
+ },
+
+ "mso-bpmn-config":
+ {
+ "urnFileSystemLoadingEnabled": "true"
+ },
+
+ "mso-bpmn-urn-config":
+ {
+ "debug":"false",
+ "invariantIdL3ToHigherLayerDeleteBonding": "50359538-066f-4a8d-807f-f2bc8eaa79dc",
+ "versionIdL3ToHigherLayerDeleteBonding": "52dbec20-47aa-42e4-936c-331d8e350d44",
+ "infraCustomerId": "21014aa2-526b-11e6-beb8-9e71128cae77",
+ "sniroAuth": "test:testpwd",
+ "sniroEndpoint": "http://sniro.api.simpledemo.openecomp.org:8080/sniro/api/v2/placement",
+ "sniroTimeout": "PT30M",
+ "serviceAgnosticSniroHost": "http://sniro.api.simpledemo.openecomp.org:8080",
+ "serviceAgnosticSniroEndpoint": "/sniro/api/v2/placement",
+ "aaiEndpoint": "https://aai.api.simpledemo.openecomp.org:8443",
+ "aaiAuth": "2630606608347B7124C244AB0FE34F6F",
+ "adaptersNamespace": "http://org.openecomp.mso",
+ "adaptersCompletemsoprocessEndpoint": "http://mso:8080/CompleteMsoProcess",
+ "adaptersDbEndpoint": "http://mso:8080/dbadapters/MsoRequestsDbAdapter",
+ "adaptersOpenecompDbEndpoint": "http://mso:8080/dbadapters/RequestsDbAdapter",
+ "catalogDbEndpoint": "http://mso:8080/ecomp/mso/catalog",
+ "adaptersSdncEndpoint": "http://mso:8080/adapters/SDNCAdapter",
+ "adaptersSdncRestEndpoint": "http://mso:8080/adapters/rest/v1/sdnc",
+ "adaptersTenantEndpoint": "http://mso:8080/tenants/TenantAdapter",
+ "adaptersDbAuth": "6B0E6863FB8EE010AB6F191B3C0489437601E81DC7C86305CB92DB98AFC53D74",
+ "adaptersWorkflowMessageEndpoint": "http://mso:8080/workflows/messages",
+ "workflowMessageEndpoint": "http://mso:8080/mso/WorkflowMessage",
+ "workflowSdncAdapterCallback": "http://mso:8080/mso/SDNCAdapterCallbackService",
+ "workflowSdncReplicationDelay": "PT5S",
+ "workflowAaiDistributionDelay": "PT30S",
+ "msoKey": "07a7159d3bf51a0e53be7a8f89699be7",
+ "adaptersPoAuth": "6B0E6863FB8EE010AB6F191B3C0489437601E81DC7C86305CB92DB98AFC53D74",
+ "sdncTimeout": "PT5M",
+ "rollback": "true",
+ "adaptersNetworkEndpoint": "http://mso:8080/networks/NetworkAdapter",
+ "adaptersNetworkRestEndpoint": "http://mso:8080/networks/rest/v1/networks",
+ "adaptersVnfAsyncEndpoint": "http://mso:8080/vnfs/VnfAdapterAsync",
+ "workflowVnfAdapterDeleteCallback": "http://mso:8080/mso/vnfAdapterNotify",
+ "workflowVnfAdapterCreateCallback": "http://mso:8080/mso/vnfAdapterNotify",
+ "adaptersVnfRestEndpoint": "http://mso:8080/vnfs/rest/v1/vnfs",
+ "workflowVnfAdapterRestCallback": "http://mso:8080/mso/vnfAdapterRestNotify",
+ "poTimeout": "PT5M",
+ "sdncFirewallYangModel": "http://com/att/svc/mis/firewall-lite-gui",
+ "sdncFirewallYangModelVersion": "2015-05-15",
+ "sdncTimeoutFirewall": "20",
+ "callbackRetryAttempts": "30",
+ "callbackRetrySleepTime": "1000",
+ "workflowL3ToHigherLayerAddBondingModelName": "WAN Bonding",
+ "workflowL3ToHigherLayerAddBondingModelVersion": "2.0"
+ }
+ },
+
+ "override_attributes":
+ {
+ },
+
+ "cookbook_versions":
+ {
+ "mso-config": "~> 1.0.0"
+ }
+}
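Not part of the patch: a minimal shell sketch of how this Chef environment file could be sanity-checked before the SO containers consume it. It assumes jq is installed; the attribute paths simply follow the JSON above, and the file path is the repository-relative one from the diff header.

#!/bin/sh
# Hypothetical pre-flight check: confirm mso-docker.json parses and print two key endpoints.
CFG=test/csit/scripts/so/chef-config/mso-docker.json
jq -r '.default_attributes."mso-topology-config"."msb-server-ip"' "$CFG"
jq -r '.default_attributes."mso-bpmn-urn-config".aaiEndpoint' "$CFG"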
diff --git a/test/csit/scripts/so/mariadb/conf.d/mariadb1.cnf b/test/csit/scripts/so/mariadb/conf.d/mariadb1.cnf
new file mode 100644
index 000000000..39ed02248
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/conf.d/mariadb1.cnf
@@ -0,0 +1,193 @@
+# Example MySQL config file for medium systems.
+#
+# This is for a system with 8G of memory where MySQL plays
+# an important part, or for systems where MySQL is used together
+# with other programs (such as a web server)
+#
+# In this file, you can use all long options that a program supports.
+# If you want to know which options a program supports, run the program
+# with the "--help" option.
+
+# The following options will be passed to all MySQL clients
+##[client]
+##user = root
+##port = 3306
+##socket = //opt/app/mysql/mysql.sock
+
+# Here follow entries for some specific programs
+
+# The MySQL server
+[mysqld]
+##performance_schema
+
+slow_query_log =ON
+long_query_time =2
+slow_query_log_file =//var/lib/mysql/slow_query.log
+
+skip-external-locking
+explicit_defaults_for_timestamp = true
+skip-symbolic-links
+local-infile = 0
+key_buffer_size = 16M
+max_allowed_packet = 4M
+table_open_cache = 100
+sort_buffer_size = 512K
+net_buffer_length = 8K
+read_buffer_size = 256K
+read_rnd_buffer_size = 512K
+myisam_sort_buffer_size = 8M
+max_connections = 500
+lower_case_table_names = 1
+thread_stack = 256K
+thread_cache_size = 25
+query_cache_size = 8M
+query_cache_type = 0
+query_prealloc_size = 512K
+query_cache_limit = 1M
+
+# Password validation
+##plugin-load-add=simple_password_check.so
+##simple_password_check_other_characters=0
+
+# Audit Log settings
+plugin-load-add=server_audit.so
+server_audit=FORCE_PLUS_PERMANENT
+server_audit_file_path=//var/lib/mysql/audit.log
+server_audit_file_rotate_size=50M
+server_audit_events=CONNECT,QUERY,TABLE
+server_audit_logging=on
+
+# Don't listen on a TCP/IP port at all. This can be a security enhancement,
+# if all processes that need to connect to mysqld run on the same host.
+# All interaction with mysqld must be made via Unix sockets or named pipes.
+# Note that using this option without enabling named pipes on Windows
+# (via the "enable-named-pipe" option) will render mysqld useless!
+#
+#skip-networking
+
+# Replication Master Server (default)
+# binary logging is required for replication
+##log-bin=//var/lib/mysql/mysql-bin
+
+# binary logging format - mixed recommended
+binlog_format=row
+
+# required unique id between 1 and 2^32 - 1
+# defaults to 1 if master-host is not set
+# but will not function as a master if omitted
+
+# Replication Slave (comment out master section to use this)
+#
+# To configure this host as a replication slave, you can choose between
+# two methods :
+#
+# 1) Use the CHANGE MASTER TO command (fully described in our manual) -
+# the syntax is:
+#
+# CHANGE MASTER TO MASTER_HOST=<host>, MASTER_PORT=<port>,
+# MASTER_USER=<user>, MASTER_PASSWORD=<password> ;
+#
+# where you replace <host>, <user>, <password> by quoted strings and
+# <port> by the master's port number (3306 by default).
+#
+# Example:
+#
+# CHANGE MASTER TO MASTER_HOST='125.564.12.1', MASTER_PORT=3306,
+# MASTER_USER='joe', MASTER_PASSWORD='secret';
+#
+# OR
+#
+# 2) Set the variables below. However, in case you choose this method, then
+# when you start replication for the first time (even unsuccessfully, for example
+# if you mistyped the password in master-password and the slave fails to
+# connect), the slave will create a master.info file, and any later
+# change in this file to the variables' values below will be ignored and
+# overridden by the content of the master.info file, unless you shutdown
+# the slave server, delete master.info and restart the slave server.
+# For that reason, you may want to leave the lines below untouched
+# (commented) and instead use CHANGE MASTER TO (see above)
+#
+# required unique id between 2 and 2^32 - 1
+# (and different from the master)
+# defaults to 2 if master-host is set
+# but will not function as a slave if omitted
+#server-id = 2
+#
+# The replication master for this slave - required
+#master-host = <hostname>
+#
+# The username the slave will use for authentication when connecting
+# to the master - required
+#master-user = <username>
+#
+# The password the slave will authenticate with when connecting to
+# the master - required
+#master-password = <password>
+#
+# The port the master is listening on.
+# optional - defaults to 3306
+#master-port = <port>
+#
+# binary logging - not required for slaves, but recommended
+#log-bin=mysql-bin
+
+# Uncomment the following if you are using InnoDB tables
+##innodb_data_home_dir = //opt/app/mysql/data
+##innodb_data_file_path = ibdata1:20M:autoextend:max:32G
+##innodb_log_group_home_dir = //opt/app/mysql/iblogs
+# You can set .._buffer_pool_size up to 50 - 80 %
+# of RAM but beware of setting memory usage too high
+#innodb_buffer_pool_size = 6380M
+#innodb_additional_mem_pool_size = 2M
+# Set .._log_file_size to 25 % of buffer pool size
+innodb_log_file_size = 150M
+innodb_log_files_in_group = 3
+innodb_log_buffer_size = 8M
+#innodb_flush_log_at_trx_commit = 1
+innodb_lock_wait_timeout = 50
+innodb_autoextend_increment = 100
+expire_logs_days = 8
+open_files_limit = 2000
+transaction-isolation=READ-COMMITTED
+####### Galera parameters #######
+## Galera Provider configuration
+wsrep_provider=/usr/lib/galera/libgalera_smm.so
+wsrep_provider_options="gcache.size=2G; gcache.page_size=1G"
+## Galera Cluster configuration
+wsrep_cluster_name="MSO-automated-tests-cluster"
+wsrep_cluster_address="gcomm://"
+#wsrep_cluster_address="gcomm://mariadb1,mariadb2,mariadb3"
+##wsrep_cluster_address="gcomm://192.169.3.184,192.169.3.185,192.169.3.186"
+## Galera Synchronization configuration
+wsrep_sst_method=rsync
+#wsrep_sst_method=xtrabackup-v2
+#wsrep_sst_auth="sstuser:Mon#2o!6"
+## Galera Node configuration
+wsrep_node_name="mariadb1"
+##wsrep_node_address="192.169.3.184"
+wsrep_on=ON
+## Status notification
+#wsrep_notify_cmd=/opt/app/mysql/bin/wsrep_notify
+#######
+
+
+[mysqldump]
+quick
+max_allowed_packet = 16M
+
+[mysql]
+no-auto-rehash
+# Remove the next comment character if you are not familiar with SQL
+#safe-updates
+
+[myisamchk]
+key_buffer_size = 20971520
+
+##[mysqlhotcopy]
+##interactive-timeout
+##[mysqld_safe]
+##malloc-lib=//opt/app/mysql/local/lib/libjemalloc.so.1
+##log-error=//opt/app/mysql/log/mysqld.log
+
+general_log_file = /var/log/mysql/mysql.log
+general_log = 1
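Not part of the patch: a minimal sketch, in the style of the CSIT shell scripts, for verifying that a MariaDB node started with mariadb1.cnf actually picked up the logging and Galera settings above. MYSQL_ROOT_PASSWORD is assumed to be exported, as in the init scripts that follow.

#!/bin/sh
# Hypothetical sanity check against a running node using this config.
mysql -uroot -p"$MYSQL_ROOT_PASSWORD" -e "SHOW VARIABLES LIKE 'slow_query_log%';"
mysql -uroot -p"$MYSQL_ROOT_PASSWORD" -e "SHOW VARIABLES LIKE 'wsrep_cluster_name';"
mysql -uroot -p"$MYSQL_ROOT_PASSWORD" -e "SHOW STATUS LIKE 'wsrep_cluster_size';"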
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/01-load-default-sql-files.sh b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/01-load-default-sql-files.sh
new file mode 100644
index 000000000..326f27356
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/01-load-default-sql-files.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+#
+# ============LICENSE_START==========================================
+# ===================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ===================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END============================================
+#
+# ECOMP and OpenECOMP are trademarks
+# and service marks of AT&T Intellectual Property.
+#
+#
+cd /docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/demo-vfw
+mysql -uroot -p"$MYSQL_ROOT_PASSWORD" -f < create_mso_db-demo-vfw.sql
\ No newline at end of file
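For context, not part of the patch: the official mariadb image executes any *.sh and *.sql files it finds under /docker-entrypoint-initdb.d when the data directory is first initialized, which is how the script above gets run. A sketch of wiring it in follows; the image tag, password, and host paths are illustrative assumptions only.

#!/bin/sh
# Hypothetical: mount the init scripts and config so they apply on first start-up.
docker run -d --name mariadb \
  -e MYSQL_ROOT_PASSWORD=password \
  -v "$(pwd)/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d":/docker-entrypoint-initdb.d:ro \
  -v "$(pwd)/test/csit/scripts/so/mariadb/conf.d":/etc/mysql/conf.d:ro \
  mariadb:10.1.11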
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/02-load-additional-changes.sh b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/02-load-additional-changes.sh
new file mode 100644
index 000000000..a16380108
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/02-load-additional-changes.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+#
+# ============LICENSE_START==========================================
+# ===================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ===================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END============================================
+#
+# ECOMP and OpenECOMP are trademarks
+# and service marks of AT&T Intellectual Property.
+#
+#
+
+# TODO: update this script to work with the new DB schema
+
+# mysql -uroot -p$MYSQL_ROOT_PASSWORD -e "UPDATE heat_environment SET ENVIRONMENT='parameters:\n vfw_image_name: Ubuntu 14.04 LTS (Trusty Tahr) (PVHVM)\n vfw_flavor_name: 4 GB General Purpose v1\n public_net_id: 00000000-0000-0000-0000-000000000000\n unprotected_private_net_id: zdfw1fwl01_unprotected\n protected_private_net_id: zdfw1fwl01_protected\n ecomp_private_net_id: oam_ecomp\n unprotected_private_net_cidr: 192.168.10.0/24\n protected_private_net_cidr: 192.168.20.0/24\n ecomp_private_net_cidr: 192.168.9.0/24\n vfw_private_ip_0: 192.168.10.100\n vfw_private_ip_1: 192.168.20.100\n vfw_private_ip_2: 192.168.9.100\n vpg_private_ip_0: 192.168.10.200\n vpg_private_ip_1: 192.168.9.200\n vsn_private_ip_0: 192.168.20.250\n vsn_private_ip_1: 192.168.9.250\n vfw_name_0: zdfw1fwl01fwl01\n vpg_name_0: zdfw1fwl01pgn01\n vsn_name_0: zdfw1fwl01snk01\n vnf_id: vFirewall_demo_app\n vf_module_id: vFirewall\n webserver_ip: 162.242.237.182\n dcae_collector_ip: 192.168.9.1\n key_name: vfw_key\n pub_key: INSERT YOUR PUBLIC KEY HERE' where id=5;" mso_catalog
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/automated-tests/create_mso_db-tests.sql b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/automated-tests/create_mso_db-tests.sql
new file mode 100644
index 000000000..146ad0160
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/automated-tests/create_mso_db-tests.sql
@@ -0,0 +1,49 @@
+SOURCE ../default/create_mso_db-default.sql
+
+USE `mso_requests`;
+DROP USER 'mso';
+CREATE USER 'mso';
+GRANT ALL on mso_requests.* to 'mso' identified by 'mso123' with GRANT OPTION;
+FLUSH PRIVILEGES;
+
+USE `mso_catalog`;
+DROP USER 'catalog';
+CREATE USER 'catalog';
+GRANT ALL on mso_catalog.* to 'catalog' identified by 'catalog123' with GRANT OPTION;
+FLUSH PRIVILEGES;
+
+LOCK TABLES `NETWORK_RESOURCE` WRITE;
+/*!40000 ALTER TABLE `NETWORK_RESOURCE` DISABLE KEYS */;
+/*!40000 ALTER TABLE `NETWORK_RESOURCE` ENABLE KEYS */;
+insert into NETWORK_RESOURCE (id, NETWORK_TYPE, VERSION_STR, ORCHESTRATION_MODE ,DESCRIPTION, TEMPLATE_ID, NEUTRON_NETWORK_TYPE, AIC_VERSION_MIN) values
+(1, "vlan",'1',"NEUTRON","Cool network",1,"BASIC","0");
+UNLOCK TABLES;
+
+LOCK TABLES `NETWORK_RECIPE` WRITE;
+/*!40000 ALTER TABLE `NETWORK_RECIPE` DISABLE KEYS */;
+INSERT INTO `NETWORK_RECIPE`(`NETWORK_TYPE`, `ACTION`, `VERSION_STR`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`) VALUES
+('vlan','CREATE','1',NULL,'/active-bpel/services/REST/CreateNetwork',NULL,180,NULL),
+('vlan','DELETE','1',NULL,'/active-bpel/services/REST/DeleteNetwork',NULL,180,NULL);
+/*!40000 ALTER TABLE `NETWORK_RECIPE` ENABLE KEYS */;
+UNLOCK TABLES;
+
+LOCK TABLES `VNF_RECIPE` WRITE;
+INSERT INTO `VNF_RECIPE`(`ID`, `VNF_TYPE`, `ACTION`, `VERSION_STR`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`) VALUES
+(100,'VPE','CREATE','1','','/active-bpel/services/REST/CreateGenericVNF','',180,'SDN-ETHERNET-INTERNET'),
+(101,'VPE','DELETE','1','','/active-bpel/services/REST/DeleteGenericVNF','',180,'SDN-ETHERNET-INTERNET');
+UNLOCK TABLES;
+
+LOCK TABLES `VF_MODULE` WRITE;
+INSERT INTO `VF_MODULE`(`ID`, `TYPE`, `ASDC_SERVICE_MODEL_VERSION`, `MODEL_NAME`, `MODEL_VERSION`, `IS_BASE`, `VNF_RESOURCE_ID`) VALUES
+(100,'dns-servicetest/DNSResource-1::VF_DNS::module-1','1.0','VF_DNS::module-1','1.0','1','7'),
+(101,'dns-servicetest/DNSResource-1::Mog111..mog_psm..module-1','1.0','Mog111..mog_psm..module-1','1.0','1','7');
+UNLOCK TABLES;
+
+LOCK TABLES `VNF_RESOURCE` WRITE;
+INSERT INTO `VNF_RESOURCE`(`ID`, `VNF_TYPE`, `ASDC_SERVICE_MODEL_VERSION`, `ORCHESTRATION_MODE`, `MODEL_VERSION`) VALUES
+(100,'dns-servicetest/DNSResource-1','1.0','VF_DNS::module-1','1.0');
+UNLOCK TABLES;
+
+DELETE FROM HEAT_TEMPLATE_PARAMS;
+DELETE FROM HEAT_TEMPLATE;
+DELETE FROM HEAT_ENVIRONMENT;
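Not part of the patch: because the SOURCE statement at the top of this file uses a relative path, it has to be loaded from its own directory, mirroring 01-load-default-sql-files.sh above. A minimal sketch of a loader for this automated-tests variant:

#!/bin/sh
# Hypothetical loader: the mysql client resolves SOURCE paths against its working
# directory, so change into the bulkload directory first.
cd /docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/automated-tests
mysql -uroot -p"$MYSQL_ROOT_PASSWORD" -f < create_mso_db-tests.sql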
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/default/create_mso_db-default.sql b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/default/create_mso_db-default.sql
new file mode 100644
index 000000000..1fea840d7
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/default/create_mso_db-default.sql
@@ -0,0 +1,128 @@
+SOURCE ../../camunda/mariadb_engine_7.7.3-ee.sql
+
+--
+-- Create an admin user automatically for the cockpit
+--
+SOURCE ../../camunda/mysql_create_camunda_admin.sql
+
+--
+-- Current Database: `mso_requests`
+--
+
+DROP DATABASE IF EXISTS `mso_requests`;
+
+CREATE DATABASE /*!32312 IF NOT EXISTS*/ `mso_requests` /*!40100 DEFAULT CHARACTER SET latin1 */;
+
+USE `mso_requests`;
+
+SOURCE ../../main-schemas/MySQL-Requests-schema.sql
+
+--
+-- Current Database: `mso_catalog`
+--
+
+DROP DATABASE IF EXISTS `mso_catalog`;
+
+CREATE DATABASE /*!32312 IF NOT EXISTS*/ `mso_catalog` /*!40100 DEFAULT CHARACTER SET latin1 */;
+
+USE `mso_catalog`;
+
+SOURCE ../../main-schemas/MySQL-Catalog-schema.sql
+
+SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
+
+INSERT INTO `heat_template` (`ARTIFACT_UUID`, `NAME`, `VERSION`, `DESCRIPTION`, `BODY`, `TIMEOUT_MINUTES`, `ARTIFACT_CHECKSUM`, `CREATION_TIMESTAMP`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','Contrail30-l2nodhcp','1',NULL,'heat_template_version: 2015-04-30\n\ndescription:\n HOT template that creates a Contrail Virtual Network with L2NODHCP\n\nparameters:\n network_name:\n type: string\n description: Name of direct network (e.g. core, dmz)\n default: ECOMPNetwork\n shared:\n type: boolean\n description: Shared amongst tenants\n default: False\n external:\n type: boolean\n description: router_external for the VirtualNetwork\n default: False\n route_targets:\n type: comma_delimited_list\n description: Network route-targets (RT)\n default: \"\"\n subnet_list:\n type: json\n description: Network subnets\n default: []\n policy_refs:\n type: comma_delimited_list\n description: Policies referenced by Network\n default: \"\"\n policy_refsdata:\n type: json\n description: Policies referenced by Network\n default: []\n route_table_refs:\n type: comma_delimited_list\n description: Route Tables referenced by Network\n default: \"\"\n virtual_network_properties_allow_transit:\n type: boolean\n description: allow_transit for the VirtualNetwork\n default: True\n virtual_network_properties_forwarding_mode:\n type: string\n description: forwarding_mode for the VirtualNetwork\n default: l2\n virtual_network_properties_rpf:\n type: string\n description: rpf for the VirtualNetwork\n default: disable\n flood_unknown_unicast:\n type: boolean\n description: flood_unknown_unicast for the VirtualNetwork\n default: True\n\noutputs:\n network_id:\n description: Openstack network identifier\n value: { get_resource: network }\n network_fqdn:\n description: Openstack network identifier\n value: {list_join: [\':\', { get_attr: [network, fq_name] } ] }\n\nresources:\n networkIpam:\n type: OS::ContrailV2::NetworkIpam\n properties:\n name: { get_param: network_name }\n\n network:\n type: OS::ContrailV2::VirtualNetwork\n properties:\n name: { get_param: network_name }\n is_shared: {get_param: shared}\n router_external: { get_param: external }\n route_target_list:\n {\n route_target_list_route_target: { get_param: route_targets }\n }\n network_ipam_refs: [{ get_resource: networkIpam }]\n network_ipam_refs_data:\n [\n {\n network_ipam_refs_data_ipam_subnets: { get_param: subnet_list }\n }\n ]\n network_policy_refs: { get_param: policy_refs }\n network_policy_refs_data: { get_param: policy_refsdata }\n route_table_refs: { get_param: route_table_refs }\n flood_unknown_unicast: { get_param: flood_unknown_unicast } \n virtual_network_properties:\n {\n virtual_network_properties_allow_transit: { get_param: virtual_network_properties_allow_transit },\n virtual_network_properties_forwarding_mode: { get_param: virtual_network_properties_forwarding_mode },\n virtual_network_properties_rpf: { get_param: virtual_network_properties_rpf },\n }\n',10,'MANUAL RECORD','2017-10-05 18:52:03');
+
+INSERT INTO `heat_template` (`ARTIFACT_UUID`, `NAME`, `VERSION`, `DESCRIPTION`, `BODY`, `TIMEOUT_MINUTES`, `ARTIFACT_CHECKSUM`, `CREATION_TIMESTAMP`) VALUES ('4885c7a1-a9fe-11e7-8b4b-0242ac120002','Contrail30-gndirect','1',NULL,'heat_template_version: 2015-04-30\n\ndescription:\n HOT template that creates a Contrail Virtual Network for GNDIRECT\n\nparameters:\n network_name:\n type: string\n description: Name of direct network (e.g. core, dmz)\n default: ECOMPNetwork\n shared:\n type: boolean\n description: Shared amongst tenants\n default: False\n external:\n type: boolean\n description: router_external for the VirtualNetwork\n default: False\n route_targets:\n type: comma_delimited_list\n description: Network route-targets (RT)\n default: \"\"\n subnet_list:\n type: json\n description: Network subnets\n default: []\n policy_refs:\n type: comma_delimited_list\n description: Policies referenced by Network\n default: \"\"\n policy_refsdata:\n type: json\n description: Policies referenced by Network\n default: []\n route_table_refs:\n type: comma_delimited_list\n description: Route Tables referenced by Network\n default: \"\"\n virtual_network_properties_rpf:\n type: string\n description: rpf for the VirtualNetwork\n default: disable\n\noutputs:\n network_id:\n description: Openstack network identifier\n value: { get_resource: network }\n network_fqdn:\n description: Openstack network identifier\n value: {list_join: [\':\', { get_attr: [network, fq_name] } ] }\n\nresources:\n networkIpam:\n type: OS::ContrailV2::NetworkIpam\n properties:\n name: { get_param: network_name }\n\n network:\n type: OS::ContrailV2::VirtualNetwork\n properties:\n name: { get_param: network_name }\n is_shared: {get_param: shared}\n router_external: { get_param: external }\n route_target_list:\n {\n route_target_list_route_target: { get_param: route_targets }\n }\n network_ipam_refs: [{ get_resource: networkIpam }]\n network_ipam_refs_data:\n [\n {\n network_ipam_refs_data_ipam_subnets: { get_param: subnet_list }\n }\n ]\n network_policy_refs: { get_param: policy_refs }\n network_policy_refs_data: { get_param: policy_refsdata }\n route_table_refs: { get_param: route_table_refs }\n virtual_network_properties:\n {\n virtual_network_properties_rpf: { get_param: virtual_network_properties_rpf }\n }\n',10,'MANUAL RECORD','2017-10-05 18:52:03');
+
+INSERT INTO `heat_template` (`ARTIFACT_UUID`,`NAME`,`VERSION`,`BODY`,`TIMEOUT_MINUTES`,`DESCRIPTION`,`CREATION_TIMESTAMP`,`ARTIFACT_CHECKSUM`) VALUES ('efee1d84-b8ec-11e7-abc4-cec278b6b50a','Generic NeutronNet','1','heat_template_version: 2013-05-23\n\ndescription:\n HOT template that creates a Generic Neutron Network\n\nparameters:\n network_name:\n type: string\n description: Name of direct network (e.g. core, dmz)\n default: ECOMPNetwork\n network_subnet_name:\n type: string\n description: Name of subnet network (e.g. core, dmz)\n default: ECOMPNetwork\n network_subnet_cidr:\n type: string\n description: CIDR of subnet network (e.g. core, dmz)\n default: 10.0.0.0/16\n\noutputs:\n network_id:\n description: Openstack network identifier\n value: { get_resource: network }\n network_fqdn:\n description: Openstack network identifier\n value: {list_join: [\':\', { get_attr: [network, fq_name] } ] }\n\nresources:\n network:\n type: OS::Neutron::Net\n properties:\n name: {get_param: network_name }\n\n subnet:\n type: OS::Neutron::Subnet\n properties:\n name: { get_param: network_subnet_name }\n network_id: { get_resource: network }\n cidr: { get_param: network_subnet_cidr }\n enable_dhcp: false\n',10,'Generic Neutron Template','2017-10-26 14:44:00', 'MANUAL RECORD');
+
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','external','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','flood_unknown_unicast','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','network_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','policy_refs','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','policy_refsdata','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','route_table_refs','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','route_targets','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','shared','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','virtual_network_properties_allow_transit','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','virtual_network_properties_forwarding_mode','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c198-a9fe-11e7-8b4b-0242ac120002','virtual_network_properties_rpf','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c7a1-a9fe-11e7-8b4b-0242ac120002','external','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c7a1-a9fe-11e7-8b4b-0242ac120002','network_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c7a1-a9fe-11e7-8b4b-0242ac120002','policy_refs','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c7a1-a9fe-11e7-8b4b-0242ac120002','policy_refsdata','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c7a1-a9fe-11e7-8b4b-0242ac120002','route_table_refs','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c7a1-a9fe-11e7-8b4b-0242ac120002','route_targets','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c7a1-a9fe-11e7-8b4b-0242ac120002','shared','\0','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('4885c7a1-a9fe-11e7-8b4b-0242ac120002','virtual_network_properties_rpf','\0','string',NULL);
+
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (1,'CONTRAIL_BASIC','CREATE',NULL,'/mso/async/services/CreateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (2,'CONTRAIL_BASIC','DELETE',NULL,'/mso/async/services/DeleteNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (3,'CONTRAIL_BASIC','UPDATE',NULL,'/mso/async/services/UpdateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (4,'CONTRAIL_SHARED','CREATE',NULL,'/mso/async/services/CreateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (5,'CONTRAIL_SHARED','UPDATE',NULL,'/mso/async/services/UpdateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (6,'CONTRAIL_SHARED','DELETE',NULL,'/mso/async/services/DeleteNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (7,'CONTRAIL_EXTERNAL','CREATE',NULL,'/mso/async/services/CreateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (8,'CONTRAIL_EXTERNAL','UPDATE',NULL,'/mso/async/services/UpdateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (9,'CONTRAIL_EXTERNAL','DELETE',NULL,'/mso/async/services/DeleteNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (10,'CONTRAIL30_BASIC','CREATE',NULL,'/mso/async/services/CreateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (11,'CONTRAIL30_BASIC','UPDATE',NULL,'/mso/async/services/UpdateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (12,'CONTRAIL30_BASIC','DELETE',NULL,'/mso/async/services/DeleteNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (13,'CONTRAIL30_MPSCE','CREATE',NULL,'/mso/async/services/CreateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (14,'CONTRAIL30_MPSCE','UPDATE',NULL,'/mso/async/services/UpdateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (15,'CONTRAIL30_MPSCE','DELETE',NULL,'/mso/async/services/DeleteNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (16,'VID_DEFAULT','createInstance','VID_DEFAULT recipe to create network if no custom BPMN flow is found','/mso/async/services/CreateNetworkInstance',NULL,180,NULL,'2017-10-05 18:52:03','1.0');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (17,'VID_DEFAULT','updateInstance','VID_DEFAULT recipe to update network if no custom BPMN flow is found','/mso/async/services/UpdateNetworkInstance',NULL,180,NULL,'2017-10-05 18:52:03','1.0');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (18,'VID_DEFAULT','deleteInstance','VID_DEFAULT recipe to delete network if no custom BPMN flow is found','/mso/async/services/DeleteNetworkInstance',NULL,180,NULL,'2017-10-05 18:52:03','1.0');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (19,'CONTRAIL30_L2NODHCP','CREATE',NULL,'/mso/async/services/CreateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (20,'CONTRAIL30_L2NODHCP','UPDATE',NULL,'/mso/async/services/UpdateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (21,'CONTRAIL30_L2NODHCP','DELETE',NULL,'/mso/async/services/DeleteNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (22,'CONTRAIL30_GNDIRECT','CREATE',NULL,'/mso/async/services/CreateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (23,'CONTRAIL30_GNDIRECT','UPDATE',NULL,'/mso/async/services/UpdateNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+INSERT INTO `network_recipe` (`id`, `MODEL_NAME`, `ACTION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `NETWORK_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TYPE`, `CREATION_TIMESTAMP`, `VERSION_STR`) VALUES (24,'CONTRAIL30_GNDIRECT','DELETE',NULL,'/mso/async/services/DeleteNetworkV2',NULL,180,NULL,'2017-10-05 18:52:03','1');
+
+INSERT INTO `service` (`MODEL_UUID`, `MODEL_NAME`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `DESCRIPTION`, `CREATION_TIMESTAMP`, `TOSCA_CSAR_ARTIFACT_UUID`) VALUES ('48cc36cc-a9fe-11e7-8b4b-0242ac120002','VID_DEFAULT','48cd56c8-a9fe-11e7-8b4b-0242ac120002','1.0','Default service for VID to use for infra APIH orchestration1707MIGRATED1707MIGRATED','2017-10-05 18:52:03',NULL);
+INSERT INTO `service` (`MODEL_UUID`, `MODEL_NAME`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `DESCRIPTION`, `CREATION_TIMESTAMP`, `TOSCA_CSAR_ARTIFACT_UUID`) VALUES ('48cc3acd-a9fe-11e7-8b4b-0242ac120002','*','48ce2256-a9fe-11e7-8b4b-0242ac120002','1.0','Default service to use for infra APIH orchestration1707MIGRATED1707MIGRATED','2017-10-05 18:52:03',NULL);
+
+INSERT INTO `service_recipe` (`id`, `ACTION`, `VERSION_STR`, `DESCRIPTION`, `ORCHESTRATION_URI`, `SERVICE_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TIMEOUT_INTERIM`, `CREATION_TIMESTAMP`, `SERVICE_MODEL_UUID`) VALUES (1,'createInstance','1','VID_DEFAULT recipe to create service-instance if no custom BPMN flow is found','/mso/async/services/CreateGenericALaCarteServiceInstance',NULL,180,NULL,'2017-10-05 18:52:03','48cc36cc-a9fe-11e7-8b4b-0242ac120002');
+INSERT INTO `service_recipe` (`id`, `ACTION`, `VERSION_STR`, `DESCRIPTION`, `ORCHESTRATION_URI`, `SERVICE_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TIMEOUT_INTERIM`, `CREATION_TIMESTAMP`, `SERVICE_MODEL_UUID`) VALUES (2,'deleteInstance','1','VID_DEFAULT recipe to delete service-instance if no custom BPMN flow is found','/mso/async/services/DeleteGenericALaCarteServiceInstance',NULL,180,NULL,'2017-10-05 18:52:03','48cc36cc-a9fe-11e7-8b4b-0242ac120002');
+INSERT INTO `service_recipe` (`id`, `ACTION`, `VERSION_STR`, `DESCRIPTION`, `ORCHESTRATION_URI`, `SERVICE_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TIMEOUT_INTERIM`, `CREATION_TIMESTAMP`, `SERVICE_MODEL_UUID`) VALUES (3,'createInstance','1','DEFAULT recipe to create service-instance if no custom BPMN flow is found','/mso/async/services/CreateGenericALaCarteServiceInstance',NULL,180,NULL,'2017-10-05 18:52:03','48cc3acd-a9fe-11e7-8b4b-0242ac120002');
+INSERT INTO `service_recipe` (`id`, `ACTION`, `VERSION_STR`, `DESCRIPTION`, `ORCHESTRATION_URI`, `SERVICE_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TIMEOUT_INTERIM`, `CREATION_TIMESTAMP`, `SERVICE_MODEL_UUID`) VALUES (4,'deleteInstance','1','DEFAULT recipe to delete service-instance if no custom BPMN flow is found','/mso/async/services/DeleteGenericALaCarteServiceInstance',NULL,180,NULL,'2017-10-05 18:52:03','48cc3acd-a9fe-11e7-8b4b-0242ac120002');
+
+--
+-- Custom recipe for the VoLTE service, added temporarily
+--
+
+INSERT INTO `service` (`MODEL_UUID`, `MODEL_NAME`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `DESCRIPTION`, `CREATION_TIMESTAMP`, `TOSCA_CSAR_ARTIFACT_UUID`) VALUES ('dfcd7471-16c7-444e-8268-d4c50d90593a','UUI_DEFAULT','dfcd7471-16c7-444e-8268-d4c50d90593a','1.0','Default service for UUI to use for infra APIH orchestration1707MIGRATED1707MIGRATED','2017-10-23 18:52:03',NULL);
+
+INSERT INTO `service_recipe` (`id`, `ACTION`, `VERSION_STR`, `DESCRIPTION`, `ORCHESTRATION_URI`, `SERVICE_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TIMEOUT_INTERIM`, `CREATION_TIMESTAMP`, `SERVICE_MODEL_UUID`) VALUES (11,'createInstance','1','Custom recipe to create E2E service-instance if no custom BPMN flow is found','/mso/async/services/CreateCustomE2EServiceInstance',NULL,180,NULL,'2017-10-05 18:52:03','dfcd7471-16c7-444e-8268-d4c50d90593a');
+INSERT INTO `service_recipe` (`id`, `ACTION`, `VERSION_STR`, `DESCRIPTION`, `ORCHESTRATION_URI`, `SERVICE_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TIMEOUT_INTERIM`, `CREATION_TIMESTAMP`, `SERVICE_MODEL_UUID`) VALUES (12,'deleteInstance','1','Custom recipe to delete E2E service-instance if no custom BPMN flow is found','/mso/async/services/DeleteCustomE2EServiceInstance',NULL,180,NULL,'2017-10-05 18:52:03','dfcd7471-16c7-444e-8268-d4c50d90593a');
+
+INSERT INTO `temp_network_heat_template_lookup` (`NETWORK_RESOURCE_MODEL_NAME`, `HEAT_TEMPLATE_ARTIFACT_UUID`, `AIC_VERSION_MIN`, `AIC_VERSION_MAX`) VALUES ('CONTRAIL30_GNDIRECT','4885c7a1-a9fe-11e7-8b4b-0242ac120002','3.0',NULL);
+INSERT INTO `temp_network_heat_template_lookup` (`NETWORK_RESOURCE_MODEL_NAME`, `HEAT_TEMPLATE_ARTIFACT_UUID`, `AIC_VERSION_MIN`, `AIC_VERSION_MAX`) VALUES ('CONTRAIL30_L2NODHCP','4885c198-a9fe-11e7-8b4b-0242ac120002','3.0',NULL);
+INSERT INTO `temp_network_heat_template_lookup` (`NETWORK_RESOURCE_MODEL_NAME`, `HEAT_TEMPLATE_ARTIFACT_UUID`, `AIC_VERSION_MIN`, `AIC_VERSION_MAX`) VALUES ('Generic NeutronNet','efee1d84-b8ec-11e7-abc4-cec278b6b50a','2.0',NULL);
+
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (1,'*','VOLUME_GROUP',NULL,'CREATE',NULL,'1','Recipe Match All for','/mso/async/services/createCinderVolumeV1',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (2,'*','VOLUME_GROUP',NULL,'DELETE',NULL,'1','Recipe Match All for','/mso/async/services/deleteCinderVolumeV1',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (3,'*','VOLUME_GROUP',NULL,'UPDATE',NULL,'1','Recipe Match All for','/mso/async/services/updateCinderVolumeV1',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (4,NULL,'VOLUME_GROUP',NULL,'CREATE_VF_MODULE_VOL',NULL,'1','Recipe Match All for','/mso/async/services/CreateVfModuleVolume',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (5,NULL,'VOLUME_GROUP',NULL,'DELETE_VF_MODULE_VOL',NULL,'1','Recipe Match All for','/mso/async/services/DeleteVfModuleVolume',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (6,NULL,'VOLUME_GROUP',NULL,'UPDATE_VF_MODULE_VOL',NULL,'1','Recipe Match All for','/mso/async/services/UpdateVfModuleVolume',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (7,NULL,'volumeGroup','VID_DEFAULT','createInstance',NULL,'1','VID_DEFAULT recipe t','/mso/async/services/CreateVfModuleVolumeInfraV1',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (8,NULL,'volumeGroup','VID_DEFAULT','deleteInstance',NULL,'1','VID_DEFAULT recipe t','/mso/async/services/DeleteVfModuleVolumeInfraV1',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (9,NULL,'volumeGroup','VID_DEFAULT','updateInstance',NULL,'1','VID_DEFAULT recipe t','/mso/async/services/UpdateVfModuleVolumeInfraV1',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (10,NULL,'vfModule','VID_DEFAULT','createInstance',NULL,'1','VID_DEFAULT recipe t','/mso/async/services/CreateVfModuleInfra',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (11,NULL,'vfModule','VID_DEFAULT','deleteInstance',NULL,'1','VID_DEFAULT recipe t','/mso/async/services/DeleteVfModuleInfra',null,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_components_recipe` (`id`, `VNF_TYPE`, `VNF_COMPONENT_TYPE`, `VF_MODULE_MODEL_UUID`, `ACTION`, `SERVICE_TYPE`, `VERSION`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_COMPONENT_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (12,NULL,'vfModule','VID_DEFAULT','updateInstance',NULL,'1','VID_DEFAULT recipe t','/mso/async/services/UpdateVfModuleInfra',null,180,'2017-10-05 18:52:03');
+
+INSERT INTO `vnf_recipe` (`id`, `VF_MODULE_ID`, `ACTION`, `SERVICE_TYPE`, `VERSION_STR`, `VNF_TYPE`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (1,NULL,'CREATE',NULL,'1','*','Recipe Match All for VNFs if no custom flow exists','/mso/workflow/services/CreateGenericVNFV1',NULL,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_recipe` (`id`, `VF_MODULE_ID`, `ACTION`, `SERVICE_TYPE`, `VERSION_STR`, `VNF_TYPE`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (2,NULL,'DELETE',NULL,'1','*','Recipe Match All for VNFs if no custom flow exists','/mso/async/services//deleteGenericVNFV1',NULL,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_recipe` (`id`, `VF_MODULE_ID`, `ACTION`, `SERVICE_TYPE`, `VERSION_STR`, `VNF_TYPE`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (3,NULL,'UPDATE',NULL,'1','*','Recipe Match All for VNFs if no custom flow exists','/mso/workflow/services/updateGenericVNFV1',NULL,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_recipe` (`id`, `VF_MODULE_ID`, `ACTION`, `SERVICE_TYPE`, `VERSION_STR`, `VNF_TYPE`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (4,'*','CREATE_VF_MODULE',NULL,'1',NULL,'Recipe Match All for VNFs if no custom flow exists','/mso/async/services/CreateVfModule',NULL,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_recipe` (`id`, `VF_MODULE_ID`, `ACTION`, `SERVICE_TYPE`, `VERSION_STR`, `VNF_TYPE`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (5,'*','DELETE_VF_MODULE',NULL,'1',NULL,'Recipe Match All for VNFs if no custom flow exists','/mso/async/services/DeleteVfModule',NULL,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_recipe` (`id`, `VF_MODULE_ID`, `ACTION`, `SERVICE_TYPE`, `VERSION_STR`, `VNF_TYPE`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (6,'*','UPDATE_VF_MODULE',NULL,'1',NULL,'Recipe Match All for VNFs if no custom flow exists','/mso/async/services/UpdateVfModule',NULL,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_recipe` (`id`, `VF_MODULE_ID`, `ACTION`, `SERVICE_TYPE`, `VERSION_STR`, `VNF_TYPE`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (7,NULL,'createInstance',NULL,'1','VID_DEFAULT','VID_DEFAULT recipe to create VNF if no custom BPMN flow is found','/mso/async/services/CreateVnfInfra',NULL,180,'2017-10-05 18:52:03');
+INSERT INTO `vnf_recipe` (`id`, `VF_MODULE_ID`, `ACTION`, `SERVICE_TYPE`, `VERSION_STR`, `VNF_TYPE`, `DESCRIPTION`, `ORCHESTRATION_URI`, `VNF_PARAM_XSD`, `RECIPE_TIMEOUT`, `CREATION_TIMESTAMP`) VALUES (8,NULL,'deleteInstance',NULL,'1','VID_DEFAULT','VID_DEFAULT recipe to delete VNF if no custom BPMN flow is found','/mso/async/services/DeleteVnfInfra',NULL,180,'2017-10-05 18:52:03');
+
+SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
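Not part of the patch: a minimal post-load check, assuming the same root credentials as the init scripts, confirming that the databases and catalog recipes seeded by this file are in place.

#!/bin/sh
# Hypothetical verification that create_mso_db-default.sql was applied.
mysql -uroot -p"$MYSQL_ROOT_PASSWORD" -e "SHOW DATABASES LIKE 'mso_%';"
mysql -uroot -p"$MYSQL_ROOT_PASSWORD" -e "SELECT COUNT(*) FROM mso_catalog.network_recipe;"
mysql -uroot -p"$MYSQL_ROOT_PASSWORD" -e "SELECT ACTION, ORCHESTRATION_URI FROM mso_catalog.service_recipe;"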
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/demo-dns/create_mso_db-demo-dns.sql b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/demo-dns/create_mso_db-demo-dns.sql
new file mode 100644
index 000000000..b5063defd
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/demo-dns/create_mso_db-demo-dns.sql
@@ -0,0 +1,77 @@
+SOURCE ../default/create_mso_db-default.sql
+
+USE `mso_requests`;
+DROP USER 'mso';
+CREATE USER 'mso';
+GRANT ALL on mso_requests.* to 'mso' identified by 'mso123' with GRANT OPTION;
+FLUSH PRIVILEGES;
+
+USE `mso_catalog`;
+DROP USER 'catalog';
+CREATE USER 'catalog';
+GRANT ALL on mso_catalog.* to 'catalog' identified by 'catalog123' with GRANT OPTION;
+FLUSH PRIVILEGES;
+
+SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
+
+INSERT INTO `heat_environment` (`ARTIFACT_UUID`, `NAME`, `VERSION`, `DESCRIPTION`, `BODY`, `ARTIFACT_CHECKSUM`, `CREATION_TIMESTAMP`) VALUES ('EnvArtifact-UUID1','base_vlb.env','1.0','BASE VLB ENV file','parameters:\n vlb_image_name: Ubuntu 14.04 LTS (Trusty Tahr) (PVHVM)\n vlb_flavor_name: 4 GB General Purpose v1\n public_net_id: 00000000-0000-0000-0000-000000000000\n vlb_private_net_id: zdfw1lb01_private\n ecomp_private_net_id: oam_ecomp\n vlb_private_net_cidr: 192.168.10.0/24\n ecomp_private_net_cidr: 192.168.9.0/24\n vlb_private_ip_0: 192.168.10.111\n vlb_private_ip_1: 192.168.9.111\n vdns_private_ip_0: 192.168.10.211\n vdns_private_ip_1: 192.168.9.211\n vlb_name_0: zdfw1lb01lb01\n vdns_name_0: zdfw1lb01dns01\n vnf_id: vLoadBalancer_demo_app\n vf_module_id: vLoadBalancer\n webserver_ip: 162.242.237.182\n dcae_collector_ip: 192.168.9.1\n key_name: vlb_key\n pub_key: INSERT YOUR PUBLIC KEY HERE','MANUAL RECORD','2016-11-14 13:04:07');
+
+INSERT INTO `heat_environment` (`ARTIFACT_UUID`, `NAME`, `VERSION`, `DESCRIPTION`, `BODY`, `ARTIFACT_CHECKSUM`, `CREATION_TIMESTAMP`) VALUES ('EnvArtifact-UUID2','dnsscaling.env','1.0','DNS Scaling ENV file','parameters:\n vlb_image_name: Ubuntu 14.04 LTS (Trusty Tahr) (PVHVM)\n vlb_flavor_name: 4 GB General Purpose v1\n public_net_id: 00000000-0000-0000-0000-000000000000\n vlb_private_net_id: zdfw1lb01_private\n ecomp_private_net_id: oam_ecomp\n vlb_private_ip_0: 192.168.10.111\n vlb_private_ip_1: 192.168.9.111\n vdns_private_ip_0: 192.168.10.222\n vdns_private_ip_1: 192.168.9.222\n vdns_name_0: zdfw1lb01dns02\n vnf_id: vLoadBalancer_demo_app\n vf_module_id: vLoadBalancer\n webserver_ip: 162.242.237.182\n dcae_collector_ip: 192.168.9.1\n key_name: vlb_key\n pub_key: INSERT YOUR PUBLIC KEY HERE','MANUAL RECORD','2016-11-14 13:04:07');
+
+INSERT INTO `heat_template` (`ARTIFACT_UUID`, `NAME`, `VERSION`, `DESCRIPTION`, `BODY`, `TIMEOUT_MINUTES`, `ARTIFACT_CHECKSUM`, `CREATION_TIMESTAMP`) VALUES ('Artifact-UUID1','base_vlb.yaml','1.0','Base VLB Heat','heat_template_version: 2013-05-23\n\ndescription: Heat template to deploy vLoadBalancer/vDNS demo app for OpenECOMP\n\nparameters:\n vlb_image_name:\n type: string\n label: Image name or ID\n description: Image to be used for compute instance\n vlb_flavor_name:\n type: string\n label: Flavor\n description: Type of instance (flavor) to be used\n public_net_id:\n type: string\n label: Public network name or ID\n description: Public network that enables remote connection to VNF\n vlb_private_net_id:\n type: string\n label: vLoadBalancer private network name or ID\n description: Private network that connects vLoadBalancer with vDNSs\n ecomp_private_net_id:\n type: string\n label: ECOMP management network name or ID\n description: Private network that connects ECOMP component and the VNF\n vlb_private_net_cidr:\n type: string\n label: vLoadBalancer private network CIDR\n description: The CIDR of the vLoadBalancer private network\n ecomp_private_net_cidr:\n type: string\n label: ECOMP private network CIDR\n description: The CIDR of the protected private network\n vlb_private_ip_0:\n type: string\n label: vLoadBalancer private IP address towards the private network\n description: Private IP address that is assigned to the vLoadBalancer to communicate with the vDNSs\n vlb_private_ip_1:\n type: string\n label: vLoadBalancer private IP address towards the ECOMP management network\n description: Private IP address that is assigned to the vLoadBalancer to communicate with ECOMP components\n vdns_private_ip_0:\n type: string\n label: vDNS private IP address towards the private network\n description: Private IP address that is assigned to the vDNS to communicate with the vLoadBalancer\n vdns_private_ip_1:\n type: string\n label: vDNS private IP address towards the ECOMP management network\n description: Private IP address that is assigned to the vDNS to communicate with ECOMP components\n vlb_name_0:\n type: string\n label: vLoadBalancer name\n description: Name of the vLoadBalancer\n vdns_name_0:\n type: string\n label: vDNS name\n description: Name of the vDNS\n vnf_id:\n type: string\n label: VNF ID\n description: The VNF ID is provided by ECOMP\n vf_module_id:\n type: string\n label: vFirewall module ID\n description: The vLoadBalancer Module ID is provided by ECOMP\n webserver_ip:\n type: string\n label: Webserver IP address\n description: IP address of the webserver that hosts the source code and binaries\n dcae_collector_ip:\n type: string\n label: DCAE collector IP address\n description: IP address of the DCAE collector\n key_name:\n type: string\n label: Key pair name\n description: Public/Private key pair name\n pub_key:\n type: string\n label: Public key\n description: Public key to be installed on the compute instance\n\nresources:\n my_keypair:\n type: OS::Nova::KeyPair\n properties:\n name: { get_param: key_name }\n public_key: { get_param: pub_key }\n save_private_key: false\n\n vlb_private_network:\n type: OS::Neutron::Net\n properties:\n name: { get_param: vlb_private_net_id }\n\n vlb_private_subnet:\n type: OS::Neutron::Subnet\n properties:\n name: { get_param: vlb_private_net_id }\n network_id: { get_resource: vlb_private_network }\n cidr: { get_param: vlb_private_net_cidr }\n\n vlb_0:\n type: OS::Nova::Server\n properties:\n image: { get_param: vlb_image_name }\n flavor: { 
get_param: vlb_flavor_name }\n name: { get_param: vlb_name_0 }\n key_name: { get_resource: my_keypair }\n networks:\n - network: { get_param: public_net_id }\n - port: { get_resource: vlb_private_0_port }\n - port: { get_resource: vlb_private_1_port }\n metadata: {vnf_id: { get_param: vnf_id }, vf_module_id: { get_param: vf_module_id }}\n user_data_format: RAW\n user_data:\n str_replace:\n params:\n __webserver__: { get_param: webserver_ip }\n __dcae_collector_ip__: { get_param: dcae_collector_ip }\n __local_private_ipaddr__: { get_param: vlb_private_ip_0 }\n template: |\n #!/bin/bash\n\n WEBSERVER_IP=__webserver__\n DCAE_COLLECTOR_IP=__dcae_collector_ip__\n LOCAL_PRIVATE_IPADDR=__local_private_ipaddr__\n\n mkdir /opt/config\n cd /opt\n wget http://$WEBSERVER_IP/demo_repo/v_lb_init.sh\n wget http://$WEBSERVER_IP/demo_repo/vlb.sh\n chmod +x v_lb_init.sh\n chmod +x vlb.sh\n echo $WEBSERVER_IP > config/webserver_ip.txt\n echo $DCAE_COLLECTOR_IP > config/dcae_collector_ip.txt\n echo $LOCAL_PRIVATE_IPADDR > config/local_private_ipaddr.txt\n echo \"no\" > config/install.txt\n LOCAL_PUBLIC_IPADDR=$(ifconfig eth0 | grep \"inet addr\" | tr -s \' \' | cut -d\' \' -f3 | cut -d\':\' -f2)\n echo $LOCAL_PUBLIC_IPADDR > config/local_public_ipaddr.txt\n mv vlb.sh /etc/init.d\n update-rc.d vlb.sh defaults\n ./v_lb_init.sh\n\n vlb_private_0_port:\n type: OS::Neutron::Port\n properties:\n network: { get_resource: vlb_private_network }\n fixed_ips: [{\"subnet\": { get_resource: vlb_private_subnet }, \"ip_address\": { get_param: vlb_private_ip_0 }}]\n\n vlb_private_1_port:\n type: OS::Neutron::Port\n properties:\n network: { get_param: ecomp_private_net_id }\n fixed_ips: [{\"subnet\": { get_param: ecomp_private_net_id }, \"ip_address\": { get_param: vlb_private_ip_1 }}]\n\n vdns_0:\n type: OS::Nova::Server\n properties:\n image: { get_param: vlb_image_name }\n flavor: { get_param: vlb_flavor_name }\n name: { get_param: vdns_name_0 }\n key_name: { get_resource: my_keypair }\n networks:\n - network: { get_param: public_net_id }\n - port: { get_resource: vdns_private_0_port }\n - port: { get_resource: vdns_private_1_port }\n metadata: {vnf_id: { get_param: vnf_id }, vf_module_id: { get_param: vf_module_id }}\n user_data_format: RAW\n user_data:\n str_replace:\n params:\n __webserver__: { get_param: webserver_ip }\n __lb_oam_int__ : { get_param: vlb_private_ip_1 }\n __lb_private_ipaddr__: { get_param: vlb_private_ip_0 }\n __local_private_ipaddr__: { get_param: vdns_private_ip_0 }\n template: |\n #!/bin/bash\n\n WEBSERVER_IP=__webserver__\n LB_OAM_INT=__lb_oam_int__\n LB_PRIVATE_IPADDR=__lb_private_ipaddr__\n LOCAL_PRIVATE_IPADDR=__local_private_ipaddr__\n\n mkdir /opt/config\n cd /opt\n wget http://$WEBSERVER_IP/demo_repo/v_dns_init.sh\n wget http://$WEBSERVER_IP/demo_repo/vdns.sh\n chmod +x v_dns_init.sh\n chmod +x vdns.sh\n echo $WEBSERVER_IP > config/webserver_ip.txt\n echo $LB_OAM_INT > config/lb_oam_int.txt\n echo $LB_PRIVATE_IPADDR > config/lb_private_ipaddr.txt\n echo $LOCAL_PRIVATE_IPADDR > config/local_private_ipaddr.txt\n echo \"no\" > config/install.txt\n mv vdns.sh /etc/init.d\n update-rc.d vdns.sh defaults\n ./v_dns_init.sh\n\n vdns_private_0_port:\n type: OS::Neutron::Port\n properties:\n network: { get_resource: vlb_private_network }\n fixed_ips: [{\"subnet\": { get_resource: vlb_private_subnet }, \"ip_address\": { get_param: vdns_private_ip_0 }}]\n\n vdns_private_1_port:\n type: OS::Neutron::Port\n properties:\n network: { get_param: ecomp_private_net_id }\n fixed_ips: [{\"subnet\": { get_param: 
ecomp_private_net_id }, \"ip_address\": { get_param: vdns_private_ip_1 }}]\n',300,'MANUAL RECORD','2016-11-14 13:04:07');
+
+INSERT INTO `heat_template` (`ARTIFACT_UUID`, `NAME`, `VERSION`, `DESCRIPTION`, `BODY`, `TIMEOUT_MINUTES`, `ARTIFACT_CHECKSUM`, `CREATION_TIMESTAMP`) VALUES ('Artifact-UUID2','dnsscaling.yaml','1.0','DNS Scaling Heat','heat_template_version: 2013-05-23\n\ndescription: Heat template to deploy a vDNS for OpenECOMP (scaling-up scenario)\n\nparameters:\n vlb_image_name:\n type: string\n label: Image name or ID\n description: Image to be used for compute instance\n vlb_flavor_name:\n type: string\n label: Flavor\n description: Type of instance (flavor) to be used\n public_net_id:\n type: string\n label: Public network name or ID\n description: Public network that enables remote connection to VNF\n vlb_private_net_id:\n type: string\n label: vLoadBalancer private network name or ID\n description: Private network that connects vLoadBalancer with vDNSs\n ecomp_private_net_id:\n type: string\n label: ECOMP management network name or ID\n description: Private network that connects ECOMP component and the VNF\n vlb_private_ip_0:\n type: string\n label: vLoadBalancer private IP address towards the private network\n description: Private IP address that is assigned to the vLoadBalancer to communicate with the vDNSs\n vlb_private_ip_1:\n type: string\n label: vLoadBalancer private IP address towards the ECOMP management network\n description: Private IP address that is assigned to the vLoadBalancer to communicate with ECOMP components\n vdns_private_ip_0:\n type: string\n label: vDNS private IP address towards the private network\n description: Private IP address that is assigned to the vDNS to communicate with the vLoadBalancer\n vdns_private_ip_1:\n type: string\n label: vDNS private IP address towards the ECOMP management network\n description: Private IP address that is assigned to the vDNS to communicate with ECOMP components\n vdns_name_0:\n type: string\n label: vDNS name\n description: Name of the vDNS\n vnf_id:\n type: string\n label: VNF ID\n description: The VNF ID is provided by ECOMP\n vf_module_id:\n type: string\n label: vFirewall module ID\n description: The vLoadBalancer Module ID is provided by ECOMP\n webserver_ip:\n type: string\n label: Webserver IP address\n description: IP address of the webserver that hosts the source code and binaries\n dcae_collector_ip:\n type: string\n label: DCAE collector IP address\n description: IP address of the DCAE collector\n key_name:\n type: string\n label: Key pair name\n description: Public/Private key pair name\n pub_key:\n type: string\n label: Public key\n description: Public key to be installed on the compute instance\n\nresources:\n vdns_0:\n type: OS::Nova::Server\n properties:\n image: { get_param: vlb_image_name }\n flavor: { get_param: vlb_flavor_name }\n name: { get_param: vdns_name_0 }\n key_name: { get_param: key_name }\n networks:\n - network: { get_param: public_net_id }\n - port: { get_resource: vdns_private_0_port }\n - port: { get_resource: vdns_private_1_port }\n metadata: {vnf_id: { get_param: vnf_id }, vf_module_id: { get_param: vf_module_id }}\n user_data_format: RAW\n user_data:\n str_replace:\n params:\n __webserver__: { get_param: webserver_ip }\n __lb_oam_int__ : { get_param: vlb_private_ip_1 }\n __lb_private_ipaddr__: { get_param: vlb_private_ip_0 }\n __local_private_ipaddr__: { get_param: vdns_private_ip_0 }\n template: |\n #!/bin/bash\n\n WEBSERVER_IP=__webserver__\n LB_OAM_INT=__lb_oam_int__\n LB_PRIVATE_IPADDR=__lb_private_ipaddr__\n LOCAL_PRIVATE_IPADDR=__local_private_ipaddr__\n\n mkdir /opt/config\n cd /opt\n wget 
http://$WEBSERVER_IP/demo_repo/v_dns_init.sh\n wget http://$WEBSERVER_IP/demo_repo/vdns.sh\n chmod +x v_dns_init.sh\n chmod +x vdns.sh\n echo $WEBSERVER_IP > config/webserver_ip.txt\n echo $LB_OAM_INT > config/lb_oam_int.txt\n echo $LB_PRIVATE_IPADDR > config/lb_private_ipaddr.txt\n echo $LOCAL_PRIVATE_IPADDR > config/local_private_ipaddr.txt\n echo \"no\" > config/install.txt\n mv vdns.sh /etc/init.d\n update-rc.d vdns.sh defaults\n ./v_dns_init.sh\n\n vdns_private_0_port:\n type: OS::Neutron::Port\n properties:\n network: { get_param: vlb_private_net_id }\n fixed_ips: [{\"subnet\": { get_param: vlb_private_net_id }, \"ip_address\": { get_param: vdns_private_ip_0 }}]\n\n vdns_private_1_port:\n type: OS::Neutron::Port\n properties:\n network: { get_param: ecomp_private_net_id }\n fixed_ips: [{\"subnet\": { get_param: ecomp_private_net_id }, \"ip_address\": { get_param: vdns_private_ip_1 }}]\n',300,'MANUAL RECORD','2016-11-14 13:04:07');
+
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','dcae_collector_ip','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','ecomp_private_net_cidr','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','ecomp_private_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','key_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','public_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','pub_key','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vdns_private_ip_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vdns_private_ip_1','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vdns_name_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vf_module_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vlb_flavor_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vlb_image_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vlb_name_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vlb_private_ip_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vlb_private_ip_1','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vlb_private_net_cidr','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vlb_private_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','vnf_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID1','webserver_ip','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','dcae_collector_ip','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','ecomp_private_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','key_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','public_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','pub_key','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','vdns_private_ip_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','vdns_private_ip_1','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','vdns_name_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','vf_module_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','vlb_flavor_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','vlb_image_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','vlb_private_ip_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','vlb_private_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','vnf_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID2','webserver_ip','','string',NULL);
+
+INSERT INTO `service` (`MODEL_UUID`, `MODEL_NAME`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `DESCRIPTION`, `CREATION_TIMESTAMP`, `TOSCA_CSAR_ARTIFACT_UUID`) VALUES ('1e34774e-715e-4fd6-bd09-7b654622f35i','dns-service','585822c8-4027-4f84-ba50-e9248606f111','1.0','dns service for unit test','2016-11-14 13:04:07',NULL);
+
+INSERT INTO `service_to_resource_customizations` (`SERVICE_MODEL_UUID`, `RESOURCE_MODEL_CUSTOMIZATION_UUID`, `MODEL_TYPE`, `CREATION_TIMESTAMP`) VALUES ('1e34774e-715e-4fd6-bd09-7b654622f35i','302aa6be-a9fe-11e7-8b4b-0242ac120002','vnf','2017-10-05 18:51:28');
+
+INSERT INTO `vf_module` (`MODEL_UUID`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `MODEL_NAME`, `DESCRIPTION`, `IS_BASE`, `HEAT_TEMPLATE_ARTIFACT_UUID`, `VOL_HEAT_TEMPLATE_ARTIFACT_UUID`, `CREATION_TIMESTAMP`, `VNF_RESOURCE_MODEL_UUID`) VALUES ('1e34774e-715e-4fd5-bd08-7b654622f33e.VF_RI1_DNS::module-1::module-1.group','585822c7-4027-4f84-ba50-e9248606f132','1.0','VF_RI1_DNS::module-1',NULL,1,'Artifact-UUID1',NULL,'2016-11-14 13:04:07','585822c7-4027-4f84-ba50-e9248606f131');
+INSERT INTO `vf_module` (`MODEL_UUID`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `MODEL_NAME`, `DESCRIPTION`, `IS_BASE`, `HEAT_TEMPLATE_ARTIFACT_UUID`, `VOL_HEAT_TEMPLATE_ARTIFACT_UUID`, `CREATION_TIMESTAMP`, `VNF_RESOURCE_MODEL_UUID`) VALUES ('1e34774e-715e-4fd5-bd08-7b654622f33e.VF_RI1_DNS::module-2::module-1.group','585822c7-4027-4f84-ba50-e9248606f133','1.0','VF_RI1_DNS::module-2',NULL,0,'Artifact-UUID2',NULL,'2016-11-14 13:04:07','585822c7-4027-4f84-ba50-e9248606f131');
+
+INSERT INTO `vf_module_customization` (`MODEL_CUSTOMIZATION_UUID`, `LABEL`, `INITIAL_COUNT`, `MIN_INSTANCES`, `MAX_INSTANCES`, `AVAILABILITY_ZONE_COUNT`, `HEAT_ENVIRONMENT_ARTIFACT_UUID`, `VOL_ENVIRONMENT_ARTIFACT_UUID`, `CREATION_TIMESTAMP`, `VF_MODULE_MODEL_UUID`) VALUES ('30316d81-a9fe-11e7-8b4b-0242ac120002',NULL,1,0,NULL,NULL,'EnvArtifact-UUID1',NULL,'2017-10-05 18:51:25','1e34774e-715e-4fd5-bd08-7b654622f33e.VF_RI1_DNS::module-1::module-1.group');
+INSERT INTO `vf_module_customization` (`MODEL_CUSTOMIZATION_UUID`, `LABEL`, `INITIAL_COUNT`, `MIN_INSTANCES`, `MAX_INSTANCES`, `AVAILABILITY_ZONE_COUNT`, `HEAT_ENVIRONMENT_ARTIFACT_UUID`, `VOL_ENVIRONMENT_ARTIFACT_UUID`, `CREATION_TIMESTAMP`, `VF_MODULE_MODEL_UUID`) VALUES ('303170ae-a9fe-11e7-8b4b-0242ac120002',NULL,0,0,NULL,NULL,'EnvArtifact-UUID2',NULL,'2017-10-05 18:51:25','1e34774e-715e-4fd5-bd08-7b654622f33e.VF_RI1_DNS::module-2::module-1.group');
+INSERT INTO `vnf_res_custom_to_vf_module_custom` (`VNF_RESOURCE_CUST_MODEL_CUSTOMIZATION_UUID`, `VF_MODULE_CUST_MODEL_CUSTOMIZATION_UUID`, `CREATION_TIMESTAMP`) VALUES ('302aa6be-a9fe-11e7-8b4b-0242ac120002','30316d81-a9fe-11e7-8b4b-0242ac120002','2017-10-05 18:51:26');
+
+INSERT INTO `vnf_res_custom_to_vf_module_custom` (`VNF_RESOURCE_CUST_MODEL_CUSTOMIZATION_UUID`, `VF_MODULE_CUST_MODEL_CUSTOMIZATION_UUID`, `CREATION_TIMESTAMP`) VALUES ('302aa6be-a9fe-11e7-8b4b-0242ac120002','303170ae-a9fe-11e7-8b4b-0242ac120002','2017-10-05 18:51:26');
+
+INSERT INTO `vnf_resource` (`ORCHESTRATION_MODE`, `DESCRIPTION`, `CREATION_TIMESTAMP`, `MODEL_UUID`, `AIC_VERSION_MIN`, `AIC_VERSION_MAX`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `MODEL_NAME`, `TOSCA_NODE_TYPE`, `HEAT_TEMPLATE_ARTIFACT_UUID`) VALUES ('HEAT','dns service for unit test1707MIGRATED','2016-11-14 13:04:07','585822c7-4027-4f84-ba50-e9248606f131',NULL,NULL,'585822c7-4027-4f84-ba50-e9248606f112','1.0','DNSResource',NULL,NULL);
+
+INSERT INTO `vnf_resource_customization` (`MODEL_CUSTOMIZATION_UUID`, `MODEL_INSTANCE_NAME`, `MIN_INSTANCES`, `MAX_INSTANCES`, `AVAILABILITY_ZONE_MAX_COUNT`, `NF_TYPE`, `NF_ROLE`, `NF_FUNCTION`, `NF_NAMING_CODE`, `CREATION_TIMESTAMP`, `VNF_RESOURCE_MODEL_UUID`) VALUES ('302aa6be-a9fe-11e7-8b4b-0242ac120002','DNSResource-1',NULL,NULL,NULL,NULL,NULL,NULL,NULL,'2017-10-05 18:51:25','585822c7-4027-4f84-ba50-e9248606f131');
+
+SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/demo-vfw/create_mso_db-demo-vfw.sql b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/demo-vfw/create_mso_db-demo-vfw.sql
new file mode 100644
index 000000000..15001050b
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/bulkload-files/demo-vfw/create_mso_db-demo-vfw.sql
@@ -0,0 +1,59 @@
+SOURCE ../default/create_mso_db-default.sql
+
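+-- Build on the default catalog schema sourced above, then recreate the
+-- 'mso' and 'catalog' application users before loading the vFW demo data.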
+USE `mso_requests`;
+DROP USER 'mso';
+CREATE USER 'mso';
+GRANT ALL on mso_requests.* to 'mso' identified by 'mso123' with GRANT OPTION;
+FLUSH PRIVILEGES;
+
+USE `mso_catalog`;
+DROP USER 'catalog';
+CREATE USER 'catalog';
+GRANT ALL on mso_catalog.* to 'catalog' identified by 'catalog123' with GRANT OPTION;
+FLUSH PRIVILEGES;
+
+SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
+
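+-- Seed the MSO catalog for the vFW demo: Heat environment, base Heat
+-- template and its parameters, plus the service, VF module, and VNF
+-- resource/customization records that reference them.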
+INSERT INTO `heat_environment` (`ARTIFACT_UUID`, `NAME`, `VERSION`, `DESCRIPTION`, `BODY`, `ARTIFACT_CHECKSUM`, `CREATION_TIMESTAMP`) VALUES ('EnvArtifact-UUID3','base_vfw.env','1.0','base_vfw ENV file','parameters:\n vfw_image_name: Ubuntu 14.04 LTS (Trusty Tahr) (PVHVM)\n vfw_flavor_name: 4 GB General Purpose v1\n public_net_id: 00000000-0000-0000-0000-000000000000\n unprotected_private_net_id: zdfw1fwl01_unprotected\n protected_private_net_id: zdfw1fwl01_protected\n ecomp_private_net_id: oam_ecomp\n unprotected_private_net_cidr: 192.168.10.0/24\n protected_private_net_cidr: 192.168.20.0/24\n ecomp_private_net_cidr: 192.168.9.0/24\n vfw_private_ip_0: 192.168.10.100\n vfw_private_ip_1: 192.168.20.100\n vfw_private_ip_2: 192.168.9.100\n vpg_private_ip_0: 192.168.10.200\n vpg_private_ip_1: 192.168.9.200\n vsn_private_ip_0: 192.168.20.250\n vsn_private_ip_1: 192.168.9.250\n vfw_name_0: zdfw1fwl01fwl01\n vpg_name_0: zdfw1fwl01pgn01\n vsn_name_0: zdfw1fwl01snk01\n vnf_id: vFirewall_demo_app\n vf_module_id: vFirewall\n webserver_ip: 162.242.237.182\n dcae_collector_ip: 192.168.9.1\n key_name: vfw_key\n pub_key: INSERT YOUR PUBLIC KEY HERE','MANUAL RECORD','2016-11-14 13:04:07');
+
+INSERT INTO `heat_template` (`ARTIFACT_UUID`, `NAME`, `VERSION`, `DESCRIPTION`, `BODY`, `TIMEOUT_MINUTES`, `ARTIFACT_CHECKSUM`, `CREATION_TIMESTAMP`) VALUES ('Artifact-UUID3','base_vfw.yaml','1.0','Base VFW Heat','heat_template_version: 2013-05-23\n\ndescription: Heat template to deploy vFirewall demo app for OpenECOMP\n\nparameters:\n vfw_image_name:\n type: string\n label: Image name or ID\n description: Image to be used for compute instance\n vfw_flavor_name:\n type: string\n label: Flavor\n description: Type of instance (flavor) to be used\n public_net_id:\n type: string\n label: Public network name or ID\n description: Public network that enables remote connection to VNF\n unprotected_private_net_id:\n type: string\n label: Unprotected private network name or ID\n description: Private network that connects vPacketGenerator with vFirewall\n protected_private_net_id:\n type: string\n label: Protected private network name or ID\n description: Private network that connects vFirewall with vSink\n ecomp_private_net_id:\n type: string\n label: ECOMP management network name or ID\n description: Private network that connects ECOMP component and the VNF\n unprotected_private_net_cidr:\n type: string\n label: Unprotected private network CIDR\n description: The CIDR of the unprotected private network\n protected_private_net_cidr:\n type: string\n label: Protected private network CIDR\n description: The CIDR of the protected private network\n ecomp_private_net_cidr:\n type: string\n label: ECOMP private network CIDR\n description: The CIDR of the protected private network\n vfw_private_ip_0:\n type: string\n label: vFirewall private IP address towards the unprotected network\n description: Private IP address that is assigned to the vFirewall to communicate with the vPacketGenerator\n vfw_private_ip_1:\n type: string\n label: vFirewall private IP address towards the protected network\n description: Private IP address that is assigned to the vFirewall to communicate with the vSink\n vfw_private_ip_2:\n type: string\n label: vFirewall private IP address towards the ECOMP management network\n description: Private IP address that is assigned to the vFirewall to communicate with ECOMP components\n vpg_private_ip_0:\n type: string\n label: vPacketGenerator private IP address towards the unprotected network\n description: Private IP address that is assigned to the vPacketGenerator to communicate with the vFirewall\n vpg_private_ip_1:\n type: string\n label: vPacketGenerator private IP address towards the ECOMP management network\n description: Private IP address that is assigned to the vPacketGenerator to communicate with ECOMP components\n vsn_private_ip_0:\n type: string\n label: vSink private IP address towards the protected network\n description: Private IP address that is assigned to the vSink to communicate with the vFirewall\n vsn_private_ip_1:\n type: string\n label: vSink private IP address towards the ECOMP management network\n description: Private IP address that is assigned to the vSink to communicate with ECOMP components\n vfw_name_0:\n type: string\n label: vFirewall name\n description: Name of the vFirewall\n vpg_name_0:\n type: string\n label: vPacketGenerator name\n description: Name of the vPacketGenerator\n vsn_name_0:\n type: string\n label: vSink name\n description: Name of the vSink\n vnf_id:\n type: string\n label: VNF ID\n description: The VNF ID is provided by ECOMP\n vf_module_id:\n type: string\n label: vFirewall module ID\n description: The vFirewall Module ID is provided by 
ECOMP\n webserver_ip:\n type: string\n label: Webserver IP address\n description: IP address of the webserver that hosts the source code and binaries\n dcae_collector_ip:\n type: string\n label: DCAE collector IP address\n description: IP address of the DCAE collector\n key_name:\n type: string\n label: Key pair name\n description: Public/Private key pair name\n pub_key:\n type: string\n label: Public key\n description: Public key to be installed on the compute instance\n\nresources:\n my_keypair:\n type: OS::Nova::KeyPair\n properties:\n name: { get_param: key_name }\n public_key: { get_param: pub_key }\n save_private_key: false\n\n unprotected_private_network:\n type: OS::Neutron::Net\n properties:\n name: { get_param: unprotected_private_net_id }\n\n protected_private_network:\n type: OS::Neutron::Net\n properties:\n name: { get_param: protected_private_net_id }\n\n unprotected_private_subnet:\n type: OS::Neutron::Subnet\n properties:\n network_id: { get_resource: unprotected_private_network }\n cidr: { get_param: unprotected_private_net_cidr }\n\n protected_private_subnet:\n type: OS::Neutron::Subnet\n properties:\n network_id: { get_resource: protected_private_network }\n cidr: { get_param: protected_private_net_cidr }\n\n vfw_0:\n type: OS::Nova::Server\n properties:\n image: { get_param: vfw_image_name }\n flavor: { get_param: vfw_flavor_name }\n name: { get_param: vfw_name_0 }\n key_name: { get_resource: my_keypair }\n networks:\n - network: { get_param: public_net_id }\n - port: { get_resource: vfw_private_0_port }\n - port: { get_resource: vfw_private_1_port }\n - port: { get_resource: vfw_private_2_port }\n metadata: {vnf_id: { get_param: vnf_id }, vf_module_id: { get_param: vf_module_id }}\n user_data_format: RAW\n user_data:\n str_replace:\n params:\n __webserver__: { get_param: webserver_ip }\n __dcae_collector_ip__ : { get_param: dcae_collector_ip }\n template: |\n #!/bin/bash\n\n WEBSERVER_IP=__webserver__\n DCAE_COLLECTOR_IP=__dcae_collector_ip__\n\n mkdir /opt/config\n cd /opt\n wget http://$WEBSERVER_IP/demo_repo/v_firewall_init.sh\n wget http://$WEBSERVER_IP/demo_repo/vfirewall.sh\n chmod +x v_firewall_init.sh\n chmod +x vfirewall.sh\n echo $WEBSERVER_IP > config/webserver_ip.txt\n echo $DCAE_COLLECTOR_IP > config/dcae_collector_ip.txt\n echo \"no\" > config/install.txt\n mv vfirewall.sh /etc/init.d\n sudo update-rc.d vfirewall.sh defaults\n ./v_firewall_init.sh\n\n vfw_private_0_port:\n type: OS::Neutron::Port\n properties:\n network: { get_resource: unprotected_private_network }\n fixed_ips: [{\"subnet\": { get_resource: unprotected_private_subnet }, \"ip_address\": { get_param: vfw_private_ip_0 }}]\n\n vfw_private_1_port:\n type: OS::Neutron::Port\n properties:\n network: { get_resource: protected_private_network }\n fixed_ips: [{\"subnet\": { get_resource: protected_private_subnet }, \"ip_address\": { get_param: vfw_private_ip_1 }}]\n\n vfw_private_2_port:\n type: OS::Neutron::Port\n properties:\n network: { get_param: ecomp_private_net_id }\n fixed_ips: [{\"subnet\": { get_param: ecomp_private_net_id }, \"ip_address\": { get_param: vfw_private_ip_2 }}]\n\n vpg_0:\n type: OS::Nova::Server\n properties:\n image: { get_param: vfw_image_name }\n flavor: { get_param: vfw_flavor_name }\n name: { get_param: vpg_name_0 }\n key_name: { get_resource: my_keypair }\n networks:\n - network: { get_param: public_net_id }\n - port: { get_resource: vpg_private_0_port }\n - port: { get_resource: vpg_private_1_port }\n metadata: {vnf_id: { get_param: vnf_id }, vf_module_id: { 
get_param: vf_module_id }}\n user_data_format: RAW\n user_data:\n str_replace:\n params:\n __webserver__: { get_param: webserver_ip }\n __fw_ipaddr__: { get_param: vfw_private_ip_0 }\n __protected_net_cidr__: { get_param: protected_private_net_cidr }\n __sink_ipaddr__: { get_param: vsn_private_ip_0 }\n template: |\n #!/bin/bash\n\n WEBSERVER_IP=__webserver__\n FW_IPADDR=__fw_ipaddr__\n PROTECTED_NET_CIDR=__protected_net_cidr__\n SINK_IPADDR=__sink_ipaddr__\n\n mkdir /opt/config\n cd /opt\n wget http://$WEBSERVER_IP/demo_repo/v_packetgen_init.sh\n wget http://$WEBSERVER_IP/demo_repo/vpacketgen.sh\n chmod +x v_packetgen_init.sh\n chmod +x vpacketgen.sh\n echo $WEBSERVER_IP > config/webserver_ip.txt\n echo $FW_IPADDR > config/fw_ipaddr.txt\n echo $PROTECTED_NET_CIDR > config/protected_net_cidr.txt\n echo $SINK_IPADDR > config/sink_ipaddr.txt\n echo \"no\" > config/install.txt\n mv vpacketgen.sh /etc/init.d\n sudo update-rc.d vpacketgen.sh defaults\n ./v_packetgen_init.sh\n\n vpg_private_0_port:\n type: OS::Neutron::Port\n properties:\n network: { get_resource: unprotected_private_network }\n fixed_ips: [{\"subnet\": { get_resource: unprotected_private_subnet }, \"ip_address\": { get_param: vpg_private_ip_0 }}]\n\n vpg_private_1_port:\n type: OS::Neutron::Port\n properties:\n network: { get_param: ecomp_private_net_id }\n fixed_ips: [{\"subnet\": { get_param: ecomp_private_net_id }, \"ip_address\": { get_param: vpg_private_ip_1 }}]\n\n vsn_0:\n type: OS::Nova::Server\n properties:\n image: { get_param: vfw_image_name }\n flavor: { get_param: vfw_flavor_name }\n name: { get_param: vsn_name_0 }\n key_name: { get_resource: my_keypair }\n networks:\n - network: { get_param: public_net_id }\n - port: { get_resource: vsn_private_0_port }\n - port: { get_resource: vsn_private_1_port }\n metadata: {vnf_id: { get_param: vnf_id }, vf_module_id: { get_param: vf_module_id }}\n user_data_format: RAW\n user_data:\n str_replace:\n params:\n __webserver__: { get_param: webserver_ip }\n __protected_net_gw__: { get_param: vfw_private_ip_1 }\n __unprotected_net__: { get_param: unprotected_private_net_cidr }\n template: |\n #!/bin/bash\n\n WEBSERVER_IP=__webserver__\n PROTECTED_NET_GW=__protected_net_gw__\n UNPROTECTED_NET=__unprotected_net__\n UNPROTECTED_NET=$(echo $UNPROTECTED_NET | cut -d\'/\' -f1)\n\n mkdir /opt/config\n cd /opt\n wget http://$WEBSERVER_IP/demo_repo/v_sink_init.sh\n wget http://$WEBSERVER_IP/demo_repo/vsink.sh\n chmod +x v_sink_init.sh\n chmod +x vsink.sh\n echo $PROTECTED_NET_GW > config/protected_net_gw.txt\n echo $UNPROTECTED_NET > config/unprotected_net.txt\n echo \"no\" > config/install.txt\n mv vsink.sh /etc/init.d\n sudo update-rc.d vsink.sh defaults\n ./v_sink_init.sh\n\n vsn_private_0_port:\n type: OS::Neutron::Port\n properties:\n network: { get_resource: protected_private_network }\n fixed_ips: [{\"subnet\": { get_resource: protected_private_subnet }, \"ip_address\": { get_param: vsn_private_ip_0 }}]\n\n vsn_private_1_port:\n type: OS::Neutron::Port\n properties:\n network: { get_param: ecomp_private_net_id }\n fixed_ips: [{\"subnet\": { get_param: ecomp_private_net_id }, \"ip_address\": { get_param: vsn_private_ip_1 }}]\n \n',300,'MANUAL RECORD','2016-11-14 13:04:07');
+
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','dcae_collector_ip','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','ecomp_private_net_cidr','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','ecomp_private_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','key_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','protected_private_net_cidr','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','protected_private_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','public_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','pub_key','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','unprotected_private_net_cidr','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','unprotected_private_net_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vfw_flavor_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vfw_image_name','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vfw_name_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vfw_private_ip_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vfw_private_ip_1','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vfw_private_ip_2','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vf_module_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vnf_id','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vpg_name_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vpg_private_ip_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vpg_private_ip_1','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vsn_name_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vsn_private_ip_0','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','vsn_private_ip_1','','string',NULL);
+INSERT INTO `heat_template_params` (`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, `PARAM_TYPE`, `PARAM_ALIAS`) VALUES ('Artifact-UUID3','webserver_ip','','string',NULL);
+
+INSERT INTO `service` (`MODEL_UUID`, `MODEL_NAME`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `DESCRIPTION`, `CREATION_TIMESTAMP`, `TOSCA_CSAR_ARTIFACT_UUID`) VALUES ('2e34774e-715e-4fd5-bd09-7b654622f35i','vfw-service','585822c7-4027-4f84-ba50-e9248606f112','1.0','VFW service','2016-11-14 13:04:07',NULL);
+
+INSERT INTO `vf_module` (`MODEL_UUID`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `MODEL_NAME`, `DESCRIPTION`, `IS_BASE`, `HEAT_TEMPLATE_ARTIFACT_UUID`, `VOL_HEAT_TEMPLATE_ARTIFACT_UUID`, `CREATION_TIMESTAMP`, `VNF_RESOURCE_MODEL_UUID`) VALUES ('1e34774e-715e-4fd5-bd08-7b654622f33f.VF_RI1_VFW::module-1::module-1.group','585822c7-4027-4f84-ba50-e9248606f134','1.0','VF_RI1_VFW::module-1',NULL,1,'Artifact-UUID3',NULL,'2016-11-14 13:04:07','685822c7-4027-4f84-ba50-e9248606f132');
+
+INSERT INTO `vf_module_customization` (`MODEL_CUSTOMIZATION_UUID`, `LABEL`, `INITIAL_COUNT`, `MIN_INSTANCES`, `MAX_INSTANCES`, `AVAILABILITY_ZONE_COUNT`, `HEAT_ENVIRONMENT_ARTIFACT_UUID`, `VOL_ENVIRONMENT_ARTIFACT_UUID`, `CREATION_TIMESTAMP`, `VF_MODULE_MODEL_UUID`) VALUES ('5aa23938-a9fe-11e7-8b4b-0242ac120002',NULL,1,0,NULL,NULL,'EnvArtifact-UUID3',NULL,'2017-10-05 18:52:03','1e34774e-715e-4fd5-bd08-7b654622f33f.VF_RI1_VFW::module-1::module-1.group');
+
+INSERT INTO `vnf_res_custom_to_vf_module_custom` (`VNF_RESOURCE_CUST_MODEL_CUSTOMIZATION_UUID`, `VF_MODULE_CUST_MODEL_CUSTOMIZATION_UUID`, `CREATION_TIMESTAMP`) VALUES ('5a9bd247-a9fe-11e7-8b4b-0242ac120002','5aa23938-a9fe-11e7-8b4b-0242ac120002','2017-10-05 18:52:03');
+
+INSERT INTO `vnf_resource` (`ORCHESTRATION_MODE`, `DESCRIPTION`, `CREATION_TIMESTAMP`, `MODEL_UUID`, `AIC_VERSION_MIN`, `AIC_VERSION_MAX`, `MODEL_INVARIANT_UUID`, `MODEL_VERSION`, `MODEL_NAME`, `TOSCA_NODE_TYPE`, `HEAT_TEMPLATE_ARTIFACT_UUID`) VALUES ('HEAT','VFW service1707MIGRATED','2016-11-14 13:04:07','685822c7-4027-4f84-ba50-e9248606f132',NULL,NULL,'585822c7-4027-4f84-ba50-e9248606f113','1.0','VFWResource',NULL,NULL);
+
+INSERT INTO `vnf_resource_customization` (`MODEL_CUSTOMIZATION_UUID`, `MODEL_INSTANCE_NAME`, `MIN_INSTANCES`, `MAX_INSTANCES`, `AVAILABILITY_ZONE_MAX_COUNT`, `NF_TYPE`, `NF_ROLE`, `NF_FUNCTION`, `NF_NAMING_CODE`, `CREATION_TIMESTAMP`, `VNF_RESOURCE_MODEL_UUID`) VALUES ('5a9bd247-a9fe-11e7-8b4b-0242ac120002','VFWResource-1',NULL,NULL,NULL,NULL,NULL,NULL,NULL,'2017-10-05 18:52:03','685822c7-4027-4f84-ba50-e9248606f132');
+
+SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/camunda/mariadb_engine_7.7.3-ee.sql b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/camunda/mariadb_engine_7.7.3-ee.sql
new file mode 100644
index 000000000..b9b8dd62c
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/camunda/mariadb_engine_7.7.3-ee.sql
@@ -0,0 +1,1195 @@
+DROP DATABASE IF EXISTS `camundabpmn`;
+
+CREATE DATABASE `camundabpmn`;
+
+USE `camundabpmn`;
+
+# DROP USER IF EXISTS 'camunda';
+delete from mysql.user where User='camunda';
+CREATE USER 'camunda';
+GRANT ALL on camundabpmn.* to 'camunda' identified by 'camunda123' with GRANT OPTION;
+FLUSH PRIVILEGES;
+
+USE `camundabpmn`;
+
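+-- Camunda BPM engine schema (7.7.3-ee): general (ACT_GE_*), repository
+-- (ACT_RE_*), runtime (ACT_RU_*), and history (ACT_HI_*) tables, with their
+-- indexes and foreign key constraints.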
+create table ACT_GE_PROPERTY (
+ NAME_ varchar(64),
+ VALUE_ varchar(300),
+ REV_ integer,
+ primary key (NAME_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+insert into ACT_GE_PROPERTY
+values ('schema.version', 'fox', 1);
+
+insert into ACT_GE_PROPERTY
+values ('schema.history', 'create(fox)', 1);
+
+insert into ACT_GE_PROPERTY
+values ('next.dbid', '1', 1);
+
+insert into ACT_GE_PROPERTY
+values ('deployment.lock', '0', 1);
+
+insert into ACT_GE_PROPERTY
+values ('history.cleanup.job.lock', '0', 1);
+
+create table ACT_GE_BYTEARRAY (
+ ID_ varchar(64),
+ REV_ integer,
+ NAME_ varchar(255),
+ DEPLOYMENT_ID_ varchar(64),
+ BYTES_ LONGBLOB,
+ GENERATED_ TINYINT,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RE_DEPLOYMENT (
+ ID_ varchar(64),
+ NAME_ varchar(255),
+ DEPLOY_TIME_ timestamp(3),
+ SOURCE_ varchar(255),
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_EXECUTION (
+ ID_ varchar(64),
+ REV_ integer,
+ PROC_INST_ID_ varchar(64),
+ BUSINESS_KEY_ varchar(255),
+ PARENT_ID_ varchar(64),
+ PROC_DEF_ID_ varchar(64),
+ SUPER_EXEC_ varchar(64),
+ SUPER_CASE_EXEC_ varchar(64),
+ CASE_INST_ID_ varchar(64),
+ ACT_ID_ varchar(255),
+ ACT_INST_ID_ varchar(64),
+ IS_ACTIVE_ TINYINT,
+ IS_CONCURRENT_ TINYINT,
+ IS_SCOPE_ TINYINT,
+ IS_EVENT_SCOPE_ TINYINT,
+ SUSPENSION_STATE_ integer,
+ CACHED_ENT_STATE_ integer,
+ SEQUENCE_COUNTER_ bigint,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_JOB (
+ ID_ varchar(64) NOT NULL,
+ REV_ integer,
+ TYPE_ varchar(255) NOT NULL,
+ LOCK_EXP_TIME_ timestamp(3) NULL,
+ LOCK_OWNER_ varchar(255),
+ EXCLUSIVE_ boolean,
+ EXECUTION_ID_ varchar(64),
+ PROCESS_INSTANCE_ID_ varchar(64),
+ PROCESS_DEF_ID_ varchar(64),
+ PROCESS_DEF_KEY_ varchar(255),
+ RETRIES_ integer,
+ EXCEPTION_STACK_ID_ varchar(64),
+ EXCEPTION_MSG_ varchar(4000),
+ DUEDATE_ timestamp(3) NULL,
+ REPEAT_ varchar(255),
+ HANDLER_TYPE_ varchar(255),
+ HANDLER_CFG_ varchar(4000),
+ DEPLOYMENT_ID_ varchar(64),
+ SUSPENSION_STATE_ integer NOT NULL DEFAULT 1,
+ JOB_DEF_ID_ varchar(64),
+ PRIORITY_ bigint NOT NULL DEFAULT 0,
+ SEQUENCE_COUNTER_ bigint,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_JOBDEF (
+ ID_ varchar(64) NOT NULL,
+ REV_ integer,
+ PROC_DEF_ID_ varchar(64),
+ PROC_DEF_KEY_ varchar(255),
+ ACT_ID_ varchar(255),
+ JOB_TYPE_ varchar(255) NOT NULL,
+ JOB_CONFIGURATION_ varchar(255),
+ SUSPENSION_STATE_ integer,
+ JOB_PRIORITY_ bigint,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RE_PROCDEF (
+ ID_ varchar(64) not null,
+ REV_ integer,
+ CATEGORY_ varchar(255),
+ NAME_ varchar(255),
+ KEY_ varchar(255) not null,
+ VERSION_ integer not null,
+ DEPLOYMENT_ID_ varchar(64),
+ RESOURCE_NAME_ varchar(4000),
+ DGRM_RESOURCE_NAME_ varchar(4000),
+ HAS_START_FORM_KEY_ TINYINT,
+ SUSPENSION_STATE_ integer,
+ TENANT_ID_ varchar(64),
+ VERSION_TAG_ varchar(64),
+ HISTORY_TTL_ integer,
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_TASK (
+ ID_ varchar(64),
+ REV_ integer,
+ EXECUTION_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ PROC_DEF_ID_ varchar(64),
+ CASE_EXECUTION_ID_ varchar(64),
+ CASE_INST_ID_ varchar(64),
+ CASE_DEF_ID_ varchar(64),
+ NAME_ varchar(255),
+ PARENT_TASK_ID_ varchar(64),
+ DESCRIPTION_ varchar(4000),
+ TASK_DEF_KEY_ varchar(255),
+ OWNER_ varchar(255),
+ ASSIGNEE_ varchar(255),
+ DELEGATION_ varchar(64),
+ PRIORITY_ integer,
+ CREATE_TIME_ timestamp(3),
+ DUE_DATE_ datetime(3),
+ FOLLOW_UP_DATE_ datetime(3),
+ SUSPENSION_STATE_ integer,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_IDENTITYLINK (
+ ID_ varchar(64),
+ REV_ integer,
+ GROUP_ID_ varchar(255),
+ TYPE_ varchar(255),
+ USER_ID_ varchar(255),
+ TASK_ID_ varchar(64),
+ PROC_DEF_ID_ varchar(64),
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_VARIABLE (
+ ID_ varchar(64) not null,
+ REV_ integer,
+ TYPE_ varchar(255) not null,
+ NAME_ varchar(255) not null,
+ EXECUTION_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ CASE_EXECUTION_ID_ varchar(64),
+ CASE_INST_ID_ varchar(64),
+ TASK_ID_ varchar(64),
+ BYTEARRAY_ID_ varchar(64),
+ DOUBLE_ double,
+ LONG_ bigint,
+ TEXT_ LONGBLOB NULL,
+ TEXT2_ LONGBLOB NULL,
+ VAR_SCOPE_ varchar(64) not null,
+ SEQUENCE_COUNTER_ bigint,
+ IS_CONCURRENT_LOCAL_ TINYINT,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_EVENT_SUBSCR (
+ ID_ varchar(64) not null,
+ REV_ integer,
+ EVENT_TYPE_ varchar(255) not null,
+ EVENT_NAME_ varchar(255),
+ EXECUTION_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ ACTIVITY_ID_ varchar(255),
+ CONFIGURATION_ varchar(255),
+ CREATED_ timestamp(3) not null,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_INCIDENT (
+ ID_ varchar(64) not null,
+ REV_ integer not null,
+ INCIDENT_TIMESTAMP_ timestamp(3) not null,
+ INCIDENT_MSG_ varchar(4000),
+ INCIDENT_TYPE_ varchar(255) not null,
+ EXECUTION_ID_ varchar(64),
+ ACTIVITY_ID_ varchar(255),
+ PROC_INST_ID_ varchar(64),
+ PROC_DEF_ID_ varchar(64),
+ CAUSE_INCIDENT_ID_ varchar(64),
+ ROOT_CAUSE_INCIDENT_ID_ varchar(64),
+ CONFIGURATION_ varchar(255),
+ TENANT_ID_ varchar(64),
+ JOB_DEF_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_AUTHORIZATION (
+ ID_ varchar(64) not null,
+ REV_ integer not null,
+ TYPE_ integer not null,
+ GROUP_ID_ varchar(255),
+ USER_ID_ varchar(255),
+ RESOURCE_TYPE_ integer not null,
+ RESOURCE_ID_ varchar(64),
+ PERMS_ integer,
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_FILTER (
+ ID_ varchar(64) not null,
+ REV_ integer not null,
+ RESOURCE_TYPE_ varchar(255) not null,
+ NAME_ varchar(255) not null,
+ OWNER_ varchar(255),
+ QUERY_ LONGTEXT not null,
+ PROPERTIES_ LONGTEXT,
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_METER_LOG (
+ ID_ varchar(64) not null,
+ NAME_ varchar(64) not null,
+ REPORTER_ varchar(255),
+ VALUE_ bigint,
+ TIMESTAMP_ timestamp(3),
+ MILLISECONDS_ bigint DEFAULT 0,
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_EXT_TASK (
+ ID_ varchar(64) not null,
+ REV_ integer not null,
+ WORKER_ID_ varchar(255),
+ TOPIC_NAME_ varchar(255),
+ RETRIES_ integer,
+ ERROR_MSG_ varchar(4000),
+ ERROR_DETAILS_ID_ varchar(64),
+ LOCK_EXP_TIME_ timestamp(3) NULL,
+ SUSPENSION_STATE_ integer,
+ EXECUTION_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ PROC_DEF_ID_ varchar(64),
+ PROC_DEF_KEY_ varchar(255),
+ ACT_ID_ varchar(255),
+ ACT_INST_ID_ varchar(64),
+ TENANT_ID_ varchar(64),
+ PRIORITY_ bigint NOT NULL DEFAULT 0,
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_RU_BATCH (
+ ID_ varchar(64) not null,
+ REV_ integer not null,
+ TYPE_ varchar(255),
+ TOTAL_JOBS_ integer,
+ JOBS_CREATED_ integer,
+ JOBS_PER_SEED_ integer,
+ INVOCATIONS_PER_JOB_ integer,
+ SEED_JOB_DEF_ID_ varchar(64),
+ BATCH_JOB_DEF_ID_ varchar(64),
+ MONITOR_JOB_DEF_ID_ varchar(64),
+ SUSPENSION_STATE_ integer,
+ CONFIGURATION_ varchar(255),
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create index ACT_IDX_EXEC_BUSKEY on ACT_RU_EXECUTION(BUSINESS_KEY_);
+create index ACT_IDX_EXEC_TENANT_ID on ACT_RU_EXECUTION(TENANT_ID_);
+create index ACT_IDX_TASK_CREATE on ACT_RU_TASK(CREATE_TIME_);
+create index ACT_IDX_TASK_ASSIGNEE on ACT_RU_TASK(ASSIGNEE_);
+create index ACT_IDX_TASK_TENANT_ID on ACT_RU_TASK(TENANT_ID_);
+create index ACT_IDX_IDENT_LNK_USER on ACT_RU_IDENTITYLINK(USER_ID_);
+create index ACT_IDX_IDENT_LNK_GROUP on ACT_RU_IDENTITYLINK(GROUP_ID_);
+create index ACT_IDX_EVENT_SUBSCR_CONFIG_ on ACT_RU_EVENT_SUBSCR(CONFIGURATION_);
+create index ACT_IDX_EVENT_SUBSCR_TENANT_ID on ACT_RU_EVENT_SUBSCR(TENANT_ID_);
+create index ACT_IDX_VARIABLE_TASK_ID on ACT_RU_VARIABLE(TASK_ID_);
+create index ACT_IDX_VARIABLE_TENANT_ID on ACT_RU_VARIABLE(TENANT_ID_);
+create index ACT_IDX_ATHRZ_PROCEDEF on ACT_RU_IDENTITYLINK(PROC_DEF_ID_);
+create index ACT_IDX_INC_CONFIGURATION on ACT_RU_INCIDENT(CONFIGURATION_);
+create index ACT_IDX_INC_TENANT_ID on ACT_RU_INCIDENT(TENANT_ID_);
+-- CAM-5914
+create index ACT_IDX_JOB_EXECUTION_ID on ACT_RU_JOB(EXECUTION_ID_);
+-- this index needs to be limited in mariadb see CAM-6938
+create index ACT_IDX_JOB_HANDLER on ACT_RU_JOB(HANDLER_TYPE_(100),HANDLER_CFG_(155));
+create index ACT_IDX_JOB_PROCINST on ACT_RU_JOB(PROCESS_INSTANCE_ID_);
+create index ACT_IDX_JOB_TENANT_ID on ACT_RU_JOB(TENANT_ID_);
+create index ACT_IDX_JOBDEF_TENANT_ID on ACT_RU_JOBDEF(TENANT_ID_);
+
+-- new metric milliseconds column
+CREATE INDEX ACT_IDX_METER_LOG_MS ON ACT_RU_METER_LOG(MILLISECONDS_);
+CREATE INDEX ACT_IDX_METER_LOG_NAME_MS ON ACT_RU_METER_LOG(NAME_, MILLISECONDS_);
+CREATE INDEX ACT_IDX_METER_LOG_REPORT ON ACT_RU_METER_LOG(NAME_, REPORTER_, MILLISECONDS_);
+
+-- old metric timestamp column
+CREATE INDEX ACT_IDX_METER_LOG_TIME ON ACT_RU_METER_LOG(TIMESTAMP_);
+CREATE INDEX ACT_IDX_METER_LOG ON ACT_RU_METER_LOG(NAME_, TIMESTAMP_);
+
+create index ACT_IDX_EXT_TASK_TOPIC on ACT_RU_EXT_TASK(TOPIC_NAME_);
+create index ACT_IDX_EXT_TASK_TENANT_ID on ACT_RU_EXT_TASK(TENANT_ID_);
+create index ACT_IDX_EXT_TASK_PRIORITY ON ACT_RU_EXT_TASK(PRIORITY_);
+create index ACT_IDX_EXT_TASK_ERR_DETAILS ON ACT_RU_EXT_TASK(ERROR_DETAILS_ID_);
+create index ACT_IDX_AUTH_GROUP_ID ON ACT_RU_AUTHORIZATION(GROUP_ID_);
+create index ACT_IDX_JOB_JOB_DEF_ID on ACT_RU_JOB(JOB_DEF_ID_);
+
+alter table ACT_GE_BYTEARRAY
+ add constraint ACT_FK_BYTEARR_DEPL
+ foreign key (DEPLOYMENT_ID_)
+ references ACT_RE_DEPLOYMENT (ID_);
+
+alter table ACT_RU_EXECUTION
+ add constraint ACT_FK_EXE_PROCINST
+ foreign key (PROC_INST_ID_)
+ references ACT_RU_EXECUTION (ID_) on delete cascade on update cascade;
+
+alter table ACT_RU_EXECUTION
+ add constraint ACT_FK_EXE_PARENT
+ foreign key (PARENT_ID_)
+ references ACT_RU_EXECUTION (ID_);
+
+alter table ACT_RU_EXECUTION
+ add constraint ACT_FK_EXE_SUPER
+ foreign key (SUPER_EXEC_)
+ references ACT_RU_EXECUTION (ID_);
+
+alter table ACT_RU_EXECUTION
+ add constraint ACT_FK_EXE_PROCDEF
+ foreign key (PROC_DEF_ID_)
+ references ACT_RE_PROCDEF (ID_);
+
+alter table ACT_RU_IDENTITYLINK
+ add constraint ACT_FK_TSKASS_TASK
+ foreign key (TASK_ID_)
+ references ACT_RU_TASK (ID_);
+
+alter table ACT_RU_IDENTITYLINK
+ add constraint ACT_FK_ATHRZ_PROCEDEF
+ foreign key (PROC_DEF_ID_)
+ references ACT_RE_PROCDEF(ID_);
+
+alter table ACT_RU_TASK
+ add constraint ACT_FK_TASK_EXE
+ foreign key (EXECUTION_ID_)
+ references ACT_RU_EXECUTION (ID_);
+
+alter table ACT_RU_TASK
+ add constraint ACT_FK_TASK_PROCINST
+ foreign key (PROC_INST_ID_)
+ references ACT_RU_EXECUTION (ID_);
+
+alter table ACT_RU_TASK
+ add constraint ACT_FK_TASK_PROCDEF
+ foreign key (PROC_DEF_ID_)
+ references ACT_RE_PROCDEF (ID_);
+
+alter table ACT_RU_VARIABLE
+ add constraint ACT_FK_VAR_EXE
+ foreign key (EXECUTION_ID_)
+ references ACT_RU_EXECUTION (ID_);
+
+alter table ACT_RU_VARIABLE
+ add constraint ACT_FK_VAR_PROCINST
+ foreign key (PROC_INST_ID_)
+ references ACT_RU_EXECUTION(ID_);
+
+alter table ACT_RU_VARIABLE
+ add constraint ACT_FK_VAR_BYTEARRAY
+ foreign key (BYTEARRAY_ID_)
+ references ACT_GE_BYTEARRAY (ID_);
+
+alter table ACT_RU_JOB
+ add constraint ACT_FK_JOB_EXCEPTION
+ foreign key (EXCEPTION_STACK_ID_)
+ references ACT_GE_BYTEARRAY (ID_);
+
+alter table ACT_RU_EVENT_SUBSCR
+ add constraint ACT_FK_EVENT_EXEC
+ foreign key (EXECUTION_ID_)
+ references ACT_RU_EXECUTION(ID_);
+
+alter table ACT_RU_INCIDENT
+ add constraint ACT_FK_INC_EXE
+ foreign key (EXECUTION_ID_)
+ references ACT_RU_EXECUTION (ID_);
+
+alter table ACT_RU_INCIDENT
+ add constraint ACT_FK_INC_PROCINST
+ foreign key (PROC_INST_ID_)
+ references ACT_RU_EXECUTION (ID_);
+
+alter table ACT_RU_INCIDENT
+ add constraint ACT_FK_INC_PROCDEF
+ foreign key (PROC_DEF_ID_)
+ references ACT_RE_PROCDEF (ID_);
+
+alter table ACT_RU_INCIDENT
+ add constraint ACT_FK_INC_CAUSE
+ foreign key (CAUSE_INCIDENT_ID_)
+ references ACT_RU_INCIDENT (ID_) on delete cascade on update cascade;
+
+alter table ACT_RU_INCIDENT
+ add constraint ACT_FK_INC_RCAUSE
+ foreign key (ROOT_CAUSE_INCIDENT_ID_)
+ references ACT_RU_INCIDENT (ID_) on delete cascade on update cascade;
+
+alter table ACT_RU_EXT_TASK
+ add constraint ACT_FK_EXT_TASK_ERROR_DETAILS
+ foreign key (ERROR_DETAILS_ID_)
+ references ACT_GE_BYTEARRAY (ID_);
+
+create index ACT_IDX_INC_JOB_DEF on ACT_RU_INCIDENT(JOB_DEF_ID_);
+alter table ACT_RU_INCIDENT
+ add constraint ACT_FK_INC_JOB_DEF
+ foreign key (JOB_DEF_ID_)
+ references ACT_RU_JOBDEF (ID_);
+
+alter table ACT_RU_AUTHORIZATION
+ add constraint ACT_UNIQ_AUTH_USER
+ unique (USER_ID_,TYPE_,RESOURCE_TYPE_,RESOURCE_ID_);
+
+alter table ACT_RU_AUTHORIZATION
+ add constraint ACT_UNIQ_AUTH_GROUP
+ unique (GROUP_ID_,TYPE_,RESOURCE_TYPE_,RESOURCE_ID_);
+
+alter table ACT_RU_VARIABLE
+ add constraint ACT_UNIQ_VARIABLE
+ unique (VAR_SCOPE_, NAME_);
+
+alter table ACT_RU_EXT_TASK
+ add constraint ACT_FK_EXT_TASK_EXE
+ foreign key (EXECUTION_ID_)
+ references ACT_RU_EXECUTION (ID_);
+
+create index ACT_IDX_BATCH_SEED_JOB_DEF ON ACT_RU_BATCH(SEED_JOB_DEF_ID_);
+alter table ACT_RU_BATCH
+ add constraint ACT_FK_BATCH_SEED_JOB_DEF
+ foreign key (SEED_JOB_DEF_ID_)
+ references ACT_RU_JOBDEF (ID_);
+
+create index ACT_IDX_BATCH_MONITOR_JOB_DEF ON ACT_RU_BATCH(MONITOR_JOB_DEF_ID_);
+alter table ACT_RU_BATCH
+ add constraint ACT_FK_BATCH_MONITOR_JOB_DEF
+ foreign key (MONITOR_JOB_DEF_ID_)
+ references ACT_RU_JOBDEF (ID_);
+
+create index ACT_IDX_BATCH_JOB_DEF ON ACT_RU_BATCH(BATCH_JOB_DEF_ID_);
+alter table ACT_RU_BATCH
+ add constraint ACT_FK_BATCH_JOB_DEF
+ foreign key (BATCH_JOB_DEF_ID_)
+ references ACT_RU_JOBDEF (ID_);
+
+-- indexes for deadlock problems - https://app.camunda.com/jira/browse/CAM-2567 --
+create index ACT_IDX_INC_CAUSEINCID on ACT_RU_INCIDENT(CAUSE_INCIDENT_ID_);
+create index ACT_IDX_INC_EXID on ACT_RU_INCIDENT(EXECUTION_ID_);
+create index ACT_IDX_INC_PROCDEFID on ACT_RU_INCIDENT(PROC_DEF_ID_);
+create index ACT_IDX_INC_PROCINSTID on ACT_RU_INCIDENT(PROC_INST_ID_);
+create index ACT_IDX_INC_ROOTCAUSEINCID on ACT_RU_INCIDENT(ROOT_CAUSE_INCIDENT_ID_);
+-- index for deadlock problem - https://app.camunda.com/jira/browse/CAM-4440 --
+create index ACT_IDX_AUTH_RESOURCE_ID on ACT_RU_AUTHORIZATION(RESOURCE_ID_);
+-- index to prevent deadlock on fk constraint - https://app.camunda.com/jira/browse/CAM-5440 --
+create index ACT_IDX_EXT_TASK_EXEC on ACT_RU_EXT_TASK(EXECUTION_ID_);
+
+-- indexes to improve deployment
+create index ACT_IDX_BYTEARRAY_NAME on ACT_GE_BYTEARRAY(NAME_);
+create index ACT_IDX_DEPLOYMENT_NAME on ACT_RE_DEPLOYMENT(NAME_);
+create index ACT_IDX_DEPLOYMENT_TENANT_ID on ACT_RE_DEPLOYMENT(TENANT_ID_);
+create index ACT_IDX_JOBDEF_PROC_DEF_ID ON ACT_RU_JOBDEF(PROC_DEF_ID_);
+create index ACT_IDX_JOB_HANDLER_TYPE ON ACT_RU_JOB(HANDLER_TYPE_);
+create index ACT_IDX_EVENT_SUBSCR_EVT_NAME ON ACT_RU_EVENT_SUBSCR(EVENT_NAME_);
+create index ACT_IDX_PROCDEF_DEPLOYMENT_ID ON ACT_RE_PROCDEF(DEPLOYMENT_ID_);
+create index ACT_IDX_PROCDEF_TENANT_ID ON ACT_RE_PROCDEF(TENANT_ID_);
+create index ACT_IDX_PROCDEF_VER_TAG ON ACT_RE_PROCDEF(VERSION_TAG_);
+-- create case definition table --
+create table ACT_RE_CASE_DEF (
+ ID_ varchar(64) not null,
+ REV_ integer,
+ CATEGORY_ varchar(255),
+ NAME_ varchar(255),
+ KEY_ varchar(255) not null,
+ VERSION_ integer not null,
+ DEPLOYMENT_ID_ varchar(64),
+ RESOURCE_NAME_ varchar(4000),
+ DGRM_RESOURCE_NAME_ varchar(4000),
+ TENANT_ID_ varchar(64),
+ HISTORY_TTL_ integer,
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+-- create case execution table --
+create table ACT_RU_CASE_EXECUTION (
+ ID_ varchar(64) NOT NULL,
+ REV_ integer,
+ CASE_INST_ID_ varchar(64),
+ SUPER_CASE_EXEC_ varchar(64),
+ SUPER_EXEC_ varchar(64),
+ BUSINESS_KEY_ varchar(255),
+ PARENT_ID_ varchar(64),
+ CASE_DEF_ID_ varchar(64),
+ ACT_ID_ varchar(255),
+ PREV_STATE_ integer,
+ CURRENT_STATE_ integer,
+ REQUIRED_ boolean,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+-- create case sentry part table --
+
+create table ACT_RU_CASE_SENTRY_PART (
+ ID_ varchar(64) NOT NULL,
+ REV_ integer,
+ CASE_INST_ID_ varchar(64),
+ CASE_EXEC_ID_ varchar(64),
+ SENTRY_ID_ varchar(255),
+ TYPE_ varchar(255),
+ SOURCE_CASE_EXEC_ID_ varchar(64),
+ STANDARD_EVENT_ varchar(255),
+ SOURCE_ varchar(255),
+ VARIABLE_EVENT_ varchar(255),
+ VARIABLE_NAME_ varchar(255),
+ SATISFIED_ boolean,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+-- create index on business key --
+create index ACT_IDX_CASE_EXEC_BUSKEY on ACT_RU_CASE_EXECUTION(BUSINESS_KEY_);
+
+-- create foreign key constraints on ACT_RU_CASE_EXECUTION --
+alter table ACT_RU_CASE_EXECUTION
+ add constraint ACT_FK_CASE_EXE_CASE_INST
+ foreign key (CASE_INST_ID_)
+ references ACT_RU_CASE_EXECUTION(ID_) on delete cascade on update cascade;
+
+alter table ACT_RU_CASE_EXECUTION
+ add constraint ACT_FK_CASE_EXE_PARENT
+ foreign key (PARENT_ID_)
+ references ACT_RU_CASE_EXECUTION(ID_);
+
+alter table ACT_RU_CASE_EXECUTION
+ add constraint ACT_FK_CASE_EXE_CASE_DEF
+ foreign key (CASE_DEF_ID_)
+ references ACT_RE_CASE_DEF(ID_);
+
+-- create foreign key constraints on ACT_RU_VARIABLE --
+alter table ACT_RU_VARIABLE
+ add constraint ACT_FK_VAR_CASE_EXE
+ foreign key (CASE_EXECUTION_ID_)
+ references ACT_RU_CASE_EXECUTION(ID_);
+
+alter table ACT_RU_VARIABLE
+ add constraint ACT_FK_VAR_CASE_INST
+ foreign key (CASE_INST_ID_)
+ references ACT_RU_CASE_EXECUTION(ID_);
+
+-- create foreign key constraints on ACT_RU_TASK --
+alter table ACT_RU_TASK
+ add constraint ACT_FK_TASK_CASE_EXE
+ foreign key (CASE_EXECUTION_ID_)
+ references ACT_RU_CASE_EXECUTION(ID_);
+
+alter table ACT_RU_TASK
+ add constraint ACT_FK_TASK_CASE_DEF
+ foreign key (CASE_DEF_ID_)
+ references ACT_RE_CASE_DEF(ID_);
+
+-- create foreign key constraints on ACT_RU_CASE_SENTRY_PART --
+alter table ACT_RU_CASE_SENTRY_PART
+ add constraint ACT_FK_CASE_SENTRY_CASE_INST
+ foreign key (CASE_INST_ID_)
+ references ACT_RU_CASE_EXECUTION(ID_);
+
+alter table ACT_RU_CASE_SENTRY_PART
+ add constraint ACT_FK_CASE_SENTRY_CASE_EXEC
+ foreign key (CASE_EXEC_ID_)
+ references ACT_RU_CASE_EXECUTION(ID_);
+
+create index ACT_IDX_CASE_DEF_TENANT_ID on ACT_RE_CASE_DEF(TENANT_ID_);
+create index ACT_IDX_CASE_EXEC_TENANT_ID on ACT_RU_CASE_EXECUTION(TENANT_ID_);
+-- create decision definition table --
+create table ACT_RE_DECISION_DEF (
+ ID_ varchar(64) not null,
+ REV_ integer,
+ CATEGORY_ varchar(255),
+ NAME_ varchar(255),
+ KEY_ varchar(255) not null,
+ VERSION_ integer not null,
+ DEPLOYMENT_ID_ varchar(64),
+ RESOURCE_NAME_ varchar(4000),
+ DGRM_RESOURCE_NAME_ varchar(4000),
+ DEC_REQ_ID_ varchar(64),
+ DEC_REQ_KEY_ varchar(255),
+ TENANT_ID_ varchar(64),
+ HISTORY_TTL_ integer,
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+-- create decision requirements definition table --
+create table ACT_RE_DECISION_REQ_DEF (
+ ID_ varchar(64) NOT NULL,
+ REV_ integer,
+ CATEGORY_ varchar(255),
+ NAME_ varchar(255),
+ KEY_ varchar(255) NOT NULL,
+ VERSION_ integer NOT NULL,
+ DEPLOYMENT_ID_ varchar(64),
+ RESOURCE_NAME_ varchar(4000),
+ DGRM_RESOURCE_NAME_ varchar(4000),
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+alter table ACT_RE_DECISION_DEF
+ add constraint ACT_FK_DEC_REQ
+ foreign key (DEC_REQ_ID_)
+ references ACT_RE_DECISION_REQ_DEF(ID_);
+
+create index ACT_IDX_DEC_DEF_TENANT_ID on ACT_RE_DECISION_DEF(TENANT_ID_);
+create index ACT_IDX_DEC_DEF_REQ_ID on ACT_RE_DECISION_DEF(DEC_REQ_ID_);
+create index ACT_IDX_DEC_REQ_DEF_TENANT_ID on ACT_RE_DECISION_REQ_DEF(TENANT_ID_);
+create table ACT_HI_PROCINST (
+ ID_ varchar(64) not null,
+ PROC_INST_ID_ varchar(64) not null,
+ BUSINESS_KEY_ varchar(255),
+ PROC_DEF_KEY_ varchar(255),
+ PROC_DEF_ID_ varchar(64) not null,
+ START_TIME_ datetime(3) not null,
+ END_TIME_ datetime(3),
+ DURATION_ bigint,
+ START_USER_ID_ varchar(255),
+ START_ACT_ID_ varchar(255),
+ END_ACT_ID_ varchar(255),
+ SUPER_PROCESS_INSTANCE_ID_ varchar(64),
+ SUPER_CASE_INSTANCE_ID_ varchar(64),
+ CASE_INST_ID_ varchar(64),
+ DELETE_REASON_ varchar(4000),
+ TENANT_ID_ varchar(64),
+ STATE_ varchar(255),
+ primary key (ID_),
+ unique (PROC_INST_ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_ACTINST (
+ ID_ varchar(64) not null,
+ PARENT_ACT_INST_ID_ varchar(64),
+ PROC_DEF_KEY_ varchar(255),
+ PROC_DEF_ID_ varchar(64) not null,
+ PROC_INST_ID_ varchar(64) not null,
+ EXECUTION_ID_ varchar(64) not null,
+ ACT_ID_ varchar(255) not null,
+ TASK_ID_ varchar(64),
+ CALL_PROC_INST_ID_ varchar(64),
+ CALL_CASE_INST_ID_ varchar(64),
+ ACT_NAME_ varchar(255),
+ ACT_TYPE_ varchar(255) not null,
+ ASSIGNEE_ varchar(64),
+ START_TIME_ datetime(3) not null,
+ END_TIME_ datetime(3),
+ DURATION_ bigint,
+ ACT_INST_STATE_ integer,
+ SEQUENCE_COUNTER_ bigint,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_TASKINST (
+ ID_ varchar(64) not null,
+ TASK_DEF_KEY_ varchar(255),
+ PROC_DEF_KEY_ varchar(255),
+ PROC_DEF_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ EXECUTION_ID_ varchar(64),
+ CASE_DEF_KEY_ varchar(255),
+ CASE_DEF_ID_ varchar(64),
+ CASE_INST_ID_ varchar(64),
+ CASE_EXECUTION_ID_ varchar(64),
+ ACT_INST_ID_ varchar(64),
+ NAME_ varchar(255),
+ PARENT_TASK_ID_ varchar(64),
+ DESCRIPTION_ varchar(4000),
+ OWNER_ varchar(255),
+ ASSIGNEE_ varchar(255),
+ START_TIME_ datetime(3) not null,
+ END_TIME_ datetime(3),
+ DURATION_ bigint,
+ DELETE_REASON_ varchar(4000),
+ PRIORITY_ integer,
+ DUE_DATE_ datetime(3),
+ FOLLOW_UP_DATE_ datetime(3),
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_VARINST (
+ ID_ varchar(64) not null,
+ PROC_DEF_KEY_ varchar(255),
+ PROC_DEF_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ EXECUTION_ID_ varchar(64),
+ ACT_INST_ID_ varchar(64),
+ CASE_DEF_KEY_ varchar(255),
+ CASE_DEF_ID_ varchar(64),
+ CASE_INST_ID_ varchar(64),
+ CASE_EXECUTION_ID_ varchar(64),
+ TASK_ID_ varchar(64),
+ NAME_ varchar(255) not null,
+ VAR_TYPE_ varchar(100),
+ REV_ integer,
+ BYTEARRAY_ID_ varchar(64),
+ DOUBLE_ double,
+ LONG_ bigint,
+ TEXT_ LONGBLOB NULL,
+ TEXT2_ LONGBLOB NULL,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_DETAIL (
+ ID_ varchar(64) not null,
+ TYPE_ varchar(255) not null,
+ PROC_DEF_KEY_ varchar(255),
+ PROC_DEF_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ EXECUTION_ID_ varchar(64),
+ CASE_DEF_KEY_ varchar(255),
+ CASE_DEF_ID_ varchar(64),
+ CASE_INST_ID_ varchar(64),
+ CASE_EXECUTION_ID_ varchar(64),
+ TASK_ID_ varchar(64),
+ ACT_INST_ID_ varchar(64),
+ VAR_INST_ID_ varchar(64),
+ NAME_ varchar(255) not null,
+ VAR_TYPE_ varchar(255),
+ REV_ integer,
+ TIME_ datetime(3) not null,
+ BYTEARRAY_ID_ varchar(64),
+ DOUBLE_ double,
+ LONG_ bigint,
+ TEXT_ LONGBLOB NULL,
+ TEXT2_ LONGBLOB NULL,
+ SEQUENCE_COUNTER_ bigint,
+ TENANT_ID_ varchar(64),
+ OPERATION_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_IDENTITYLINK (
+ ID_ varchar(64) not null,
+ TIMESTAMP_ timestamp(3) not null,
+ TYPE_ varchar(255),
+ USER_ID_ varchar(255),
+ GROUP_ID_ varchar(255),
+ TASK_ID_ varchar(64),
+ PROC_DEF_ID_ varchar(64),
+ OPERATION_TYPE_ varchar(64),
+ ASSIGNER_ID_ varchar(64),
+ PROC_DEF_KEY_ varchar(255),
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_COMMENT (
+ ID_ varchar(64) not null,
+ TYPE_ varchar(255),
+ TIME_ datetime(3) not null,
+ USER_ID_ varchar(255),
+ TASK_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ ACTION_ varchar(255),
+ MESSAGE_ varchar(4000),
+ FULL_MSG_ LONGBLOB,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_ATTACHMENT (
+ ID_ varchar(64) not null,
+ REV_ integer,
+ USER_ID_ varchar(255),
+ NAME_ varchar(255),
+ DESCRIPTION_ varchar(4000),
+ TYPE_ varchar(255),
+ TASK_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ URL_ varchar(4000),
+ CONTENT_ID_ varchar(64),
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_OP_LOG (
+ ID_ varchar(64) not null,
+ DEPLOYMENT_ID_ varchar(64),
+ PROC_DEF_ID_ varchar(64),
+ PROC_DEF_KEY_ varchar(255),
+ PROC_INST_ID_ varchar(64),
+ EXECUTION_ID_ varchar(64),
+ CASE_DEF_ID_ varchar(64),
+ CASE_INST_ID_ varchar(64),
+ CASE_EXECUTION_ID_ varchar(64),
+ TASK_ID_ varchar(64),
+ JOB_ID_ varchar(64),
+ JOB_DEF_ID_ varchar(64),
+ BATCH_ID_ varchar(64),
+ USER_ID_ varchar(255),
+ TIMESTAMP_ timestamp(3) not null,
+ OPERATION_TYPE_ varchar(64),
+ OPERATION_ID_ varchar(64),
+ ENTITY_TYPE_ varchar(30),
+ PROPERTY_ varchar(64),
+ ORG_VALUE_ varchar(4000),
+ NEW_VALUE_ varchar(4000),
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_INCIDENT (
+ ID_ varchar(64) not null,
+ PROC_DEF_KEY_ varchar(255),
+ PROC_DEF_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ EXECUTION_ID_ varchar(64),
+ CREATE_TIME_ timestamp(3) not null,
+ END_TIME_ timestamp(3) null,
+ INCIDENT_MSG_ varchar(4000),
+ INCIDENT_TYPE_ varchar(255) not null,
+ ACTIVITY_ID_ varchar(255),
+ CAUSE_INCIDENT_ID_ varchar(64),
+ ROOT_CAUSE_INCIDENT_ID_ varchar(64),
+ CONFIGURATION_ varchar(255),
+ INCIDENT_STATE_ integer,
+ TENANT_ID_ varchar(64),
+ JOB_DEF_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_JOB_LOG (
+ ID_ varchar(64) not null,
+ TIMESTAMP_ timestamp(3) not null,
+ JOB_ID_ varchar(64) not null,
+ JOB_DUEDATE_ timestamp(3) NULL,
+ JOB_RETRIES_ integer,
+ JOB_PRIORITY_ bigint NOT NULL DEFAULT 0,
+ JOB_EXCEPTION_MSG_ varchar(4000),
+ JOB_EXCEPTION_STACK_ID_ varchar(64),
+ JOB_STATE_ integer,
+ JOB_DEF_ID_ varchar(64),
+ JOB_DEF_TYPE_ varchar(255),
+ JOB_DEF_CONFIGURATION_ varchar(255),
+ ACT_ID_ varchar(255),
+ EXECUTION_ID_ varchar(64),
+ PROCESS_INSTANCE_ID_ varchar(64),
+ PROCESS_DEF_ID_ varchar(64),
+ PROCESS_DEF_KEY_ varchar(255),
+ DEPLOYMENT_ID_ varchar(64),
+ SEQUENCE_COUNTER_ bigint,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_BATCH (
+ ID_ varchar(64) not null,
+ TYPE_ varchar(255),
+ TOTAL_JOBS_ integer,
+ JOBS_PER_SEED_ integer,
+ INVOCATIONS_PER_JOB_ integer,
+ SEED_JOB_DEF_ID_ varchar(64),
+ MONITOR_JOB_DEF_ID_ varchar(64),
+ BATCH_JOB_DEF_ID_ varchar(64),
+ TENANT_ID_ varchar(64),
+ START_TIME_ datetime(3) not null,
+ END_TIME_ datetime(3),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_EXT_TASK_LOG (
+ ID_ varchar(64) not null,
+ TIMESTAMP_ timestamp(3) not null,
+ EXT_TASK_ID_ varchar(64) not null,
+ RETRIES_ integer,
+ TOPIC_NAME_ varchar(255),
+ WORKER_ID_ varchar(255),
+ PRIORITY_ bigint NOT NULL DEFAULT 0,
+ ERROR_MSG_ varchar(4000),
+ ERROR_DETAILS_ID_ varchar(64),
+ ACT_ID_ varchar(255),
+ ACT_INST_ID_ varchar(64),
+ EXECUTION_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ PROC_DEF_ID_ varchar(64),
+ PROC_DEF_KEY_ varchar(255),
+ TENANT_ID_ varchar(64),
+ STATE_ integer,
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create index ACT_IDX_HI_PRO_INST_END on ACT_HI_PROCINST(END_TIME_);
+create index ACT_IDX_HI_PRO_I_BUSKEY on ACT_HI_PROCINST(BUSINESS_KEY_);
+create index ACT_IDX_HI_PRO_INST_TENANT_ID on ACT_HI_PROCINST(TENANT_ID_);
+create index ACT_IDX_HI_PRO_INST_PROC_DEF_KEY on ACT_HI_PROCINST(PROC_DEF_KEY_);
+
+create index ACT_IDX_HI_ACT_INST_START on ACT_HI_ACTINST(START_TIME_);
+create index ACT_IDX_HI_ACT_INST_END on ACT_HI_ACTINST(END_TIME_);
+create index ACT_IDX_HI_ACT_INST_PROCINST on ACT_HI_ACTINST(PROC_INST_ID_, ACT_ID_);
+create index ACT_IDX_HI_ACT_INST_COMP on ACT_HI_ACTINST(EXECUTION_ID_, ACT_ID_, END_TIME_, ID_);
+create index ACT_IDX_HI_ACT_INST_STATS on ACT_HI_ACTINST(PROC_DEF_ID_, ACT_ID_, END_TIME_, ACT_INST_STATE_);
+create index ACT_IDX_HI_ACT_INST_TENANT_ID on ACT_HI_ACTINST(TENANT_ID_);
+create index ACT_IDX_HI_ACT_INST_PROC_DEF_KEY on ACT_HI_ACTINST(PROC_DEF_KEY_);
+
+create index ACT_IDX_HI_TASK_INST_TENANT_ID on ACT_HI_TASKINST(TENANT_ID_);
+create index ACT_IDX_HI_TASK_INST_PROC_DEF_KEY on ACT_HI_TASKINST(PROC_DEF_KEY_);
+
+create index ACT_IDX_HI_DETAIL_PROC_INST on ACT_HI_DETAIL(PROC_INST_ID_);
+create index ACT_IDX_HI_DETAIL_ACT_INST on ACT_HI_DETAIL(ACT_INST_ID_);
+create index ACT_IDX_HI_DETAIL_CASE_INST on ACT_HI_DETAIL(CASE_INST_ID_);
+create index ACT_IDX_HI_DETAIL_CASE_EXEC on ACT_HI_DETAIL(CASE_EXECUTION_ID_);
+create index ACT_IDX_HI_DETAIL_TIME on ACT_HI_DETAIL(TIME_);
+create index ACT_IDX_HI_DETAIL_NAME on ACT_HI_DETAIL(NAME_);
+create index ACT_IDX_HI_DETAIL_TASK_ID on ACT_HI_DETAIL(TASK_ID_);
+create index ACT_IDX_HI_DETAIL_TENANT_ID on ACT_HI_DETAIL(TENANT_ID_);
+create index ACT_IDX_HI_DETAIL_PROC_DEF_KEY on ACT_HI_DETAIL(PROC_DEF_KEY_);
+
+create index ACT_IDX_HI_IDENT_LNK_USER on ACT_HI_IDENTITYLINK(USER_ID_);
+create index ACT_IDX_HI_IDENT_LNK_GROUP on ACT_HI_IDENTITYLINK(GROUP_ID_);
+create index ACT_IDX_HI_IDENT_LNK_TENANT_ID on ACT_HI_IDENTITYLINK(TENANT_ID_);
+create index ACT_IDX_HI_IDENT_LNK_PROC_DEF_KEY on ACT_HI_IDENTITYLINK(PROC_DEF_KEY_);
+
+create index ACT_IDX_HI_PROCVAR_PROC_INST on ACT_HI_VARINST(PROC_INST_ID_);
+create index ACT_IDX_HI_PROCVAR_NAME_TYPE on ACT_HI_VARINST(NAME_, VAR_TYPE_);
+create index ACT_IDX_HI_CASEVAR_CASE_INST on ACT_HI_VARINST(CASE_INST_ID_);
+create index ACT_IDX_HI_VAR_INST_TENANT_ID on ACT_HI_VARINST(TENANT_ID_);
+create index ACT_IDX_HI_VAR_INST_PROC_DEF_KEY on ACT_HI_VARINST(PROC_DEF_KEY_);
+
+create index ACT_IDX_HI_INCIDENT_TENANT_ID on ACT_HI_INCIDENT(TENANT_ID_);
+create index ACT_IDX_HI_INCIDENT_PROC_DEF_KEY on ACT_HI_INCIDENT(PROC_DEF_KEY_);
+
+create index ACT_IDX_HI_JOB_LOG_PROCINST on ACT_HI_JOB_LOG(PROCESS_INSTANCE_ID_);
+create index ACT_IDX_HI_JOB_LOG_PROCDEF on ACT_HI_JOB_LOG(PROCESS_DEF_ID_);
+create index ACT_IDX_HI_JOB_LOG_TENANT_ID on ACT_HI_JOB_LOG(TENANT_ID_);
+create index ACT_IDX_HI_JOB_LOG_JOB_DEF_ID on ACT_HI_JOB_LOG(JOB_DEF_ID_);
+create index ACT_IDX_HI_JOB_LOG_PROC_DEF_KEY on ACT_HI_JOB_LOG(PROCESS_DEF_KEY_);
+
+create index ACT_HI_EXT_TASK_LOG_PROCINST on ACT_HI_EXT_TASK_LOG(PROC_INST_ID_);
+create index ACT_HI_EXT_TASK_LOG_PROCDEF on ACT_HI_EXT_TASK_LOG(PROC_DEF_ID_);
+create index ACT_HI_EXT_TASK_LOG_PROC_DEF_KEY on ACT_HI_EXT_TASK_LOG(PROC_DEF_KEY_);
+create index ACT_HI_EXT_TASK_LOG_TENANT_ID on ACT_HI_EXT_TASK_LOG(TENANT_ID_);
+
+create index ACT_IDX_HI_OP_LOG_PROCINST on ACT_HI_OP_LOG(PROC_INST_ID_);
+create index ACT_IDX_HI_OP_LOG_PROCDEF on ACT_HI_OP_LOG(PROC_DEF_ID_);
+create table ACT_HI_CASEINST (
+ ID_ varchar(64) not null,
+ CASE_INST_ID_ varchar(64) not null,
+ BUSINESS_KEY_ varchar(255),
+ CASE_DEF_ID_ varchar(64) not null,
+ CREATE_TIME_ datetime(3) not null,
+ CLOSE_TIME_ datetime(3),
+ DURATION_ bigint,
+ STATE_ integer,
+ CREATE_USER_ID_ varchar(255),
+ SUPER_CASE_INSTANCE_ID_ varchar(64),
+ SUPER_PROCESS_INSTANCE_ID_ varchar(64),
+ TENANT_ID_ varchar(64),
+ primary key (ID_),
+ unique (CASE_INST_ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_HI_CASEACTINST (
+ ID_ varchar(64) not null,
+ PARENT_ACT_INST_ID_ varchar(64),
+ CASE_DEF_ID_ varchar(64) not null,
+ CASE_INST_ID_ varchar(64) not null,
+ CASE_ACT_ID_ varchar(255) not null,
+ TASK_ID_ varchar(64),
+ CALL_PROC_INST_ID_ varchar(64),
+ CALL_CASE_INST_ID_ varchar(64),
+ CASE_ACT_NAME_ varchar(255),
+ CASE_ACT_TYPE_ varchar(255),
+ CREATE_TIME_ datetime(3) not null,
+ END_TIME_ datetime(3),
+ DURATION_ bigint,
+ STATE_ integer,
+ REQUIRED_ boolean,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create index ACT_IDX_HI_CAS_I_CLOSE on ACT_HI_CASEINST(CLOSE_TIME_);
+create index ACT_IDX_HI_CAS_I_BUSKEY on ACT_HI_CASEINST(BUSINESS_KEY_);
+create index ACT_IDX_HI_CAS_I_TENANT_ID on ACT_HI_CASEINST(TENANT_ID_);
+create index ACT_IDX_HI_CAS_A_I_CREATE on ACT_HI_CASEACTINST(CREATE_TIME_);
+create index ACT_IDX_HI_CAS_A_I_END on ACT_HI_CASEACTINST(END_TIME_);
+create index ACT_IDX_HI_CAS_A_I_COMP on ACT_HI_CASEACTINST(CASE_ACT_ID_, END_TIME_, ID_);
+create index ACT_IDX_HI_CAS_A_I_CASEINST on ACT_HI_CASEACTINST(CASE_INST_ID_, CASE_ACT_ID_);
+create index ACT_IDX_HI_CAS_A_I_TENANT_ID on ACT_HI_CASEACTINST(TENANT_ID_);
+-- create history decision instance table --
+create table ACT_HI_DECINST (
+ ID_ varchar(64) NOT NULL,
+ DEC_DEF_ID_ varchar(64) NOT NULL,
+ DEC_DEF_KEY_ varchar(255) NOT NULL,
+ DEC_DEF_NAME_ varchar(255),
+ PROC_DEF_KEY_ varchar(255),
+ PROC_DEF_ID_ varchar(64),
+ PROC_INST_ID_ varchar(64),
+ CASE_DEF_KEY_ varchar(255),
+ CASE_DEF_ID_ varchar(64),
+ CASE_INST_ID_ varchar(64),
+ ACT_INST_ID_ varchar(64),
+ ACT_ID_ varchar(255),
+ EVAL_TIME_ datetime(3) not null,
+ COLLECT_VALUE_ double,
+ USER_ID_ varchar(255),
+ ROOT_DEC_INST_ID_ varchar(64),
+ DEC_REQ_ID_ varchar(64),
+ DEC_REQ_KEY_ varchar(255),
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+-- create history decision input table --
+create table ACT_HI_DEC_IN (
+ ID_ varchar(64) NOT NULL,
+ DEC_INST_ID_ varchar(64) NOT NULL,
+ CLAUSE_ID_ varchar(64),
+ CLAUSE_NAME_ varchar(255),
+ VAR_TYPE_ varchar(100),
+ BYTEARRAY_ID_ varchar(64),
+ DOUBLE_ double,
+ LONG_ bigint,
+ TEXT_ LONGBLOB NULL,
+ TEXT2_ LONGBLOB NULL,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+-- create history decision output table --
+create table ACT_HI_DEC_OUT (
+ ID_ varchar(64) NOT NULL,
+ DEC_INST_ID_ varchar(64) NOT NULL,
+ CLAUSE_ID_ varchar(64),
+ CLAUSE_NAME_ varchar(255),
+ RULE_ID_ varchar(64),
+ RULE_ORDER_ integer,
+ VAR_NAME_ varchar(255),
+ VAR_TYPE_ varchar(100),
+ BYTEARRAY_ID_ varchar(64),
+ DOUBLE_ double,
+ LONG_ bigint,
+ TEXT_ LONGBLOB NULL,
+ TEXT2_ LONGBLOB NULL,
+ TENANT_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+
+create index ACT_IDX_HI_DEC_INST_ID on ACT_HI_DECINST(DEC_DEF_ID_);
+create index ACT_IDX_HI_DEC_INST_KEY on ACT_HI_DECINST(DEC_DEF_KEY_);
+create index ACT_IDX_HI_DEC_INST_PI on ACT_HI_DECINST(PROC_INST_ID_);
+create index ACT_IDX_HI_DEC_INST_CI on ACT_HI_DECINST(CASE_INST_ID_);
+create index ACT_IDX_HI_DEC_INST_ACT on ACT_HI_DECINST(ACT_ID_);
+create index ACT_IDX_HI_DEC_INST_ACT_INST on ACT_HI_DECINST(ACT_INST_ID_);
+create index ACT_IDX_HI_DEC_INST_TIME on ACT_HI_DECINST(EVAL_TIME_);
+create index ACT_IDX_HI_DEC_INST_TENANT_ID on ACT_HI_DECINST(TENANT_ID_);
+create index ACT_IDX_HI_DEC_INST_ROOT_ID on ACT_HI_DECINST(ROOT_DEC_INST_ID_);
+create index ACT_IDX_HI_DEC_INST_REQ_ID on ACT_HI_DECINST(DEC_REQ_ID_);
+create index ACT_IDX_HI_DEC_INST_REQ_KEY on ACT_HI_DECINST(DEC_REQ_KEY_);
+
+
+create index ACT_IDX_HI_DEC_IN_INST on ACT_HI_DEC_IN(DEC_INST_ID_);
+create index ACT_IDX_HI_DEC_IN_CLAUSE on ACT_HI_DEC_IN(DEC_INST_ID_, CLAUSE_ID_);
+
+create index ACT_IDX_HI_DEC_OUT_INST on ACT_HI_DEC_OUT(DEC_INST_ID_);
+create index ACT_IDX_HI_DEC_OUT_RULE on ACT_HI_DEC_OUT(RULE_ORDER_, CLAUSE_ID_);
+
+
+-- mariadb identity:
+
+create table ACT_ID_GROUP (
+ ID_ varchar(64),
+ REV_ integer,
+ NAME_ varchar(255),
+ TYPE_ varchar(255),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_ID_MEMBERSHIP (
+ USER_ID_ varchar(64),
+ GROUP_ID_ varchar(64),
+ primary key (USER_ID_, GROUP_ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_ID_USER (
+ ID_ varchar(64),
+ REV_ integer,
+ FIRST_ varchar(255),
+ LAST_ varchar(255),
+ EMAIL_ varchar(255),
+ PWD_ varchar(255),
+ SALT_ varchar(255),
+ PICTURE_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_ID_INFO (
+ ID_ varchar(64),
+ REV_ integer,
+ USER_ID_ varchar(64),
+ TYPE_ varchar(64),
+ KEY_ varchar(255),
+ VALUE_ varchar(255),
+ PASSWORD_ LONGBLOB,
+ PARENT_ID_ varchar(255),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_ID_TENANT (
+ ID_ varchar(64),
+ REV_ integer,
+ NAME_ varchar(255),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+create table ACT_ID_TENANT_MEMBER (
+ ID_ varchar(64) not null,
+ TENANT_ID_ varchar(64) not null,
+ USER_ID_ varchar(64),
+ GROUP_ID_ varchar(64),
+ primary key (ID_)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin;
+
+alter table ACT_ID_MEMBERSHIP
+ add constraint ACT_FK_MEMB_GROUP
+ foreign key (GROUP_ID_)
+ references ACT_ID_GROUP (ID_);
+
+alter table ACT_ID_MEMBERSHIP
+ add constraint ACT_FK_MEMB_USER
+ foreign key (USER_ID_)
+ references ACT_ID_USER (ID_);
+
+alter table ACT_ID_TENANT_MEMBER
+ add constraint ACT_UNIQ_TENANT_MEMB_USER
+ unique (TENANT_ID_, USER_ID_);
+
+alter table ACT_ID_TENANT_MEMBER
+ add constraint ACT_UNIQ_TENANT_MEMB_GROUP
+ unique (TENANT_ID_, GROUP_ID_);
+
+alter table ACT_ID_TENANT_MEMBER
+ add constraint ACT_FK_TENANT_MEMB
+ foreign key (TENANT_ID_)
+ references ACT_ID_TENANT (ID_);
+
+alter table ACT_ID_TENANT_MEMBER
+ add constraint ACT_FK_TENANT_MEMB_USER
+ foreign key (USER_ID_)
+ references ACT_ID_USER (ID_);
+
+alter table ACT_ID_TENANT_MEMBER
+ add constraint ACT_FK_TENANT_MEMB_GROUP
+ foreign key (GROUP_ID_)
+ references ACT_ID_GROUP (ID_);
+
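For orientation only (not part of the patch): once the init scripts above have run, a quick way to confirm the Camunda engine schema exists is to list its tables. This is a minimal sketch assuming MariaDB/MySQL and the camundabpmn schema name used by the admin-user script that follows.

-- Hypothetical sanity check; ACT_RU_* are runtime tables, ACT_HI_* history,
-- ACT_RE_* repository and ACT_ID_* identity tables created above.
SELECT TABLE_NAME
  FROM information_schema.TABLES
 WHERE TABLE_SCHEMA = 'camundabpmn'
   AND TABLE_NAME LIKE 'ACT\_%'
 ORDER BY TABLE_NAME;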
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/camunda/mysql_create_camunda_admin.sql b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/camunda/mysql_create_camunda_admin.sql
new file mode 100644
index 000000000..3658c6c23
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/camunda/mysql_create_camunda_admin.sql
@@ -0,0 +1,25 @@
+USE camundabpmn;
+
+INSERT INTO `act_id_group` (`ID_`, `REV_`, `NAME_`, `TYPE_`) VALUES ('camunda-admin',1,'camunda BPM Administrators','SYSTEM');
+
+INSERT INTO `act_id_user` (`ID_`, `REV_`, `FIRST_`, `LAST_`, `EMAIL_`, `PWD_`, `SALT_`, `PICTURE_ID_`) VALUES ('admin',1,'admin','user','camundaadmin@onap.org','{SHA-512}n5jUw7fvXM9sZBcrIkLiAOCqiPHutaqEkbg6IQVQdylVP1im8SczBJf4f2xL7cvWwIAZjkcSSQzgFTsdaJSEiA==','ftTn4gNgMcq07wdSD0lEJQ==',NULL);
+
+INSERT INTO `act_id_membership` (`USER_ID_`, `GROUP_ID_`) VALUES ('admin','camunda-admin');
+
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49b0e028-a3c6-11e7-b0ec-0242ac120003',1,1,NULL,'admin',1,'admin',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49b525e9-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,2,'camunda-admin',2);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49b8814a-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,0,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49baa42b-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,1,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49bd8a5c-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,2,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49bfd44d-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,3,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49c1f72e-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,4,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49c41a0f-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,5,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49c77570-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,6,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49ca5ba1-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,7,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49cca592-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,8,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49ceef83-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,9,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49d11264-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,10,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49d38365-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,11,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49d5a646-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,12,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49d83e57-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,13,'*',2147483647);
+INSERT INTO `act_ru_authorization` (`ID_`, `REV_`, `TYPE_`, `GROUP_ID_`, `USER_ID_`, `RESOURCE_TYPE_`, `RESOURCE_ID_`, `PERMS_`) VALUES ('49da3a28-a3c6-11e7-b0ec-0242ac120003',1,1,'camunda-admin',NULL,14,'*',2147483647);
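The PERMS_ value 2147483647 is 2^31 - 1, i.e. every permission bit set, so the rows above grant the admin user and the camunda-admin group full rights on each RESOURCE_TYPE_. A hedged readback query, assuming the same camundabpmn schema as above:

-- Illustrative only, not part of this change: list the grants created above.
SELECT RESOURCE_TYPE_, RESOURCE_ID_, PERMS_
  FROM act_ru_authorization
 WHERE GROUP_ID_ = 'camunda-admin'
 ORDER BY RESOURCE_TYPE_;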
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/main-schemas/MySQL-Catalog-schema.sql b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/main-schemas/MySQL-Catalog-schema.sql
new file mode 100644
index 000000000..ca002fbe6
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/main-schemas/MySQL-Catalog-schema.sql
@@ -0,0 +1,462 @@
+
+ alter table HEAT_TEMPLATE_PARAMS
+ drop
+ foreign key FK_p3ol1xcvp831glqohrlu6o07o;
+
+ alter table MODEL_RECIPE
+ drop
+ foreign key FK_c23r0puyqug6n44jg39dutm1c;
+
+ alter table SERVICE
+ drop
+ foreign key FK_l3qy594u2xr1tfpmma3uigsna;
+
+ alter table SERVICE_RECIPE
+ drop
+ foreign key FK_i3r1b8j6e7dg9hkp49evnnm5y;
+
+ alter table SERVICE_TO_RESOURCE_CUSTOMIZATIONS
+ drop
+ foreign key FK_kiddaay6cfe0aob1f1jaio1bb;
+
+ alter table VF_MODULE
+ drop
+ foreign key FK_12jptc9it7gs3pru08skobxxc;
+
+ alter table VNF_RESOURCE_CUSTOMIZATION
+ drop
+ foreign key FK_iff1ayhb1hrp5jhea3vvikuni;
+
+ drop table if exists ALLOTTED_RESOURCE;
+
+ drop table if exists ALLOTTED_RESOURCE_CUSTOMIZATION;
+
+ drop table if exists HEAT_ENVIRONMENT;
+
+ drop table if exists HEAT_FILES;
+
+ drop table if exists HEAT_NESTED_TEMPLATE;
+
+ drop table if exists HEAT_TEMPLATE;
+
+ drop table if exists HEAT_TEMPLATE_PARAMS;
+
+ drop table if exists MODEL;
+
+ drop table if exists MODEL_RECIPE;
+
+ drop table if exists NETWORK_RECIPE;
+
+ drop table if exists NETWORK_RESOURCE;
+
+ drop table if exists NETWORK_RESOURCE_CUSTOMIZATION;
+
+ drop table if exists SERVICE;
+
+ drop table if exists SERVICE_RECIPE;
+
+ drop table if exists SERVICE_TO_ALLOTTED_RESOURCES;
+
+ drop table if exists SERVICE_TO_NETWORKS;
+
+ drop table if exists SERVICE_TO_RESOURCE_CUSTOMIZATIONS;
+
+ drop table if exists TEMP_NETWORK_HEAT_TEMPLATE_LOOKUP;
+
+ drop table if exists TOSCA_CSAR;
+
+ drop table if exists VF_MODULE;
+
+ drop table if exists VF_MODULE_CUSTOMIZATION;
+
+ drop table if exists VF_MODULE_TO_HEAT_FILES;
+
+ drop table if exists VNF_COMPONENTS;
+
+ drop table if exists VNF_COMPONENTS_RECIPE;
+
+ drop table if exists VNF_RECIPE;
+
+ drop table if exists VNF_RESOURCE;
+
+ drop table if exists VNF_RESOURCE_CUSTOMIZATION;
+
+ drop table if exists VNF_RES_CUSTOM_TO_VF_MODULE_CUSTOM;
+
+ create table ALLOTTED_RESOURCE (
+ MODEL_UUID varchar(255) not null,
+ MODEL_INVARIANT_UUID varchar(255),
+ MODEL_VERSION varchar(255),
+ MODEL_NAME varchar(255),
+ TOSCA_NODE_TYPE varchar(255),
+ SUBCATEGORY varchar(255),
+ DESCRIPTION varchar(255),
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (MODEL_UUID)
+ );
+
+ create table ALLOTTED_RESOURCE_CUSTOMIZATION (
+ MODEL_CUSTOMIZATION_UUID varchar(200) not null,
+ MODEL_INSTANCE_NAME varchar(255),
+ AR_MODEL_UUID varchar(255),
+ PROVIDING_SERVICE_MODEL_INVARIANT_UUID varchar(255),
+ TARGET_NETWORK_ROLE varchar(255),
+ NF_FUNCTION varchar(255),
+ NF_TYPE varchar(255),
+ NF_ROLE varchar(255),
+ NF_NAMING_CODE varchar(255),
+ MIN_INSTANCES integer,
+ MAX_INSTANCES integer,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (MODEL_CUSTOMIZATION_UUID)
+ );
+
+ create table HEAT_ENVIRONMENT (
+ ARTIFACT_UUID varchar(200) not null,
+ NAME varchar(100) not null,
+ VERSION varchar(20) not null,
+ DESCRIPTION varchar(1200),
+ BODY longtext not null,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ ARTIFACT_CHECKSUM varchar(200) default 'MANUAL RECORD',
+ primary key (ARTIFACT_UUID)
+ );
+
+ create table HEAT_FILES (
+ ARTIFACT_UUID varchar(255) not null,
+ DESCRIPTION varchar(255),
+ NAME varchar(255),
+ VERSION varchar(255),
+ BODY longtext,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ ARTIFACT_CHECKSUM varchar(255),
+ primary key (ARTIFACT_UUID)
+ );
+
+ create table HEAT_NESTED_TEMPLATE (
+ PARENT_HEAT_TEMPLATE_UUID varchar(200) not null,
+ CHILD_HEAT_TEMPLATE_UUID varchar(200) not null,
+ PROVIDER_RESOURCE_FILE varchar(100),
+ primary key (PARENT_HEAT_TEMPLATE_UUID, CHILD_HEAT_TEMPLATE_UUID)
+ );
+
+ create table HEAT_TEMPLATE (
+ ARTIFACT_UUID varchar(200) not null,
+ NAME varchar(200) not null,
+ VERSION varchar(20) not null,
+ BODY longtext not null,
+ TIMEOUT_MINUTES integer,
+ DESCRIPTION varchar(1200),
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ ARTIFACT_CHECKSUM varchar(200) default 'MANUAL RECORD' not null,
+ primary key (ARTIFACT_UUID)
+ );
+
+ create table HEAT_TEMPLATE_PARAMS (
+ HEAT_TEMPLATE_ARTIFACT_UUID varchar(255) not null,
+ PARAM_NAME varchar(255) not null,
+ IS_REQUIRED bit not null,
+ PARAM_TYPE varchar(20),
+ PARAM_ALIAS varchar(45),
+ primary key (HEAT_TEMPLATE_ARTIFACT_UUID, PARAM_NAME)
+ );
+
+ create table MODEL (
+ id integer not null auto_increment,
+ MODEL_TYPE varchar(20) not null,
+ MODEL_VERSION_ID varchar(40) not null,
+ MODEL_INVARIANT_ID varchar(40),
+ MODEL_NAME varchar(40) not null,
+ MODEL_VERSION varchar(20),
+ MODEL_CUSTOMIZATION_ID varchar(40),
+ MODEL_CUSTOMIZATION_NAME varchar(40),
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (id)
+ );
+
+ create table MODEL_RECIPE (
+ id integer not null auto_increment,
+ MODEL_ID integer not null,
+ ACTION varchar(20) not null,
+ SCHEMA_VERSION varchar(20),
+ DESCRIPTION varchar(1200),
+ ORCHESTRATION_URI varchar(256) not null,
+ MODEL_PARAM_XSD varchar(2048),
+ RECIPE_TIMEOUT integer,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (id)
+ );
+
+ create table NETWORK_RECIPE (
+ id integer not null auto_increment,
+ MODEL_NAME varchar(20) not null,
+ ACTION varchar(20) not null,
+ VERSION_STR varchar(20) not null,
+ SERVICE_TYPE varchar(45),
+ DESCRIPTION varchar(1200),
+ ORCHESTRATION_URI varchar(256) not null,
+ NETWORK_PARAM_XSD varchar(2048),
+ RECIPE_TIMEOUT integer,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (id)
+ );
+
+ create table NETWORK_RESOURCE (
+ MODEL_UUID varchar(200) not null,
+ MODEL_NAME varchar(200) not null,
+ MODEL_INVARIANT_UUID varchar(200),
+ MODEL_VERSION varchar(20),
+ TOSCA_NODE_TYPE varchar(200),
+ NEUTRON_NETWORK_TYPE varchar(20),
+ DESCRIPTION varchar(1200),
+ ORCHESTRATION_MODE varchar(20),
+ HEAT_TEMPLATE_ARTIFACT_UUID varchar(200) not null,
+ AIC_VERSION_MIN varchar(20) default 2.5 not null,
+ AIC_VERSION_MAX varchar(20) default 2.5,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (MODEL_UUID)
+ );
+
+ create table NETWORK_RESOURCE_CUSTOMIZATION (
+ MODEL_CUSTOMIZATION_UUID varchar(200) not null,
+ NETWORK_RESOURCE_MODEL_UUID varchar(200) not null,
+ MODEL_INSTANCE_NAME varchar(255),
+ NETWORK_TECHNOLOGY varchar(255),
+ NETWORK_TYPE varchar(255),
+ NETWORK_SCOPE varchar(255),
+ NETWORK_ROLE varchar(255),
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (MODEL_CUSTOMIZATION_UUID)
+ );
+
+ create table SERVICE (
+ MODEL_UUID varchar(200) not null,
+ MODEL_NAME varchar(200) not null,
+ MODEL_VERSION varchar(20) not null,
+ DESCRIPTION varchar(1200),
+ TOSCA_CSAR_ARTIFACT_UUID varchar(200),
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ MODEL_INVARIANT_UUID varchar(200) default 'MANUAL_RECORD' not null,
+ SERVICE_TYPE varchar(20),
+ SERVICE_ROLE varchar(20),
+ primary key (MODEL_UUID)
+ );
+
+ create table SERVICE_RECIPE (
+ id integer not null auto_increment,
+ SERVICE_MODEL_UUID varchar(200) not null,
+ ACTION varchar(40) not null,
+ ORCHESTRATION_URI varchar(256) not null,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ VERSION_STR varchar(20),
+ DESCRIPTION varchar(1200),
+ SERVICE_PARAM_XSD varchar(2048),
+ RECIPE_TIMEOUT integer,
+ SERVICE_TIMEOUT_INTERIM integer,
+ primary key (id)
+ );
+
+ create table SERVICE_TO_ALLOTTED_RESOURCES (
+ SERVICE_MODEL_UUID varchar(200) not null,
+ AR_MODEL_CUSTOMIZATION_UUID varchar(200) not null,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (SERVICE_MODEL_UUID, AR_MODEL_CUSTOMIZATION_UUID)
+ );
+
+ create table SERVICE_TO_NETWORKS (
+ SERVICE_MODEL_UUID varchar(200) not null,
+ NETWORK_MODEL_CUSTOMIZATION_UUID varchar(200) not null,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (SERVICE_MODEL_UUID, NETWORK_MODEL_CUSTOMIZATION_UUID)
+ );
+
+ create table SERVICE_TO_RESOURCE_CUSTOMIZATIONS (
+ MODEL_TYPE varchar(20) not null,
+ RESOURCE_MODEL_CUSTOMIZATION_UUID varchar(200) not null,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ SERVICE_MODEL_UUID varchar(200) not null,
+ primary key (MODEL_TYPE, RESOURCE_MODEL_CUSTOMIZATION_UUID)
+ );
+
+ create table TEMP_NETWORK_HEAT_TEMPLATE_LOOKUP (
+ NETWORK_RESOURCE_MODEL_NAME varchar(200) not null,
+ HEAT_TEMPLATE_ARTIFACT_UUID varchar(200) not null,
+ AIC_VERSION_MIN varchar(20) not null,
+ AIC_VERSION_MAX varchar(20),
+ primary key (NETWORK_RESOURCE_MODEL_NAME)
+ );
+
+ create table TOSCA_CSAR (
+ ARTIFACT_UUID varchar(200) not null,
+ NAME varchar(200) not null,
+ VERSION varchar(20) not null,
+ ARTIFACT_CHECKSUM varchar(200) not null,
+ URL varchar(200) not null,
+ DESCRIPTION varchar(1200),
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (ARTIFACT_UUID)
+ );
+
+ create table VF_MODULE (
+ MODEL_UUID varchar(200) not null,
+ VNF_RESOURCE_MODEL_UUID varchar(200),
+ MODEL_INVARIANT_UUID varchar(200),
+ MODEL_VERSION varchar(20) not null,
+ MODEL_NAME varchar(200) not null,
+ DESCRIPTION varchar(1200),
+ IS_BASE integer not null,
+ HEAT_TEMPLATE_ARTIFACT_UUID varchar(200) not null,
+ VOL_HEAT_TEMPLATE_ARTIFACT_UUID varchar(200),
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (MODEL_UUID)
+ );
+
+ create table VF_MODULE_CUSTOMIZATION (
+ MODEL_CUSTOMIZATION_UUID varchar(200) not null,
+ VF_MODULE_MODEL_UUID varchar(200) not null,
+ VOL_ENVIRONMENT_ARTIFACT_UUID varchar(200),
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ HEAT_ENVIRONMENT_ARTIFACT_UUID varchar(200),
+ MIN_INSTANCES integer,
+ MAX_INSTANCES integer,
+ INITIAL_COUNT integer,
+ AVAILABILITY_ZONE_COUNT integer,
+ LABEL varchar(200),
+ primary key (MODEL_CUSTOMIZATION_UUID)
+ );
+
+ create table VF_MODULE_TO_HEAT_FILES (
+ VF_MODULE_MODEL_UUID varchar(200) not null,
+ HEAT_FILES_ARTIFACT_UUID varchar(200) not null,
+ primary key (VF_MODULE_MODEL_UUID, HEAT_FILES_ARTIFACT_UUID)
+ );
+
+ create table VNF_COMPONENTS (
+ VNF_ID integer not null,
+ COMPONENT_TYPE varchar(20) not null,
+ HEAT_TEMPLATE_ID integer,
+ HEAT_ENVIRONMENT_ID integer,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (VNF_ID, COMPONENT_TYPE)
+ );
+
+ create table VNF_COMPONENTS_RECIPE (
+ id integer not null auto_increment,
+ VNF_TYPE varchar(200),
+ VF_MODULE_MODEL_UUID varchar(100),
+ VNF_COMPONENT_TYPE varchar(45) not null,
+ ACTION varchar(20) not null,
+ SERVICE_TYPE varchar(45),
+ VERSION varchar(20),
+ DESCRIPTION varchar(1200),
+ ORCHESTRATION_URI varchar(256) not null,
+ VNF_COMPONENT_PARAM_XSD varchar(2048),
+ RECIPE_TIMEOUT integer,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (id)
+ );
+
+ create table VNF_RECIPE (
+ id integer not null auto_increment,
+ VF_MODULE_ID varchar(100),
+ ACTION varchar(20) not null,
+ VERSION_STR varchar(20) not null,
+ VNF_TYPE varchar(200),
+ SERVICE_TYPE varchar(45) default null,
+ DESCRIPTION varchar(1200),
+ ORCHESTRATION_URI varchar(256) not null,
+ VNF_PARAM_XSD varchar(2048),
+ RECIPE_TIMEOUT integer,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (id)
+ );
+
+ create table VNF_RESOURCE (
+ MODEL_UUID varchar(200) not null,
+ MODEL_INVARIANT_UUID varchar(200),
+ MODEL_VERSION varchar(20) not null,
+ MODEL_NAME varchar(200),
+ TOSCA_NODE_TYPE varchar(200),
+ DESCRIPTION varchar(1200),
+ ORCHESTRATION_MODE varchar(20) not null,
+ AIC_VERSION_MIN varchar(20),
+ AIC_VERSION_MAX varchar(20),
+ HEAT_TEMPLATE_ARTIFACT_UUID varchar(200),
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (MODEL_UUID)
+ );
+
+ create table VNF_RESOURCE_CUSTOMIZATION (
+ MODEL_CUSTOMIZATION_UUID varchar(200) not null,
+ MODEL_INSTANCE_NAME varchar(200) not null,
+ MIN_INSTANCES integer,
+ MAX_INSTANCES integer,
+ AVAILABILITY_ZONE_MAX_COUNT integer,
+ NF_FUNCTION varchar(200),
+ NF_TYPE varchar(200),
+ NF_ROLE varchar(200),
+ NF_NAMING_CODE varchar(200),
+ VNF_RESOURCE_MODEL_UUID varchar(200) not null,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (MODEL_CUSTOMIZATION_UUID)
+ );
+
+ create table VNF_RES_CUSTOM_TO_VF_MODULE_CUSTOM (
+ VNF_RESOURCE_CUST_MODEL_CUSTOMIZATION_UUID varchar(200) not null,
+ VF_MODULE_CUST_MODEL_CUSTOMIZATION_UUID varchar(200) not null,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (VNF_RESOURCE_CUST_MODEL_CUSTOMIZATION_UUID, VF_MODULE_CUST_MODEL_CUSTOMIZATION_UUID)
+ );
+
+ alter table MODEL
+ add constraint UK_rra00f1rk6eyy7g00k9raxh2v unique (MODEL_TYPE, MODEL_VERSION_ID);
+
+ alter table MODEL_RECIPE
+ add constraint UK_b4g8j9wtqrkxfycyi3ursk7gb unique (MODEL_ID, ACTION);
+
+ alter table NETWORK_RECIPE
+ add constraint UK_pbsa8i44m8p10f9529jdgfuk9 unique (MODEL_NAME, ACTION, VERSION_STR);
+
+ alter table SERVICE_RECIPE
+ add constraint UK_2lr377dpqnvl5aqlp5dtj2fcp unique (SERVICE_MODEL_UUID, ACTION);
+
+ alter table VNF_COMPONENTS_RECIPE
+ add constraint UK_g3je95aaxxiuest25f0qoy2u8 unique (VNF_TYPE, VF_MODULE_MODEL_UUID, VNF_COMPONENT_TYPE, ACTION, SERVICE_TYPE, VERSION);
+
+ alter table VNF_RECIPE
+ add constraint UK_f3tvqau498vrifq3cr8qnigkr unique (VF_MODULE_ID, ACTION, VERSION_STR);
+
+ alter table HEAT_TEMPLATE_PARAMS
+ add constraint FK_p3ol1xcvp831glqohrlu6o07o
+ foreign key (HEAT_TEMPLATE_ARTIFACT_UUID)
+ references HEAT_TEMPLATE (ARTIFACT_UUID);
+
+ alter table MODEL_RECIPE
+ add constraint FK_c23r0puyqug6n44jg39dutm1c
+ foreign key (MODEL_ID)
+ references MODEL (id);
+
+ alter table SERVICE
+ add constraint FK_l3qy594u2xr1tfpmma3uigsna
+ foreign key (TOSCA_CSAR_ARTIFACT_UUID)
+ references TOSCA_CSAR (ARTIFACT_UUID);
+
+ alter table SERVICE_RECIPE
+ add constraint FK_i3r1b8j6e7dg9hkp49evnnm5y
+ foreign key (SERVICE_MODEL_UUID)
+ references SERVICE (MODEL_UUID);
+
+ alter table SERVICE_TO_RESOURCE_CUSTOMIZATIONS
+ add constraint FK_kiddaay6cfe0aob1f1jaio1bb
+ foreign key (SERVICE_MODEL_UUID)
+ references SERVICE (MODEL_UUID);
+
+ alter table VF_MODULE
+ add constraint FK_12jptc9it7gs3pru08skobxxc
+ foreign key (VNF_RESOURCE_MODEL_UUID)
+ references VNF_RESOURCE (MODEL_UUID);
+
+ alter table VNF_RESOURCE_CUSTOMIZATION
+ add constraint FK_iff1ayhb1hrp5jhea3vvikuni
+ foreign key (VNF_RESOURCE_MODEL_UUID)
+ references VNF_RESOURCE (MODEL_UUID);
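Illustrative only (not part of the patch): the foreign keys added above mean catalog rows must be created parent-first. A minimal, hypothetical example of the SERVICE / SERVICE_RECIPE relationship enforced by FK_i3r1b8j6e7dg9hkp49evnnm5y; the UUIDs and the orchestration URI below are placeholders, not real catalog data.

-- Parent row first: SERVICE.MODEL_UUID is the referenced key.
INSERT INTO SERVICE (MODEL_UUID, MODEL_NAME, MODEL_VERSION, MODEL_INVARIANT_UUID)
VALUES ('example-service-uuid', 'ExampleService', '1.0', 'example-invariant-uuid');

-- Child row second: SERVICE_MODEL_UUID must match an existing SERVICE row,
-- and (SERVICE_MODEL_UUID, ACTION) must be unique per UK_2lr377dpqnvl5aqlp5dtj2fcp.
INSERT INTO SERVICE_RECIPE (SERVICE_MODEL_UUID, ACTION, ORCHESTRATION_URI)
VALUES ('example-service-uuid', 'createInstance', '/example/orchestration/uri');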
diff --git a/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/main-schemas/MySQL-Requests-schema.sql b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/main-schemas/MySQL-Requests-schema.sql
new file mode 100644
index 000000000..f64548e23
--- /dev/null
+++ b/test/csit/scripts/so/mariadb/docker-entrypoint-initdb.d/db-sql-scripts/main-schemas/MySQL-Requests-schema.sql
@@ -0,0 +1,84 @@
+
+ drop table if exists INFRA_ACTIVE_REQUESTS;
+
+ drop table if exists SITE_STATUS;
+
+ create table INFRA_ACTIVE_REQUESTS (
+ REQUEST_ID varchar(45) not null,
+ CLIENT_REQUEST_ID varchar(45),
+ ACTION varchar(45) not null,
+ REQUEST_STATUS varchar(20),
+ STATUS_MESSAGE varchar(2000),
+ PROGRESS bigint,
+ START_TIME datetime,
+ END_TIME datetime,
+ SOURCE varchar(45),
+ VNF_ID varchar(45),
+ VNF_NAME varchar(80),
+ VNF_TYPE varchar(200),
+ SERVICE_TYPE varchar(45),
+ AIC_NODE_CLLI varchar(11),
+ TENANT_ID varchar(45),
+ PROV_STATUS varchar(20),
+ VNF_PARAMS longtext,
+ VNF_OUTPUTS longtext,
+ REQUEST_BODY longtext,
+ RESPONSE_BODY longtext,
+ LAST_MODIFIED_BY varchar(50),
+ MODIFY_TIME datetime,
+ REQUEST_TYPE varchar(20),
+ VOLUME_GROUP_ID varchar(45),
+ VOLUME_GROUP_NAME varchar(45),
+ VF_MODULE_ID varchar(45),
+ VF_MODULE_NAME varchar(200),
+ VF_MODULE_MODEL_NAME varchar(200),
+ AAI_SERVICE_ID varchar(50),
+ AIC_CLOUD_REGION varchar(11),
+ CALLBACK_URL varchar(200),
+ CORRELATOR varchar(80),
+ SERVICE_INSTANCE_ID varchar(45),
+ SERVICE_INSTANCE_NAME varchar(80),
+ REQUEST_SCOPE varchar(20),
+ REQUEST_ACTION varchar(45) not null,
+ NETWORK_ID varchar(45),
+ NETWORK_NAME varchar(80),
+ NETWORK_TYPE varchar(80),
+ REQUESTOR_ID varchar(80),
+ primary key (REQUEST_ID)
+ );
+
+ create table SITE_STATUS (
+ SITE_NAME varchar(255) not null,
+ STATUS bit,
+ CREATION_TIMESTAMP datetime default CURRENT_TIMESTAMP,
+ primary key (SITE_NAME)
+ );
+ create table OPERATION_STATUS (
+ SERVICE_ID varchar(255) not null,
+ OPERATION_ID varchar(255) not null,
+ SERVICE_NAME varchar(255),
+ OPERATION_TYPE varchar(255),
+ USER_ID varchar(255),
+ RESULT varchar(255),
+ OPERATION_CONTENT varchar(255),
+ PROGRESS varchar(255),
+ REASON varchar(255),
+ OPERATE_AT datetime,
+ FINISHED_AT datetime,
+ primary key (SERVICE_ID,OPERATION_ID)
+ );
+ create table RESOURCE_OPERATION_STATUS (
+ SERVICE_ID varchar(255) not null,
+ OPERATION_ID varchar(255) not null,
+ RESOURCE_TEMPLATE_UUID varchar(255) not null,
+ OPER_TYPE varchar(255),
+ RESOURCE_INSTANCE_ID varchar(255),
+ JOB_ID varchar(255),
+ STATUS varchar(255),
+ PROGRESS varchar(255),
+ ERROR_CODE varchar(255),
+ STATUS_DESCRIPOTION varchar(255),
+ primary key (SERVICE_ID,OPERATION_ID,RESOURCE_TEMPLATE_UUID)
+ );
+ alter table INFRA_ACTIVE_REQUESTS
+ add constraint UK_bhu6w8p7wvur4pin0gjw2d5ak unique (CLIENT_REQUEST_ID);
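For context (not part of the patch): the unique key just added means INFRA_ACTIVE_REQUESTS can hold at most one row per CLIENT_REQUEST_ID (multiple NULLs remain allowed under MySQL/MariaDB unique-index semantics). A hypothetical lookup by client request id:

-- Illustrative query only; the id value is a placeholder.
SELECT REQUEST_ID, REQUEST_STATUS, PROGRESS, START_TIME, END_TIME
  FROM INFRA_ACTIVE_REQUESTS
 WHERE CLIENT_REQUEST_ID = 'example-client-request-id';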
diff --git a/test/csit/tests/clamp/APIs/01__TCA.robot b/test/csit/tests/clamp/APIs/01__TCA.robot
index c0b268bf1..c96d9bcab 100644
--- a/test/csit/tests/clamp/APIs/01__TCA.robot
+++ b/test/csit/tests/clamp/APIs/01__TCA.robot
@@ -44,19 +44,10 @@ Put Requests to add Close Loop TCA Model1
${resp}= Put Request clamp /restservices/clds/v1/clds/model/TCAModel1 data=${data} headers=${headers}
Should Be Equal As Strings ${resp.status_code} 200
-Put Requests to add Close Loop TCA Model2
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
- Create Session clamp http://localhost:8080 auth=${auth}
- ${data}= Get Binary File ${CURDIR}${/}data${/}createTCAModel2.json
- &{headers}= Create Dictionary Content-Type=application/json
- ${resp}= Put Request clamp /restservices/clds/v1/clds/model/TCAModel2 data=${data} headers=${headers}
- Should Be Equal As Strings ${resp.status_code} 200
-
-Get Requests verify TCA Model1 and Model2 found
+Get Requests verify TCA Model1 found
${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model-names
Should Be Equal As Strings ${resp.status_code} 200
Should Contain Match ${resp} *TCAModel1*
- Should Contain Match ${resp} *TCAModel2*
Should Not Contain Match ${resp} *TCAModel99*
diff --git a/test/csit/tests/dcaegen2/testcases/resources/VES-4.27.2-dataformat.json b/test/csit/tests/dcaegen2/testcases/resources/VES-4.27.2-dataformat.json
index d4e3e4944..45faaa5ab 100644
--- a/test/csit/tests/dcaegen2/testcases/resources/VES-4.27.2-dataformat.json
+++ b/test/csit/tests/dcaegen2/testcases/resources/VES-4.27.2-dataformat.json
@@ -1,1176 +1,1161 @@
-{
- "self": {
- "name": "VES_specification",
- "version": "4.27.2",
- "description": "VES spec from v4.1 and 27.2 spec"
-
- },
- "dataformatversion": "1.0.0",
- "jsonschema":
- {
- "$schema": "http://json-schema.org/draft-04/schema#",
-
- "definitions": {
- "attCopyrightNotice": {
- "description": "Copyright (c) <2016>, AT&T Intellectual Property. All other rights reserved",
- "type": "object",
- "properties": {
- "useAndRedistribution": {
- "description": "Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:",
- "type": "string"
- },
- "condition1": {
- "description": "Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.",
- "type": "string"
- },
- "condition2": {
- "description": "Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.",
- "type": "string"
- },
- "condition3": {
- "description": "All advertising materials mentioning features or use of this software must display the following acknowledgement: This product includes software developed by the AT&T.",
- "type": "string"
- },
- "condition4": {
- "description": "Neither the name of AT&T nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.",
- "type": "string"
- },
- "disclaimerLine1": {
- "description": "THIS SOFTWARE IS PROVIDED BY AT&T INTELLECTUAL PROPERTY AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS",
- "type": "string"
- },
- "disclaimerLine2": {
- "description": "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL AT&T INTELLECTUAL PROPERTY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES",
- "type": "string"
- },
- "disclaimerLine3": {
- "description": "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,",
- "type": "string"
- },
- "disclaimerLine4": {
- "description": "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.",
- "type": "string"
- }
- }
- },
- "codecsInUse": {
- "description": "number of times an identified codec was used over the measurementInterval",
- "type": "object",
- "properties": {
- "codecIdentifier": { "type": "string" },
- "numberInUse": { "type": "number" }
- },
- "required": [ "codecIdentifier", "numberInUse" ]
- },
- "command": {
- "description": "command from an event collector toward an event source",
- "type": "object",
- "properties": {
- "commandType": {
- "type": "string",
- "enum": [
- "heartbeatIntervalChange",
- "measurementIntervalChange",
- "provideThrottlingState",
- "throttlingSpecification"
- ]
- },
- "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" },
- "measurementInterval": { "type": "number" }
- },
- "required": [ "commandType" ]
- },
- "commandList": {
- "description": "array of commands from an event collector toward an event source",
- "type": "array",
- "items": {
- "$ref": "#/definitions/commandListEntry"
- },
- "minItems": 0
- },
- "commandListEntry": {
- "description": "reference to a command object",
- "type": "object",
- "properties": {
- "command": {"$ref": "#/definitions/command"}
- },
- "required": [ "command" ]
- },
- "commonEventHeader": {
- "description": "fields common to all events",
- "type": "object",
- "properties": {
- "domain": {
- "description": "the eventing domain associated with the event",
- "type": "string",
- "enum": [
- "fault",
- "heartbeat",
- "measurementsForVfScaling",
- "mobileFlow",
- "other",
- "stateChange",
- "syslog",
- "thresholdCrossingAlert"
- ]
- },
- "eventId": {
- "description": "event key that is unique to the event source",
- "type": "string"
- },
- "eventType": {
- "description": "unique event topic name",
- "type": "string"
- },
- "functionalRole": {
- "description": "function of the event source e.g., eNodeB, MME, PCRF",
- "type": "string"
- },
- "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" },
- "lastEpochMicrosec": {
- "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
- "type": "number"
- },
- "priority": {
- "description": "processing priority",
- "type": "string",
- "enum": [
- "High",
- "Medium",
- "Normal",
- "Low"
- ]
- },
- "reportingEntityId": {
- "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process",
- "type": "string"
- },
- "reportingEntityName": {
- "description": "name of the entity reporting the event, for example, an OAM VM",
- "type": "string"
- },
- "sequence": {
- "description": "ordering of events communicated by an event source instance or 0 if not needed",
- "type": "integer"
- },
- "sourceId": {
- "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process",
- "type": "string"
- },
- "sourceName": {
- "description": "name of the entity experiencing the event issue",
- "type": "string"
- },
- "startEpochMicrosec": {
- "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
- "type": "number"
- },
- "version": {
- "description": "version of the event header",
- "type": "number"
- }
- },
- "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec",
- "priority", "reportingEntityName", "sequence",
- "sourceName", "startEpochMicrosec" ]
- },
- "counter": {
- "description": "performance counter",
- "type": "object",
- "properties": {
- "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] },
- "name": { "type": "string" },
- "thresholdCrossed": { "type": "string" },
- "value": { "type": "string"}
- },
- "required": [ "criticality", "name", "thresholdCrossed", "value" ]
- },
- "cpuUsage": {
- "description": "percent usage of an identified CPU",
- "type": "object",
- "properties": {
- "cpuIdentifier": { "type": "string" },
- "percentUsage": { "type": "number" }
- },
- "required": [ "cpuIdentifier", "percentUsage" ]
- },
- "errors": {
- "description": "receive and transmit errors for the measurements domain",
- "type": "object",
- "properties": {
- "receiveDiscards": { "type": "number" },
- "receiveErrors": { "type": "number" },
- "transmitDiscards": { "type": "number" },
- "transmitErrors": { "type": "number" }
- },
- "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ]
- },
- "event": {
- "description": "the root level of the common event format",
- "type": "object",
- "properties": {
- "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" },
- "faultFields": { "$ref": "#/definitions/faultFields" },
- "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" },
- "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" },
- "otherFields": { "$ref": "#/definitions/otherFields" },
- "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" },
- "syslogFields": { "$ref": "#/definitions/syslogFields" },
- "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" }
- },
- "required": [ "commonEventHeader" ]
- },
- "eventDomainThrottleSpecification": {
- "description": "specification of what information to suppress within an event domain",
- "type": "object",
- "properties": {
- "eventDomain": {
- "description": "Event domain enum from the commonEventHeader domain field",
- "type": "string"
- },
- "suppressedFieldNames": {
- "description": "List of optional field names in the event block that should not be sent to the Event Listener",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "suppressedNvPairsList": {
- "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field",
- "type": "array",
- "items": {
- "$ref": "#/definitions/suppressedNvPairs"
- }
- }
- },
- "required": [ "eventDomain" ]
- },
- "eventDomainThrottleSpecificationList": {
- "description": "array of eventDomainThrottleSpecifications",
- "type": "array",
- "items": {
- "$ref": "#/definitions/eventDomainThrottleSpecification"
- },
- "minItems": 0
- },
- "eventList": {
- "description": "array of events",
- "type": "array",
- "items": {
- "$ref": "#/definitions/event"
- }
- },
- "eventThrottlingState": {
- "description": "reports the throttling in force at the event source",
- "type": "object",
- "properties": {
- "eventThrottlingMode": {
- "description": "Mode the event manager is in",
- "type": "string",
- "enum": [
- "normal",
- "throttled"
- ]
- },
- "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" }
- },
- "required": [ "eventThrottlingMode" ]
- },
- "faultFields": {
- "description": "fields specific to fault events",
- "type": "object",
- "properties": {
- "alarmAdditionalInformation": {
- "description": "additional alarm information",
- "type": "array",
- "items": {
- "$ref": "#/definitions/field"
- }
- },
- "alarmCondition": {
- "description": "alarm condition reported by the device",
- "type": "string"
- },
- "alarmInterfaceA": {
- "description": "card, port, channel or interface name of the device generating the alarm",
- "type": "string"
- },
- "eventSeverity": {
- "description": "event severity or priority",
- "type": "string",
- "enum": [
- "CRITICAL",
- "MAJOR",
- "MINOR",
- "WARNING",
- "NORMAL"
- ]
- },
- "eventSourceType": {
- "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction",
- "type": "string"
- },
- "faultFieldsVersion": {
- "description": "version of the faultFields block",
- "type": "number"
- },
- "specificProblem": {
- "description": "short description of the alarm or problem",
- "type": "string"
- },
- "vfStatus": {
- "description": "virtual function status enumeration",
- "type": "string",
- "enum": [
- "Active",
- "Idle",
- "Preparing to terminate",
- "Ready to terminate",
- "Requesting termination"
- ]
- }
- },
- "required": [ "alarmCondition", "eventSeverity",
- "eventSourceType", "specificProblem", "vfStatus" ]
- },
- "featuresInUse": {
- "description": "number of times an identified feature was used over the measurementInterval",
- "type": "object",
- "properties": {
- "featureIdentifier": { "type": "string" },
- "featureUtilization": { "type": "number" }
- },
- "required": [ "featureIdentifier", "featureUtilization" ]
- },
- "field": {
- "description": "name value pair",
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "value": { "type": "string" }
- },
- "required": [ "name", "value" ]
- },
- "filesystemUsage": {
- "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second",
- "type": "object",
- "properties": {
- "blockConfigured": { "type": "number" },
- "blockIops": { "type": "number" },
- "blockUsed": { "type": "number" },
- "ephemeralConfigured": { "type": "number" },
- "ephemeralIops": { "type": "number" },
- "ephemeralUsed": { "type": "number" },
- "filesystemName": { "type": "string" }
- },
- "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured",
- "ephemeralIops", "ephemeralUsed", "filesystemName" ]
- },
- "gtpPerFlowMetrics": {
- "description": "Mobility GTP Protocol per flow metrics",
- "type": "object",
- "properties": {
- "avgBitErrorRate": {
- "description": "average bit error rate",
- "type": "number"
- },
- "avgPacketDelayVariation": {
- "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets",
- "type": "number"
- },
- "avgPacketLatency": {
- "description": "average delivery latency",
- "type": "number"
- },
- "avgReceiveThroughput": {
- "description": "average receive throughput",
- "type": "number"
- },
- "avgTransmitThroughput": {
- "description": "average transmit throughput",
- "type": "number"
- },
- "durConnectionFailedStatus": {
- "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval",
- "type": "number"
- },
- "durTunnelFailedStatus": {
- "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval",
- "type": "number"
- },
- "flowActivatedBy": {
- "description": "Endpoint activating the flow",
- "type": "string"
- },
- "flowActivationEpoch": {
- "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available",
- "type": "number"
- },
- "flowActivationMicrosec": {
- "description": "Integer microseconds for the start of the flow connection",
- "type": "number"
- },
- "flowActivationTime": {
- "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "flowDeactivatedBy": {
- "description": "Endpoint deactivating the flow",
- "type": "string"
- },
- "flowDeactivationEpoch": {
- "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time",
- "type": "number"
- },
- "flowDeactivationMicrosec": {
- "description": "Integer microseconds for the start of the flow connection",
- "type": "number"
- },
- "flowDeactivationTime": {
- "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "flowStatus": {
- "description": "connection status at reporting time as a working / inactive / failed indicator value",
- "type": "string"
- },
- "gtpConnectionStatus": {
- "description": "Current connection state at reporting time",
- "type": "string"
- },
- "gtpTunnelStatus": {
- "description": "Current tunnel state at reporting time",
- "type": "string"
- },
- "ipTosCountList": {
- "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow",
- "type": "array",
- "items": {
- "type": "array",
- "items": [
- { "type": "string" },
- { "type": "number" }
- ]
- }
- },
- "ipTosList": {
- "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "largePacketRtt": {
- "description": "large packet round trip time",
- "type": "number"
- },
- "largePacketThreshold": {
- "description": "large packet threshold being applied",
- "type": "number"
- },
- "maxPacketDelayVariation": {
- "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets",
- "type": "number"
- },
- "maxReceiveBitRate": {
- "description": "maximum receive bit rate",
- "type": "number"
- },
- "maxTransmitBitRate": {
- "description": "maximum transmit bit rate",
- "type": "number"
- },
- "mobileQciCosCountList": {
- "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow",
- "type": "array",
- "items": {
- "type": "array",
- "items": [
- { "type": "string" },
- { "type": "number" }
- ]
- }
- },
- "mobileQciCosList": {
- "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "numActivationFailures": {
- "description": "Number of failed activation requests, as observed by the reporting node",
- "type": "number"
- },
- "numBitErrors": {
- "description": "number of errored bits",
- "type": "number"
- },
- "numBytesReceived": {
- "description": "number of bytes received, including retransmissions",
- "type": "number"
- },
- "numBytesTransmitted": {
- "description": "number of bytes transmitted, including retransmissions",
- "type": "number"
- },
- "numDroppedPackets": {
- "description": "number of received packets dropped due to errors per virtual interface",
- "type": "number"
- },
- "numGtpEchoFailures": {
- "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2",
- "type": "number"
- },
- "numGtpTunnelErrors": {
- "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1",
- "type": "number"
- },
- "numHttpErrors": {
- "description": "Http error count",
- "type": "number"
- },
- "numL7BytesReceived": {
- "description": "number of tunneled layer 7 bytes received, including retransmissions",
- "type": "number"
- },
- "numL7BytesTransmitted": {
- "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions",
- "type": "number"
- },
- "numLostPackets": {
- "description": "number of lost packets",
- "type": "number"
- },
- "numOutOfOrderPackets": {
- "description": "number of out-of-order packets",
- "type": "number"
- },
- "numPacketErrors": {
- "description": "number of errored packets",
- "type": "number"
- },
- "numPacketsReceivedExclRetrans": {
- "description": "number of packets received, excluding retransmission",
- "type": "number"
- },
- "numPacketsReceivedInclRetrans": {
- "description": "number of packets received, including retransmission",
- "type": "number"
- },
- "numPacketsTransmittedInclRetrans": {
- "description": "number of packets transmitted, including retransmissions",
- "type": "number"
- },
- "numRetries": {
- "description": "number of packet retries",
- "type": "number"
- },
- "numTimeouts": {
- "description": "number of packet timeouts",
- "type": "number"
- },
- "numTunneledL7BytesReceived": {
- "description": "number of tunneled layer 7 bytes received, excluding retransmissions",
- "type": "number"
- },
- "roundTripTime": {
- "description": "round trip time",
- "type": "number"
- },
- "tcpFlagCountList": {
- "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow",
- "type": "array",
- "items": {
- "type": "array",
- "items": [
- { "type": "string" },
- { "type": "number" }
- ]
- }
- },
- "tcpFlagList": {
- "description": "Array of unique TCP Flags observed in the flow",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "timeToFirstByte": {
- "description": "Time in milliseconds between the connection activation and first byte received",
- "type": "number"
- }
- },
- "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency",
- "avgReceiveThroughput", "avgTransmitThroughput",
- "flowActivationEpoch", "flowActivationMicrosec",
- "flowDeactivationEpoch", "flowDeactivationMicrosec",
- "flowDeactivationTime", "flowStatus",
- "maxPacketDelayVariation", "numActivationFailures",
- "numBitErrors", "numBytesReceived", "numBytesTransmitted",
- "numDroppedPackets", "numL7BytesReceived",
- "numL7BytesTransmitted", "numLostPackets",
- "numOutOfOrderPackets", "numPacketErrors",
- "numPacketsReceivedExclRetrans",
- "numPacketsReceivedInclRetrans",
- "numPacketsTransmittedInclRetrans",
- "numRetries", "numTimeouts", "numTunneledL7BytesReceived",
- "roundTripTime", "timeToFirstByte"
- ]
- },
- "internalHeaderFields": {
- "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources",
- "type": "object"
- },
- "latencyBucketMeasure": {
- "description": "number of counts falling within a defined latency bucket",
- "type": "object",
- "properties": {
- "countsInTheBucket": { "type": "number" },
- "highEndOfLatencyBucket": { "type": "number" },
- "lowEndOfLatencyBucket": { "type": "number" }
- },
- "required": [ "countsInTheBucket" ]
- },
- "measurementGroup": {
- "description": "measurement group",
- "type": "object",
- "properties": {
- "name": { "type": "string" },
- "measurements": {
- "description": "array of name value pair measurements",
- "type": "array",
- "items": {
- "$ref": "#/definitions/field"
- }
- }
- },
- "required": [ "name", "measurements" ]
- },
- "measurementsForVfScalingFields": {
- "description": "measurementsForVfScaling fields",
- "type": "object",
- "properties": {
- "additionalMeasurements": {
- "description": "additional measurement fields",
- "type": "array",
- "items": {
- "$ref": "#/definitions/measurementGroup"
- }
- },
- "aggregateCpuUsage": {
- "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running",
- "type": "number"
- },
- "codecUsageArray": {
- "description": "array of codecs in use",
- "type": "array",
- "items": {
- "$ref": "#/definitions/codecsInUse"
- }
- },
- "concurrentSessions": {
- "description": "peak concurrent sessions for the VM or VNF over the measurementInterval",
- "type": "number"
- },
- "configuredEntities": {
- "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF",
- "type": "number"
- },
- "cpuUsageArray": {
- "description": "usage of an array of CPUs",
- "type": "array",
- "items": {
- "$ref": "#/definitions/cpuUsage"
- }
- },
- "errors": { "$ref": "#/definitions/errors" },
- "featureUsageArray": {
- "description": "array of features in use",
- "type": "array",
- "items": {
- "$ref": "#/definitions/featuresInUse"
- }
- },
- "filesystemUsageArray": {
- "description": "filesystem usage of the VM on which the VNFC reporting the event is running",
- "type": "array",
- "items": {
- "$ref": "#/definitions/filesystemUsage"
- }
- },
- "latencyDistribution": {
- "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges",
- "type": "array",
- "items": {
- "$ref": "#/definitions/latencyBucketMeasure"
- }
- },
- "meanRequestLatency": {
- "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running",
- "type": "number"
- },
- "measurementInterval": {
- "description": "interval over which measurements are being reported in seconds",
- "type": "number"
- },
- "measurementsForVfScalingVersion": {
- "description": "version of the measurementsForVfScaling block",
- "type": "number"
- },
- "memoryConfigured": {
- "description": "memory in MB configured in the VM on which the VNFC reporting the event is running",
- "type": "number"
- },
- "memoryUsed": {
- "description": "memory usage in MB of the VM on which the VNFC reporting the event is running",
- "type": "number"
- },
- "numberOfMediaPortsInUse": {
- "description": "number of media ports in use",
- "type": "number"
- },
- "requestRate": {
- "description": "peak rate of service requests per second to the VNF over the measurementInterval",
- "type": "number"
- },
- "vnfcScalingMetric": {
- "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC",
- "type": "number"
- },
- "vNicUsageArray": {
- "description": "usage of an array of virtual network interface cards",
- "type": "array",
- "items": {
- "$ref": "#/definitions/vNicUsage"
- }
- }
- },
- "required": [ "measurementInterval" ]
- },
- "mobileFlowFields": {
- "description": "mobileFlow fields",
- "type": "object",
- "properties": {
- "additionalFields": {
- "description": "additional mobileFlow fields if needed",
- "type": "array",
- "items": {
- "$ref": "#/definitions/field"
- }
- },
- "applicationType": {
- "description": "Application type inferred",
- "type": "string"
- },
- "appProtocolType": {
- "description": "application protocol",
- "type": "string"
- },
- "appProtocolVersion": {
- "description": "application protocol version",
- "type": "string"
- },
- "cid": {
- "description": "cell id",
- "type": "string"
- },
- "connectionType": {
- "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc",
- "type": "string"
- },
- "ecgi": {
- "description": "Evolved Cell Global Id",
- "type": "string"
- },
- "flowDirection": {
- "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow",
- "type": "string"
- },
- "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" },
- "gtpProtocolType": {
- "description": "GTP protocol",
- "type": "string"
- },
- "gtpVersion": {
- "description": "GTP protocol version",
- "type": "string"
- },
- "httpHeader": {
- "description": "HTTP request header, if the flow connects to a node referenced by HTTP",
- "type": "string"
- },
- "imei": {
- "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device",
- "type": "string"
- },
- "imsi": {
- "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device",
- "type": "string"
- },
- "ipProtocolType": {
- "description": "IP protocol type e.g., TCP, UDP, RTP...",
- "type": "string"
- },
- "ipVersion": {
- "description": "IP protocol version e.g., IPv4, IPv6",
- "type": "string"
- },
- "lac": {
- "description": "location area code",
- "type": "string"
- },
- "mcc": {
- "description": "mobile country code",
- "type": "string"
- },
- "mnc": {
- "description": "mobile network code",
- "type": "string"
- },
- "mobileFlowFieldsVersion": {
- "description": "version of the mobileFlowFields block",
- "type": "number"
- },
- "msisdn": {
- "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device",
- "type": "string"
- },
- "otherEndpointIpAddress": {
- "description": "IP address for the other endpoint, as used for the flow being reported on",
- "type": "string"
- },
- "otherEndpointPort": {
- "description": "IP Port for the reporting entity, as used for the flow being reported on",
- "type": "number"
- },
- "otherFunctionalRole": {
- "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...",
- "type": "string"
- },
- "rac": {
- "description": "routing area code",
- "type": "string"
- },
- "radioAccessTechnology": {
- "description": "Radio Access Technology e.g., 2G, 3G, LTE",
- "type": "string"
- },
- "reportingEndpointIpAddr": {
- "description": "IP address for the reporting entity, as used for the flow being reported on",
- "type": "string"
- },
- "reportingEndpointPort": {
- "description": "IP port for the reporting entity, as used for the flow being reported on",
- "type": "number"
- },
- "sac": {
- "description": "service area code",
- "type": "string"
- },
- "samplingAlgorithm": {
- "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied",
- "type": "number"
- },
- "tac": {
- "description": "transport area code",
- "type": "string"
- },
- "tunnelId": {
- "description": "tunnel identifier",
- "type": "string"
- },
- "vlanId": {
- "description": "VLAN identifier used by this flow",
- "type": "string"
- }
- },
- "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType",
- "ipVersion", "otherEndpointIpAddress", "otherEndpointPort",
- "reportingEndpointIpAddr", "reportingEndpointPort" ]
- },
- "otherFields": {
- "description": "additional fields not reported elsewhere",
- "type": "array",
- "items": {
- "$ref": "#/definitions/field"
- }
- },
- "requestError": {
- "description": "standard request error data structure",
- "type": "object",
- "properties": {
- "messageId": {
- "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception",
- "type": "string"
- },
- "text": {
- "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1",
- "type": "string"
- },
- "url": {
- "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents",
- "type": "string"
- },
- "variables": {
- "description": "List of zero or more strings that represent the contents of the variables used by the message text",
- "type": "string"
- }
- },
- "required": [ "messageId", "text" ]
- },
- "stateChangeFields": {
- "description": "stateChange fields",
- "type": "object",
- "properties": {
- "additionalFields": {
- "description": "additional stateChange fields if needed",
- "type": "array",
- "items": {
- "$ref": "#/definitions/field"
- }
- },
- "newState": {
- "description": "new state of the entity",
- "type": "string",
- "enum": [
- "inService",
- "maintenance",
- "outOfService"
- ]
- },
- "oldState": {
- "description": "previous state of the entity",
- "type": "string",
- "enum": [
- "inService",
- "maintenance",
- "outOfService"
- ]
- },
- "stateChangeFieldsVersion": {
- "description": "version of the stateChangeFields block",
- "type": "number"
- },
- "stateInterface": {
- "description": "card or port name of the entity that changed state",
- "type": "string"
- }
- },
- "required": [ "newState", "oldState", "stateInterface" ]
- },
- "suppressedNvPairs": {
- "description": "List of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling",
- "type": "object",
- "properties": {
- "nvPairFieldName": {
- "description": "Name of the field within which are the nvpair names to suppress",
- "type": "string"
- },
- "suppressedNvPairNames": {
- "description": "Array of nvpair names to suppress within the nvpairFieldName",
- "type": "array",
- "items": {
- "type": "string"
- }
- }
- },
- "required": [ "nvPairFieldName", "suppressedNvPairNames" ]
- },
- "syslogFields": {
- "description": "sysLog fields",
- "type": "object",
- "properties": {
- "additionalFields": {
- "description": "additional syslog fields if needed",
- "type": "array",
- "items": {
- "$ref": "#/definitions/field"
- }
- },
- "eventSourceHost": {
- "description": "hostname of the device",
- "type": "string"
- },
- "eventSourceType": {
- "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction",
- "type": "string"
- },
- "syslogFacility": {
- "description": "numeric code from 0 to 23 for facility--see table in documentation",
- "type": "number"
- },
- "syslogFieldsVersion": {
- "description": "version of the syslogFields block",
- "type": "number"
- },
- "syslogMsg": {
- "description": "syslog message",
- "type": "string"
- },
- "syslogPri": {
- "description": "0-192 combined severity and facility",
- "type": "number"
- },
- "syslogProc": {
- "description": "identifies the application that originated the message",
- "type": "string"
- },
- "syslogProcId": {
- "description": "a change in the value of this field indicates a discontinuity in syslog reporting",
- "type": "number"
- },
- "syslogSData": {
- "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs",
- "type": "string"
- },
- "syslogSdId": {
- "description": "0-32 char in format name@number for example ourSDID@32473",
- "type": "string"
- },
- "syslogSev": {
- "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8",
- "type": "string"
- },
- "syslogTag": {
- "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided",
- "type": "string"
- },
- "syslogVer": {
- "description": "IANA assigned version of the syslog protocol specification - typically 1",
- "type": "number"
- }
- },
- "required": [ "eventSourceType", "syslogMsg", "syslogTag" ]
- },
- "thresholdCrossingAlertFields": {
- "description": "fields specific to threshold crossing alert events",
- "type": "object",
- "properties": {
- "additionalFields": {
- "description": "additional threshold crossing alert fields if needed",
- "type": "array",
- "items": {
- "$ref": "#/definitions/field"
- }
- },
- "additionalParameters": {
- "description": "performance counters",
- "type": "array",
- "items": {
- "$ref": "#/definitions/counter"
- }
- },
- "alertAction": {
- "description": "Event action",
- "type": "string",
- "enum": [
- "CLEAR",
- "CONT",
- "SET"
- ]
- },
- "alertDescription": {
- "description": "Unique short alert description such as IF-SHUB-ERRDROP",
- "type": "string"
- },
- "alertType": {
- "description": "Event type",
- "type": "string",
- "enum": [
- "CARD-ANOMALY",
- "ELEMENT-ANOMALY",
- "INTERFACE-ANOMALY",
- "SERVICE-ANOMALY"
- ]
- },
- "alertValue": {
- "description": "Calculated API value (if applicable)",
- "type": "string"
- },
- "associatedAlertIdList": {
- "description": "List of eventIds associated with the event being reported",
- "type": "array",
- "items": { "type": "string" }
- },
- "collectionTimestamp": {
- "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "dataCollector": {
- "description": "Specific performance collector instance used",
- "type": "string"
- },
- "elementType": {
- "description": "type of network element - internal ATT field",
- "type": "string"
- },
- "eventSeverity": {
- "description": "event severity or priority",
- "type": "string",
- "enum": [
- "CRITICAL",
- "MAJOR",
- "MINOR",
- "WARNING",
- "NORMAL"
- ]
- },
- "eventStartTimestamp": {
- "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "interfaceName": {
- "description": "Physical or logical port or card (if applicable)",
- "type": "string"
- },
- "networkService": {
- "description": "network name - internal ATT field",
- "type": "string"
- },
- "possibleRootCause": {
- "description": "Reserved for future use",
- "type": "string"
- },
- "thresholdCrossingFieldsVersion": {
- "description": "version of the thresholdCrossingAlertFields block",
- "type": "number"
- }
- },
- "required": [
- "additionalParameters",
- "alertAction",
- "alertDescription",
- "alertType",
- "collectionTimestamp",
- "eventSeverity",
- "eventStartTimestamp"
- ]
- },
- "vNicUsage": {
- "description": "usage of identified virtual network interface card",
- "type": "object",
- "properties": {
- "broadcastPacketsIn": { "type": "number" },
- "broadcastPacketsOut": { "type": "number" },
- "bytesIn": { "type": "number" },
- "bytesOut": { "type": "number" },
- "multicastPacketsIn": { "type": "number" },
- "multicastPacketsOut": { "type": "number" },
- "packetsIn": { "type": "number" },
- "packetsOut": { "type": "number" },
- "unicastPacketsIn": { "type": "number" },
- "unicastPacketsOut": { "type": "number" },
- "vNicIdentifier": { "type": "string" }
- },
- "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"]
- }
- },
- "title": "Event Listener",
- "type": "object",
- "properties": {
- "event": {"$ref": "#/definitions/event"}
- }
- }
-
-}
\ No newline at end of file
+{
+ "self": {
+ "name": "VES_specification",
+ "version": "4.27.2",
+ "description": "VES spec from v4.1 and 27.2 spec"
+
+ },
+ "dataformatversion": "1.0.0",
+ "jsonschema":
+ {
+ "$schema": "http://json-schema.org/draft-04/schema#",
+
+ "definitions": {
+ "attCopyrightNotice": {
+ "description": "Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.",
+ "type": "object",
+ "properties": {
+ "useAndRedistribution": {
+          "description": "Unless otherwise specified, all software contained herein is licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at",
+ "type": "string"
+ },
+        "licenseLink": {
+          "description": "http://www.apache.org/licenses/LICENSE-2.0",
+          "type": "string"
+        },
+ "condition1": {
+ "description": "Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS,",
+ "type": "string"
+ },
+ "condition2": {
+ "description": "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
+ "type": "string"
+ },
+ "condition3": {
+ "description": "See the License for the specific language governing permissions and limitations under the License.",
+ "type": "string"
+ },
+ "Trademarks": {
+ "description": "ECOMP is a trademark and service mark of AT&T Intellectual Property.",
+ "type": "string"
+ }
+ }
+ },
+ "codecsInUse": {
+ "description": "number of times an identified codec was used over the measurementInterval",
+ "type": "object",
+ "properties": {
+ "codecIdentifier": { "type": "string" },
+ "numberInUse": { "type": "number" }
+ },
+ "required": [ "codecIdentifier", "numberInUse" ]
+ },
+ "command": {
+ "description": "command from an event collector toward an event source",
+ "type": "object",
+ "properties": {
+ "commandType": {
+ "type": "string",
+ "enum": [
+ "heartbeatIntervalChange",
+ "measurementIntervalChange",
+ "provideThrottlingState",
+ "throttlingSpecification"
+ ]
+ },
+ "eventDomainThrottleSpecification": { "$ref": "#/definitions/eventDomainThrottleSpecification" },
+ "measurementInterval": { "type": "number" }
+ },
+ "required": [ "commandType" ]
+ },
+ "commandList": {
+ "description": "array of commands from an event collector toward an event source",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/commandListEntry"
+ },
+ "minItems": 0
+ },
+ "commandListEntry": {
+ "description": "reference to a command object",
+ "type": "object",
+ "properties": {
+ "command": {"$ref": "#/definitions/command"}
+ },
+ "required": [ "command" ]
+ },
+ "commonEventHeader": {
+ "description": "fields common to all events",
+ "type": "object",
+ "properties": {
+ "domain": {
+ "description": "the eventing domain associated with the event",
+ "type": "string",
+ "enum": [
+ "fault",
+ "heartbeat",
+ "measurementsForVfScaling",
+ "mobileFlow",
+ "other",
+ "stateChange",
+ "syslog",
+ "thresholdCrossingAlert"
+ ]
+ },
+ "eventId": {
+ "description": "event key that is unique to the event source",
+ "type": "string"
+ },
+ "eventType": {
+ "description": "unique event topic name",
+ "type": "string"
+ },
+ "functionalRole": {
+ "description": "function of the event source e.g., eNodeB, MME, PCRF",
+ "type": "string"
+ },
+ "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" },
+ "lastEpochMicrosec": {
+ "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
+ "type": "number"
+ },
+ "priority": {
+ "description": "processing priority",
+ "type": "string",
+ "enum": [
+ "High",
+ "Medium",
+ "Normal",
+ "Low"
+ ]
+ },
+ "reportingEntityId": {
+ "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process",
+ "type": "string"
+ },
+ "reportingEntityName": {
+ "description": "name of the entity reporting the event, for example, an OAM VM",
+ "type": "string"
+ },
+ "sequence": {
+ "description": "ordering of events communicated by an event source instance or 0 if not needed",
+ "type": "integer"
+ },
+ "sourceId": {
+ "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process",
+ "type": "string"
+ },
+ "sourceName": {
+ "description": "name of the entity experiencing the event issue",
+ "type": "string"
+ },
+ "startEpochMicrosec": {
+ "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
+ "type": "number"
+ },
+ "version": {
+ "description": "version of the event header",
+ "type": "number"
+ }
+ },
+ "required": [ "domain", "eventId", "functionalRole", "lastEpochMicrosec",
+ "priority", "reportingEntityName", "sequence",
+ "sourceName", "startEpochMicrosec" ]
+ },
+ "counter": {
+ "description": "performance counter",
+ "type": "object",
+ "properties": {
+ "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] },
+ "name": { "type": "string" },
+ "thresholdCrossed": { "type": "string" },
+ "value": { "type": "string"}
+ },
+ "required": [ "criticality", "name", "thresholdCrossed", "value" ]
+ },
+ "cpuUsage": {
+ "description": "percent usage of an identified CPU",
+ "type": "object",
+ "properties": {
+ "cpuIdentifier": { "type": "string" },
+ "percentUsage": { "type": "number" }
+ },
+ "required": [ "cpuIdentifier", "percentUsage" ]
+ },
+ "errors": {
+ "description": "receive and transmit errors for the measurements domain",
+ "type": "object",
+ "properties": {
+ "receiveDiscards": { "type": "number" },
+ "receiveErrors": { "type": "number" },
+ "transmitDiscards": { "type": "number" },
+ "transmitErrors": { "type": "number" }
+ },
+ "required": [ "receiveDiscards", "receiveErrors", "transmitDiscards", "transmitErrors" ]
+ },
+ "event": {
+ "description": "the root level of the common event format",
+ "type": "object",
+ "properties": {
+ "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" },
+ "faultFields": { "$ref": "#/definitions/faultFields" },
+ "measurementsForVfScalingFields": { "$ref": "#/definitions/measurementsForVfScalingFields" },
+ "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" },
+ "otherFields": { "$ref": "#/definitions/otherFields" },
+ "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" },
+ "syslogFields": { "$ref": "#/definitions/syslogFields" },
+ "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" }
+ },
+ "required": [ "commonEventHeader" ]
+ },
+ "eventDomainThrottleSpecification": {
+ "description": "specification of what information to suppress within an event domain",
+ "type": "object",
+ "properties": {
+ "eventDomain": {
+ "description": "Event domain enum from the commonEventHeader domain field",
+ "type": "string"
+ },
+ "suppressedFieldNames": {
+ "description": "List of optional field names in the event block that should not be sent to the Event Listener",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "suppressedNvPairsList": {
+ "description": "Optional list of specific NvPairsNames to suppress within a given Name-Value Field",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/suppressedNvPairs"
+ }
+ }
+ },
+ "required": [ "eventDomain" ]
+ },
+ "eventDomainThrottleSpecificationList": {
+ "description": "array of eventDomainThrottleSpecifications",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/eventDomainThrottleSpecification"
+ },
+ "minItems": 0
+ },
+ "eventList": {
+ "description": "array of events",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/event"
+ }
+ },
+ "eventThrottlingState": {
+ "description": "reports the throttling in force at the event source",
+ "type": "object",
+ "properties": {
+ "eventThrottlingMode": {
+ "description": "Mode the event manager is in",
+ "type": "string",
+ "enum": [
+ "normal",
+ "throttled"
+ ]
+ },
+ "eventDomainThrottleSpecificationList": { "$ref": "#/definitions/eventDomainThrottleSpecificationList" }
+ },
+ "required": [ "eventThrottlingMode" ]
+ },
+ "faultFields": {
+ "description": "fields specific to fault events",
+ "type": "object",
+ "properties": {
+ "alarmAdditionalInformation": {
+ "description": "additional alarm information",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "alarmCondition": {
+ "description": "alarm condition reported by the device",
+ "type": "string"
+ },
+ "alarmInterfaceA": {
+ "description": "card, port, channel or interface name of the device generating the alarm",
+ "type": "string"
+ },
+ "eventSeverity": {
+ "description": "event severity or priority",
+ "type": "string",
+ "enum": [
+ "CRITICAL",
+ "MAJOR",
+ "MINOR",
+ "WARNING",
+ "NORMAL"
+ ]
+ },
+ "eventSourceType": {
+ "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction",
+ "type": "string"
+ },
+ "faultFieldsVersion": {
+ "description": "version of the faultFields block",
+ "type": "number"
+ },
+ "specificProblem": {
+ "description": "short description of the alarm or problem",
+ "type": "string"
+ },
+ "vfStatus": {
+ "description": "virtual function status enumeration",
+ "type": "string",
+ "enum": [
+ "Active",
+ "Idle",
+ "Preparing to terminate",
+ "Ready to terminate",
+ "Requesting termination"
+ ]
+ }
+ },
+ "required": [ "alarmCondition", "eventSeverity",
+ "eventSourceType", "specificProblem", "vfStatus" ]
+ },
+ "featuresInUse": {
+ "description": "number of times an identified feature was used over the measurementInterval",
+ "type": "object",
+ "properties": {
+ "featureIdentifier": { "type": "string" },
+ "featureUtilization": { "type": "number" }
+ },
+ "required": [ "featureIdentifier", "featureUtilization" ]
+ },
+ "field": {
+ "description": "name value pair",
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "value": { "type": "string" }
+ },
+ "required": [ "name", "value" ]
+ },
+ "filesystemUsage": {
+ "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second",
+ "type": "object",
+ "properties": {
+ "blockConfigured": { "type": "number" },
+ "blockIops": { "type": "number" },
+ "blockUsed": { "type": "number" },
+ "ephemeralConfigured": { "type": "number" },
+ "ephemeralIops": { "type": "number" },
+ "ephemeralUsed": { "type": "number" },
+ "filesystemName": { "type": "string" }
+ },
+ "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured",
+ "ephemeralIops", "ephemeralUsed", "filesystemName" ]
+ },
+ "gtpPerFlowMetrics": {
+ "description": "Mobility GTP Protocol per flow metrics",
+ "type": "object",
+ "properties": {
+ "avgBitErrorRate": {
+ "description": "average bit error rate",
+ "type": "number"
+ },
+ "avgPacketDelayVariation": {
+ "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets",
+ "type": "number"
+ },
+ "avgPacketLatency": {
+ "description": "average delivery latency",
+ "type": "number"
+ },
+ "avgReceiveThroughput": {
+ "description": "average receive throughput",
+ "type": "number"
+ },
+ "avgTransmitThroughput": {
+ "description": "average transmit throughput",
+ "type": "number"
+ },
+ "durConnectionFailedStatus": {
+ "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval",
+ "type": "number"
+ },
+ "durTunnelFailedStatus": {
+ "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval",
+ "type": "number"
+ },
+ "flowActivatedBy": {
+ "description": "Endpoint activating the flow",
+ "type": "string"
+ },
+ "flowActivationEpoch": {
+ "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available",
+ "type": "number"
+ },
+ "flowActivationMicrosec": {
+ "description": "Integer microseconds for the start of the flow connection",
+ "type": "number"
+ },
+ "flowActivationTime": {
+ "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
+ "type": "string"
+ },
+ "flowDeactivatedBy": {
+ "description": "Endpoint deactivating the flow",
+ "type": "string"
+ },
+ "flowDeactivationEpoch": {
+ "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time",
+ "type": "number"
+ },
+ "flowDeactivationMicrosec": {
+ "description": "Integer microseconds for the start of the flow connection",
+ "type": "number"
+ },
+ "flowDeactivationTime": {
+ "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
+ "type": "string"
+ },
+ "flowStatus": {
+ "description": "connection status at reporting time as a working / inactive / failed indicator value",
+ "type": "string"
+ },
+ "gtpConnectionStatus": {
+ "description": "Current connection state at reporting time",
+ "type": "string"
+ },
+ "gtpTunnelStatus": {
+ "description": "Current tunnel state at reporting time",
+ "type": "string"
+ },
+ "ipTosCountList": {
+ "description": "array of key: value pairs where the keys are drawn from the IP Type-of-Service identifiers which range from '0' to '255', and the values are the count of packets that had those ToS identifiers in the flow",
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": [
+ { "type": "string" },
+ { "type": "number" }
+ ]
+ }
+ },
+ "ipTosList": {
+ "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "largePacketRtt": {
+ "description": "large packet round trip time",
+ "type": "number"
+ },
+ "largePacketThreshold": {
+ "description": "large packet threshold being applied",
+ "type": "number"
+ },
+ "maxPacketDelayVariation": {
+ "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets",
+ "type": "number"
+ },
+ "maxReceiveBitRate": {
+ "description": "maximum receive bit rate",
+ "type": "number"
+ },
+ "maxTransmitBitRate": {
+ "description": "maximum transmit bit rate",
+ "type": "number"
+ },
+ "mobileQciCosCountList": {
+ "description": "array of key: value pairs where the keys are drawn from LTE QCI or UMTS class of service strings, and the values are the count of packets that had those strings in the flow",
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": [
+ { "type": "string" },
+ { "type": "number" }
+ ]
+ }
+ },
+ "mobileQciCosList": {
+ "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "numActivationFailures": {
+ "description": "Number of failed activation requests, as observed by the reporting node",
+ "type": "number"
+ },
+ "numBitErrors": {
+ "description": "number of errored bits",
+ "type": "number"
+ },
+ "numBytesReceived": {
+ "description": "number of bytes received, including retransmissions",
+ "type": "number"
+ },
+ "numBytesTransmitted": {
+ "description": "number of bytes transmitted, including retransmissions",
+ "type": "number"
+ },
+ "numDroppedPackets": {
+ "description": "number of received packets dropped due to errors per virtual interface",
+ "type": "number"
+ },
+ "numGtpEchoFailures": {
+ "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2",
+ "type": "number"
+ },
+ "numGtpTunnelErrors": {
+ "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1",
+ "type": "number"
+ },
+ "numHttpErrors": {
+          "description": "HTTP error count",
+ "type": "number"
+ },
+ "numL7BytesReceived": {
+ "description": "number of tunneled layer 7 bytes received, including retransmissions",
+ "type": "number"
+ },
+ "numL7BytesTransmitted": {
+ "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions",
+ "type": "number"
+ },
+ "numLostPackets": {
+ "description": "number of lost packets",
+ "type": "number"
+ },
+ "numOutOfOrderPackets": {
+ "description": "number of out-of-order packets",
+ "type": "number"
+ },
+ "numPacketErrors": {
+ "description": "number of errored packets",
+ "type": "number"
+ },
+ "numPacketsReceivedExclRetrans": {
+ "description": "number of packets received, excluding retransmission",
+ "type": "number"
+ },
+ "numPacketsReceivedInclRetrans": {
+ "description": "number of packets received, including retransmission",
+ "type": "number"
+ },
+ "numPacketsTransmittedInclRetrans": {
+ "description": "number of packets transmitted, including retransmissions",
+ "type": "number"
+ },
+ "numRetries": {
+ "description": "number of packet retries",
+ "type": "number"
+ },
+ "numTimeouts": {
+ "description": "number of packet timeouts",
+ "type": "number"
+ },
+ "numTunneledL7BytesReceived": {
+ "description": "number of tunneled layer 7 bytes received, excluding retransmissions",
+ "type": "number"
+ },
+ "roundTripTime": {
+ "description": "round trip time",
+ "type": "number"
+ },
+ "tcpFlagCountList": {
+ "description": "array of key: value pairs where the keys are drawn from TCP Flags and the values are the count of packets that had that TCP Flag in the flow",
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": [
+ { "type": "string" },
+ { "type": "number" }
+ ]
+ }
+ },
+ "tcpFlagList": {
+ "description": "Array of unique TCP Flags observed in the flow",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "timeToFirstByte": {
+ "description": "Time in milliseconds between the connection activation and first byte received",
+ "type": "number"
+ }
+ },
+ "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency",
+ "avgReceiveThroughput", "avgTransmitThroughput",
+ "flowActivationEpoch", "flowActivationMicrosec",
+ "flowDeactivationEpoch", "flowDeactivationMicrosec",
+ "flowDeactivationTime", "flowStatus",
+ "maxPacketDelayVariation", "numActivationFailures",
+ "numBitErrors", "numBytesReceived", "numBytesTransmitted",
+ "numDroppedPackets", "numL7BytesReceived",
+ "numL7BytesTransmitted", "numLostPackets",
+ "numOutOfOrderPackets", "numPacketErrors",
+ "numPacketsReceivedExclRetrans",
+ "numPacketsReceivedInclRetrans",
+ "numPacketsTransmittedInclRetrans",
+ "numRetries", "numTimeouts", "numTunneledL7BytesReceived",
+ "roundTripTime", "timeToFirstByte"
+ ]
+ },
+ "internalHeaderFields": {
+ "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources",
+ "type": "object"
+ },
+ "latencyBucketMeasure": {
+ "description": "number of counts falling within a defined latency bucket",
+ "type": "object",
+ "properties": {
+ "countsInTheBucket": { "type": "number" },
+ "highEndOfLatencyBucket": { "type": "number" },
+ "lowEndOfLatencyBucket": { "type": "number" }
+ },
+ "required": [ "countsInTheBucket" ]
+ },
+ "measurementGroup": {
+ "description": "measurement group",
+ "type": "object",
+ "properties": {
+ "name": { "type": "string" },
+ "measurements": {
+ "description": "array of name value pair measurements",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ }
+ },
+ "required": [ "name", "measurements" ]
+ },
+ "measurementsForVfScalingFields": {
+ "description": "measurementsForVfScaling fields",
+ "type": "object",
+ "properties": {
+ "additionalMeasurements": {
+ "description": "additional measurement fields",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/measurementGroup"
+ }
+ },
+ "aggregateCpuUsage": {
+ "description": "aggregate CPU usage of the VM on which the VNFC reporting the event is running",
+ "type": "number"
+ },
+ "codecUsageArray": {
+ "description": "array of codecs in use",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/codecsInUse"
+ }
+ },
+ "concurrentSessions": {
+ "description": "peak concurrent sessions for the VM or VNF over the measurementInterval",
+ "type": "number"
+ },
+ "configuredEntities": {
+ "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the VNF",
+ "type": "number"
+ },
+ "cpuUsageArray": {
+ "description": "usage of an array of CPUs",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/cpuUsage"
+ }
+ },
+ "errors": { "$ref": "#/definitions/errors" },
+ "featureUsageArray": {
+ "description": "array of features in use",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/featuresInUse"
+ }
+ },
+ "filesystemUsageArray": {
+ "description": "filesystem usage of the VM on which the VNFC reporting the event is running",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/filesystemUsage"
+ }
+ },
+ "latencyDistribution": {
+ "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-VNF configured ranges",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/latencyBucketMeasure"
+ }
+ },
+ "meanRequestLatency": {
+ "description": "mean seconds required to respond to each request for the VM on which the VNFC reporting the event is running",
+ "type": "number"
+ },
+ "measurementInterval": {
+ "description": "interval over which measurements are being reported in seconds",
+ "type": "number"
+ },
+ "measurementsForVfScalingVersion": {
+ "description": "version of the measurementsForVfScaling block",
+ "type": "number"
+ },
+ "memoryConfigured": {
+ "description": "memory in MB configured in the VM on which the VNFC reporting the event is running",
+ "type": "number"
+ },
+ "memoryUsed": {
+ "description": "memory usage in MB of the VM on which the VNFC reporting the event is running",
+ "type": "number"
+ },
+ "numberOfMediaPortsInUse": {
+ "description": "number of media ports in use",
+ "type": "number"
+ },
+ "requestRate": {
+ "description": "peak rate of service requests per second to the VNF over the measurementInterval",
+ "type": "number"
+ },
+ "vnfcScalingMetric": {
+ "description": "represents busy-ness of the VNF from 0 to 100 as reported by the VNFC",
+ "type": "number"
+ },
+ "vNicUsageArray": {
+ "description": "usage of an array of virtual network interface cards",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/vNicUsage"
+ }
+ }
+ },
+ "required": [ "measurementInterval" ]
+ },
+ "mobileFlowFields": {
+ "description": "mobileFlow fields",
+ "type": "object",
+ "properties": {
+ "additionalFields": {
+ "description": "additional mobileFlow fields if needed",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "applicationType": {
+ "description": "Application type inferred",
+ "type": "string"
+ },
+ "appProtocolType": {
+ "description": "application protocol",
+ "type": "string"
+ },
+ "appProtocolVersion": {
+ "description": "application protocol version",
+ "type": "string"
+ },
+ "cid": {
+ "description": "cell id",
+ "type": "string"
+ },
+ "connectionType": {
+ "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc",
+ "type": "string"
+ },
+ "ecgi": {
+ "description": "Evolved Cell Global Id",
+ "type": "string"
+ },
+ "flowDirection": {
+ "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow",
+ "type": "string"
+ },
+ "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" },
+ "gtpProtocolType": {
+ "description": "GTP protocol",
+ "type": "string"
+ },
+ "gtpVersion": {
+ "description": "GTP protocol version",
+ "type": "string"
+ },
+ "httpHeader": {
+ "description": "HTTP request header, if the flow connects to a node referenced by HTTP",
+ "type": "string"
+ },
+ "imei": {
+ "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device",
+ "type": "string"
+ },
+ "imsi": {
+ "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device",
+ "type": "string"
+ },
+ "ipProtocolType": {
+ "description": "IP protocol type e.g., TCP, UDP, RTP...",
+ "type": "string"
+ },
+ "ipVersion": {
+ "description": "IP protocol version e.g., IPv4, IPv6",
+ "type": "string"
+ },
+ "lac": {
+ "description": "location area code",
+ "type": "string"
+ },
+ "mcc": {
+ "description": "mobile country code",
+ "type": "string"
+ },
+ "mnc": {
+ "description": "mobile network code",
+ "type": "string"
+ },
+ "mobileFlowFieldsVersion": {
+ "description": "version of the mobileFlowFields block",
+ "type": "number"
+ },
+ "msisdn": {
+ "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device",
+ "type": "string"
+ },
+ "otherEndpointIpAddress": {
+ "description": "IP address for the other endpoint, as used for the flow being reported on",
+ "type": "string"
+ },
+ "otherEndpointPort": {
+ "description": "IP Port for the reporting entity, as used for the flow being reported on",
+ "type": "number"
+ },
+ "otherFunctionalRole": {
+ "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...",
+ "type": "string"
+ },
+ "rac": {
+ "description": "routing area code",
+ "type": "string"
+ },
+ "radioAccessTechnology": {
+ "description": "Radio Access Technology e.g., 2G, 3G, LTE",
+ "type": "string"
+ },
+ "reportingEndpointIpAddr": {
+ "description": "IP address for the reporting entity, as used for the flow being reported on",
+ "type": "string"
+ },
+ "reportingEndpointPort": {
+ "description": "IP port for the reporting entity, as used for the flow being reported on",
+ "type": "number"
+ },
+ "sac": {
+ "description": "service area code",
+ "type": "string"
+ },
+ "samplingAlgorithm": {
+ "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied",
+ "type": "number"
+ },
+ "tac": {
+ "description": "transport area code",
+ "type": "string"
+ },
+ "tunnelId": {
+ "description": "tunnel identifier",
+ "type": "string"
+ },
+ "vlanId": {
+ "description": "VLAN identifier used by this flow",
+ "type": "string"
+ }
+ },
+ "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType",
+ "ipVersion", "otherEndpointIpAddress", "otherEndpointPort",
+ "reportingEndpointIpAddr", "reportingEndpointPort" ]
+ },
+ "otherFields": {
+ "description": "additional fields not reported elsewhere",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "requestError": {
+ "description": "standard request error data structure",
+ "type": "object",
+ "properties": {
+ "messageId": {
+ "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception",
+ "type": "string"
+ },
+ "text": {
+ "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1",
+ "type": "string"
+ },
+ "url": {
+ "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents",
+ "type": "string"
+ },
+ "variables": {
+ "description": "List of zero or more strings that represent the contents of the variables used by the message text",
+ "type": "string"
+ }
+ },
+ "required": [ "messageId", "text" ]
+ },
+ "stateChangeFields": {
+ "description": "stateChange fields",
+ "type": "object",
+ "properties": {
+ "additionalFields": {
+ "description": "additional stateChange fields if needed",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "newState": {
+ "description": "new state of the entity",
+ "type": "string",
+ "enum": [
+ "inService",
+ "maintenance",
+ "outOfService"
+ ]
+ },
+ "oldState": {
+ "description": "previous state of the entity",
+ "type": "string",
+ "enum": [
+ "inService",
+ "maintenance",
+ "outOfService"
+ ]
+ },
+ "stateChangeFieldsVersion": {
+ "description": "version of the stateChangeFields block",
+ "type": "number"
+ },
+ "stateInterface": {
+ "description": "card or port name of the entity that changed state",
+ "type": "string"
+ }
+ },
+ "required": [ "newState", "oldState", "stateInterface" ]
+ },
+ "suppressedNvPairs": {
+ "description": "List of specific NvPairsNames to suppress within a given Name-Value Field for event Throttling",
+ "type": "object",
+ "properties": {
+ "nvPairFieldName": {
+ "description": "Name of the field within which are the nvpair names to suppress",
+ "type": "string"
+ },
+ "suppressedNvPairNames": {
+ "description": "Array of nvpair names to suppress within the nvpairFieldName",
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ "required": [ "nvPairFieldName", "suppressedNvPairNames" ]
+ },
+ "syslogFields": {
+ "description": "sysLog fields",
+ "type": "object",
+ "properties": {
+ "additionalFields": {
+ "description": "additional syslog fields if needed",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "eventSourceHost": {
+ "description": "hostname of the device",
+ "type": "string"
+ },
+ "eventSourceType": {
+ "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction",
+ "type": "string"
+ },
+ "syslogFacility": {
+ "description": "numeric code from 0 to 23 for facility--see table in documentation",
+ "type": "number"
+ },
+ "syslogFieldsVersion": {
+ "description": "version of the syslogFields block",
+ "type": "number"
+ },
+ "syslogMsg": {
+ "description": "syslog message",
+ "type": "string"
+ },
+ "syslogPri": {
+ "description": "0-192 combined severity and facility",
+ "type": "number"
+ },
+ "syslogProc": {
+ "description": "identifies the application that originated the message",
+ "type": "string"
+ },
+ "syslogProcId": {
+ "description": "a change in the value of this field indicates a discontinuity in syslog reporting",
+ "type": "number"
+ },
+ "syslogSData": {
+ "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs",
+ "type": "string"
+ },
+ "syslogSdId": {
+ "description": "0-32 char in format name@number for example ourSDID@32473",
+ "type": "string"
+ },
+ "syslogSev": {
+          "description": "numerical code for severity derived from syslogPri as remainder of syslogPri / 8",
+ "type": "string"
+ },
+ "syslogTag": {
+ "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided",
+ "type": "string"
+ },
+ "syslogVer": {
+ "description": "IANA assigned version of the syslog protocol specification - typically 1",
+ "type": "number"
+ }
+ },
+ "required": [ "eventSourceType", "syslogMsg", "syslogTag" ]
+ },
+ "thresholdCrossingAlertFields": {
+ "description": "fields specific to threshold crossing alert events",
+ "type": "object",
+ "properties": {
+ "additionalFields": {
+ "description": "additional threshold crossing alert fields if needed",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/field"
+ }
+ },
+ "additionalParameters": {
+ "description": "performance counters",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/counter"
+ }
+ },
+ "alertAction": {
+ "description": "Event action",
+ "type": "string",
+ "enum": [
+ "CLEAR",
+ "CONT",
+ "SET"
+ ]
+ },
+ "alertDescription": {
+ "description": "Unique short alert description such as IF-SHUB-ERRDROP",
+ "type": "string"
+ },
+ "alertType": {
+ "description": "Event type",
+ "type": "string",
+ "enum": [
+ "CARD-ANOMALY",
+ "ELEMENT-ANOMALY",
+ "INTERFACE-ANOMALY",
+ "SERVICE-ANOMALY"
+ ]
+ },
+ "alertValue": {
+ "description": "Calculated API value (if applicable)",
+ "type": "string"
+ },
+ "associatedAlertIdList": {
+ "description": "List of eventIds associated with the event being reported",
+ "type": "array",
+ "items": { "type": "string" }
+ },
+ "collectionTimestamp": {
+ "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
+ "type": "string"
+ },
+ "dataCollector": {
+ "description": "Specific performance collector instance used",
+ "type": "string"
+ },
+ "elementType": {
+ "description": "type of network element - internal ATT field",
+ "type": "string"
+ },
+ "eventSeverity": {
+ "description": "event severity or priority",
+ "type": "string",
+ "enum": [
+ "CRITICAL",
+ "MAJOR",
+ "MINOR",
+ "WARNING",
+ "NORMAL"
+ ]
+ },
+ "eventStartTimestamp": {
+ "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
+ "type": "string"
+ },
+ "interfaceName": {
+ "description": "Physical or logical port or card (if applicable)",
+ "type": "string"
+ },
+ "networkService": {
+ "description": "network name - internal ATT field",
+ "type": "string"
+ },
+ "possibleRootCause": {
+ "description": "Reserved for future use",
+ "type": "string"
+ },
+ "thresholdCrossingFieldsVersion": {
+ "description": "version of the thresholdCrossingAlertFields block",
+ "type": "number"
+ }
+ },
+ "required": [
+ "additionalParameters",
+ "alertAction",
+ "alertDescription",
+ "alertType",
+ "collectionTimestamp",
+ "eventSeverity",
+ "eventStartTimestamp"
+ ]
+ },
+ "vNicUsage": {
+ "description": "usage of identified virtual network interface card",
+ "type": "object",
+ "properties": {
+ "broadcastPacketsIn": { "type": "number" },
+ "broadcastPacketsOut": { "type": "number" },
+ "bytesIn": { "type": "number" },
+ "bytesOut": { "type": "number" },
+ "multicastPacketsIn": { "type": "number" },
+ "multicastPacketsOut": { "type": "number" },
+ "packetsIn": { "type": "number" },
+ "packetsOut": { "type": "number" },
+ "unicastPacketsIn": { "type": "number" },
+ "unicastPacketsOut": { "type": "number" },
+ "vNicIdentifier": { "type": "string" }
+ },
+ "required": [ "bytesIn", "bytesOut", "packetsIn", "packetsOut", "vNicIdentifier"]
+ }
+ },
+ "title": "Event Listener",
+ "type": "object",
+ "properties": {
+ "event": {"$ref": "#/definitions/event"}
+ }
+ }
+
+}
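
The syslogFields definitions above tie syslogPri, syslogFacility and syslogSev together: syslogPri encodes facility * 8 + severity, so severity is the remainder of syslogPri / 8 and facility is the integer quotient. A minimal Python sketch of that decomposition plus the schema's required-field check; the event values are hypothetical and the script is not part of the test suite:

# Decompose a syslog PRI value as the syslogFields descriptions above spell out:
# syslogPri = facility * 8 + severity, so severity = syslogPri % 8 and facility = syslogPri // 8.
SEVERITY_NAMES = ["Emergency", "Alert", "Critical", "Error",
                  "Warning", "Notice", "Informational", "Debug"]

def decompose_pri(syslog_pri):
    """Return the facility code and severity name implied by a syslog PRI value."""
    facility, severity = divmod(syslog_pri, 8)
    return {"syslogFacility": facility, "syslogSev": SEVERITY_NAMES[severity]}

# Hypothetical syslogFields fragment; eventSourceType, syslogMsg and syslogTag are
# the fields the schema above marks as required.
syslog_fields = {
    "eventSourceType": "virtualNetworkFunction",
    "syslogMsg": "link down on eth0",
    "syslogTag": "TCPOUT",
    "syslogPri": 165,   # facility 20 (local4), severity 5 (Notice)
}

missing = [f for f in ("eventSourceType", "syslogMsg", "syslogTag") if f not in syslog_fields]
assert not missing, "missing required syslog fields: %s" % missing
print(decompose_pri(syslog_fields["syslogPri"]))   # {'syslogFacility': 20, 'syslogSev': 'Notice'}
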
diff --git a/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/__init__.robot b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/__init__.robot
new file mode 100644
index 000000000..8064d6b0c
--- /dev/null
+++ b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation javatoscachecker - APIs
diff --git a/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/basics.robot b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/basics.robot
new file mode 100644
index 000000000..053b2824d
--- /dev/null
+++ b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/basics.robot
@@ -0,0 +1,62 @@
+*** Settings ***
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+
+
+*** Test Cases ***
+Check service up/non-existent namespace
+ CreateSession checkerservice http://localhost:8080
+ &{headers}= Create Dictionary Accept=application/json
+ ${resp}= Get Request checkerservice /check_template/nosuchcatalog headers=${headers}
+ Should Be Equal As Strings ${resp.status_code} 404
+
+Check standalone template
+ CreateSession checkerservice http://localhost:8080
+ ${data}= Get Binary File ${CURDIR}${/}data${/}standalone.yaml
+ &{headers}= Create Dictionary Accept=application/json
+ ${resp}= Post Request checkerservice /check_template/ data=${data} headers=${headers}
+ Log Response received from checker ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ Should Be Equal As Strings ${resp.text} []
+
+Check standalone template with errors
+ CreateSession checkerservice http://localhost:8080
+ ${data}= Get Binary File ${CURDIR}${/}data${/}standalone_with_errors.yaml
+ &{headers}= Create Dictionary Accept=application/json
+ ${resp}= Post Request checkerservice /check_template/ data=${data} headers=${headers}
+ Log Response received from checker ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ Should Not Be Equal As Strings ${resp.text} []
+
+Check schema new namespace
+ CreateSession checkerservice http://localhost:8080
+ ${data}= Get Binary File ${CURDIR}${/}data${/}test_schema.yaml
+ &{headers}= Create Dictionary Accept=application/json
+ ${resp}= Post Request checkerservice /check_template/test/schema.yaml data=${data} headers=${headers}
+ Log Response received from checker ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ Should Be Equal As Strings ${resp.text} []
+
+Check template in namespace
+ CreateSession checkerservice http://localhost:8080
+ ${data}= Get Binary File ${CURDIR}${/}data${/}test_template.yaml
+ &{headers}= Create Dictionary Accept=application/json
+ ${resp}= Post Request checkerservice /check_template/test/ data=${data} headers=${headers}
+ Log Response received from checker ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ Should Be Equal As Strings ${resp.text} []
+
+Check named template does not exist
+ CreateSession checkerservice http://localhost:8080
+ &{headers}= Create Dictionary Accept=application/json
+ ${resp}= Get Request checkerservice /check_template/test/nosuchtemplate.yaml headers=${headers}
+ Should Be Equal As Strings ${resp.status_code} 404
+
+Check delete existing namespace
+ CreateSession checkerservice http://localhost:8080
+ &{headers}= Create Dictionary Accept=application/json
+ ${resp}= Delete Request checkerservice /check_template/test/ headers=${headers}
+ Log Response received from checker ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
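
The basics.robot cases above exercise the checker purely over HTTP: POST a YAML template to /check_template/ (or to /check_template/<namespace>/<name> to store it under a namespace) and expect an empty JSON array when the template is clean, a non-empty one when it is not. A rough requests equivalent of the standalone check, assuming the same localhost:8080 endpoint the suite targets:

import requests

BASE = "http://localhost:8080"            # same endpoint the Robot suite uses
HEADERS = {"Accept": "application/json"}

def check_template(yaml_path, uri="/check_template/"):
    """POST a TOSCA template to the checker service and return the reported issues."""
    with open(yaml_path, "rb") as f:
        resp = requests.post(BASE + uri, data=f.read(), headers=HEADERS)
    resp.raise_for_status()
    return resp.json()                     # [] means the checker found no errors

issues = check_template("data/standalone.yaml")   # path relative to the APIs test directory
print("clean template" if not issues else issues)
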
diff --git a/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/standalone.yaml b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/standalone.yaml
new file mode 100644
index 000000000..26797aba3
--- /dev/null
+++ b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/standalone.yaml
@@ -0,0 +1,21 @@
+tosca_definitions_version: tosca_simple_yaml_1_0
+
+topology_template:
+ node_templates:
+ my_server:
+ type: tosca.nodes.Compute
+ capabilities:
+ # Host container properties
+ host:
+ properties:
+ num_cpus: 1
+ disk_size: 10 GB
+ mem_size: 4096 MB
+ # Guest Operating System properties
+ os:
+ properties:
+ # host Operating System image properties
+ architecture: x86_64
+ type: linux
+ distribution: rhel
+ version: 6.5
diff --git a/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/standalone_with_errors.yaml b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/standalone_with_errors.yaml
new file mode 100644
index 000000000..d5c1a1586
--- /dev/null
+++ b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/standalone_with_errors.yaml
@@ -0,0 +1,23 @@
+tosca_definitions_version: tosca_simple_yaml_1_0
+description: uses an unknown capability property to trigger an error
+
+topology_template:
+ node_templates:
+ my_server:
+ type: tosca.nodes.Compute
+ capabilities:
+ # Host container properties
+ host:
+ properties:
+ num_cpus: 1
+ disk_size: 10 GB
+ mem_size: 4096 MB
+ virtualization_support: true
+ # Guest Operating System properties
+ os:
+ properties:
+ # host Operating System image properties
+ architecture: x86_64
+ type: linux
+ distribution: rhel
+ version: 6.5
diff --git a/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/test_schema.yaml b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/test_schema.yaml
new file mode 100644
index 000000000..9575e27c9
--- /dev/null
+++ b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/test_schema.yaml
@@ -0,0 +1,17 @@
+tosca_definitions_version: tosca_simple_yaml_1_1
+
+data_types:
+ org.onap.tosca.checker.service.Person:
+ properties:
+ firstName:
+ type: string
+ required: true
+ lastName:
+ type: string
+ required: true
+
+node_types:
+ org.onap.tosca.checker.service.Residence:
+ properties:
+ owner:
+ type: org.onap.tosca.checker.service.Person
diff --git a/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/test_template.yaml b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/test_template.yaml
new file mode 100644
index 000000000..478aec05e
--- /dev/null
+++ b/test/csit/tests/modeling-toscaparsers-javatoscachecker/APIs/data/test_template.yaml
@@ -0,0 +1,11 @@
+tosca_definitions_version: tosca_simple_yaml_1_0
+
+imports:
+ - schema: schema.yaml
+
+topology_template:
+ node_templates:
+ my_house:
+ type: org.onap.tosca.checker.service.Residence
+ properties:
+ owner: {firstName: "Serban", lastName: "Jora"}
diff --git a/test/csit/tests/policy/suite1/Policy-CSIT.robot b/test/csit/tests/policy/suite1/Policy-CSIT.robot
index 5ce1f9f73..fd5e23048 100644
--- a/test/csit/tests/policy/suite1/Policy-CSIT.robot
+++ b/test/csit/tests/policy/suite1/Policy-CSIT.robot
@@ -12,9 +12,9 @@ ${RESOURCE_PATH_CREATE} /pdp/api/createPolicy
${RESOURCE_PATH_CREATE_PUSH} /pdp/api/pushPolicy
${RESOURCE_PATH_CREATE_DELETE} /pdp/api/deletePolicy
${RESOURCE_PATH_GET_CONFIG} /pdp/api/getConfig
-${CREATE_CONFIG_VFW_TEMPLATE} ${CURDIR}/configpolicy_vFW.template
-${CREATE_CONFIG_VDNS_TEMPLATE} ${CURDIR}/configpolicy_vDNS.template
-${CREATE_CONFIG_VCPE_TEMPLATE} ${CURDIR}/configpolicy_vCPE.template
+${CREATE_CONFIG_VFW_TEMPLATE} ${CURDIR}/configpolicy_vFW_R1.template
+${CREATE_CONFIG_VDNS_TEMPLATE} ${CURDIR}/configpolicy_vDNS_R1.template
+${CREATE_CONFIG_VCPE_TEMPLATE} ${CURDIR}/configpolicy_vCPE_R1.template
${CREATE_OPS_VFW_TEMPLATE} ${CURDIR}/opspolicy_VFW_R1.template
${PUSH_POLICY_TEMPLATE} ${CURDIR}/pushpolicy.template
${CREATE_OPS_VDNS_TEMPLATE} ${CURDIR}/opspolicy_VDNS_R1.template
@@ -36,8 +36,8 @@ ${OPS_POLICY_VOLTE_NAME} VoLTE
${OPS_POLICY_VOLTE_TYPE} BRMS_PARAM
${file_path} ../testsuite/robot/assets/templates/ControlLoopDemo__closedLoopControlName.drl
${RESOURCE_PATH_UPLOAD} /pdp/api/policyEngineImport?importParametersJson=%7B%22serviceName%22%3A%22Manyu456%22%2C%20%22serviceType%22%3A%22BRMSPARAM%22%7D
-${CREATE_OPS_VCPE_TEMPLATE} ${CURDIR}/opspolicy_vCPE.template
-${CREATE_OPS_VOLTE_TEMPLATE} ${CURDIR}/opspolicy_vOLTE.template
+${CREATE_OPS_VCPE_TEMPLATE} ${CURDIR}/opspolicy_vCPE_R1.template
+${CREATE_OPS_VOLTE_TEMPLATE} ${CURDIR}/opspolicy_vOLTE_R1.template
*** Test Cases ***
@@ -220,7 +220,7 @@ Create Ops VDNS Policy
Create Config VCPE Policy
[Documentation] Create Config Policy
${randompolicyname} = Create Policy Name
- ${policyname1}= Catenate com.${randompolicyname}_vFirewall
+ ${policyname1}= Catenate com.${randompolicyname}_vCPE
${CONFIG_POLICY_VCPE_NAME}= Set Test Variable ${policyname1}
${configpolicy}= Create Dictionary policy_name=${policyname1}
${output} = Fill JSON Template File ${CREATE_CONFIG_VCPE_TEMPLATE} ${configpolicy}
@@ -239,7 +239,7 @@ Get Configs VCPE Policy
Create Ops vCPE Policy
[Documentation] Create Opertional Policy
${randompolicyname} = Create Policy Name
- ${policyname1}= Catenate com.${randompolicyname}
+ ${policyname1}= Catenate com.${randompolicyname}_vCPE
${OPS_POLICY_VCPE_NAME}= Set Test Variable ${policyname1}
${dict}= Create Dictionary policy_name=${policyname1}
${output} = Fill JSON Template File ${CREATE_OPS_VCPE_TEMPLATE} ${dict}
@@ -251,7 +251,7 @@ Create Ops vCPE Policy
Create Ops VolTE Policy
[Documentation] Create Opertional Policy
${randompolicyname} = Create Policy Name
- ${policyname1}= Catenate com.${randompolicyname}
+ ${policyname1}= Catenate com.${randompolicyname}_VoLTE
${dict}= Create Dictionary policy_name=${policyname1}
${output} = Fill JSON Template File ${CREATE_OPS_VOLTE_TEMPLATE} ${dict}
${put_resp} = Run Policy Put Request ${RESOURCE_PATH_CREATE} ${output}
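
The renamed *_R1 templates above all feed the same PDP flow: fill the template with a generated policy name, PUT the result to /pdp/api/createPolicy, then push it through /pdp/api/pushPolicy. A condensed requests sketch of that flow; the PDP base URL and credentials are placeholders, pushpolicy.template may carry more placeholders than policy_name, and showing the push as a PUT mirrors the suite's create call rather than anything visible in this hunk:

from string import Template
import requests

PDP = "http://localhost:8081"                       # placeholder base URL, not taken from the suite
HEADERS = {"Content-Type": "application/json",
           "Accept": "application/json",
           "Authorization": "Basic dGVzdDp0ZXN0"}   # placeholder credentials

def fill_template(path, policy_name):
    # Mirrors the suite's Fill JSON Template File: substitute ${policy_name}, leave the rest alone.
    return Template(open(path).read()).safe_substitute(policy_name=policy_name)

create_body = fill_template("configpolicy_vCPE_R1.template", "com.CSIT_vCPE")
create = requests.put(PDP + "/pdp/api/createPolicy", data=create_body, headers=HEADERS)

push_body = fill_template("pushpolicy.template", "com.CSIT_vCPE")
push = requests.put(PDP + "/pdp/api/pushPolicy", data=push_body, headers=HEADERS)
print(create.status_code, push.status_code)
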
diff --git a/test/csit/tests/policy/suite1/configpolicy_vCPE_R1.template b/test/csit/tests/policy/suite1/configpolicy_vCPE_R1.template
new file mode 100644
index 000000000..6dcf3c600
--- /dev/null
+++ b/test/csit/tests/policy/suite1/configpolicy_vCPE_R1.template
@@ -0,0 +1,6 @@
+{
+ "configBody": "{ \"service\": \"policy_tosca_tca\", \"location\": \"SampleServiceLocation\", \"uuid\": \"test\", \"policyName\": \"MicroServicevCPE\", \"description\": \"MicroService vCPE Policy\", \"configName\": \"SampleConfigName\", \"templateVersion\": \"OpenSource.version.1\", \"version\": \"1.0.0\", \"priority\": \"1\", \"policyScope\": \"resource=SampleResource,service=SampleService,type=SampleType,closedLoopControlName=ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e\", \"riskType\": \"SampleRiskType\", \"riskLevel\": \"1\", \"guard\": \"False\", \"content\": { \"policyVersion\": \"v0.0.1\", \"threshholds\": [{ \"severity\": \"MAJOR\", \"fieldPath\": \"$$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value\", \"thresholdValue\": \"0\", \"closedLoopEventStatus\": \"ABATED\", \"closedLoopControlName\": \"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e\", \"version\": \"1.0.2\", \"direction\": \"EQUAL\" }, { \"severity\": \"CRITICAL\", \"fieldPath\": \"$$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value\", \"thresholdValue\": \"0\", \"closedLoopEventStatus\": \"ONSET\", \"closedLoopControlName\": \"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e\", \"version\": \"1.0.2\", \"direction\": \"GREATER\" }], \"policyName\": \"DCAE.Config_tca-hi-lo\", \"controlLoopSchemaType\": \"VNF\", \"policyScope\": \"DCAE\", \"eventName\": \"Measurement_vGMUX\" } }",
+ "policyConfigType": "MicroService",
+ "policyName": "${policy_name}",
+ "onapName": "DCAE"
+} \ No newline at end of file
diff --git a/test/csit/tests/policy/suite1/configpolicy_vDNS_R1.template b/test/csit/tests/policy/suite1/configpolicy_vDNS_R1.template
new file mode 100644
index 000000000..0a9ecc6ff
--- /dev/null
+++ b/test/csit/tests/policy/suite1/configpolicy_vDNS_R1.template
@@ -0,0 +1,6 @@
+{
+ "configBody": "{ \"service\": \"policy_tosca_tca\", \"location\": \"SampleServiceLocation\", \"uuid\": \"test\", \"policyName\": \"MicroServicevDNS\", \"description\": \"MicroService vDNS Policy\", \"configName\": \"SampleConfigName\", \"templateVersion\": \"OpenSource.version.1\", \"version\": \"1.0.0\", \"priority\": \"1\", \"policyScope\": \"resource=SampleResource,service=SampleService,type=SampleType,closedLoopControlName=ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3\", \"riskType\": \"SampleRiskType\", \"riskLevel\": \"1\", \"guard\": \"False\", \"content\": { \"policyVersion\": \"v0.0.1\", \"threshholds\": [{ \"severity\": \"CRITICAL\", \"fieldPath\": \"$$.event.measurementsForVfScalingFields.vNicUsageArray[*].receivedTotalPacketsDelta\", \"thresholdValue\": \"300\", \"closedLoopEventStatus\": \"ONSET\", \"closedLoopControlName\": \"ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3\", \"version\": \"1.0.2\", \"direction\": \"GREATER_OR_EQUAL\" }], \"policyName\": \"DCAE.Config_tca-hi-lo\", \"controlLoopSchemaType\": \"VM\", \"policyScope\": \"DCAE\", \"eventName\": \"vLoadBalancer\" } }",
+ "policyConfigType": "MicroService",
+ "policyName": "${policy_name}",
+ "onapName": "DCAE"
+} \ No newline at end of file
diff --git a/test/csit/tests/policy/suite1/configpolicy_vFW_R1.template b/test/csit/tests/policy/suite1/configpolicy_vFW_R1.template
new file mode 100644
index 000000000..4c3617d5e
--- /dev/null
+++ b/test/csit/tests/policy/suite1/configpolicy_vFW_R1.template
@@ -0,0 +1,6 @@
+{
+ "configBody": "{\"service\": \"policy_tosca_tca\",\"location\": \"SampleServiceLocation\",\"uuid\": \"test\",\"policyName\": \"MicroServicevFirewall\",\"description\": \"MicroService vFirewall Policy\",\"configName\": \"SampleConfigName\",\"templateVersion\": \"OpenSource.version.1\",\"version\": \"1.0.0\",\"priority\": \"1\",\"policyScope\": \"resource=SampleResource,service=SampleService,type=SampleType,closedLoopControlName=ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a\",\"riskType\": \"SampleRiskType\",\"riskLevel\": \"1\",\"guard\": \"False\",\"content\": {\"policyVersion\": \"v0.0.1\",\"threshholds\": [{\"severity\": \"MAJOR\",\"fieldPath\": \"$$.event.measurementsForVfScalingFields.vNicUsageArray[*].receivedTotalPacketsDelta\",\"thresholdValue\": \"300\",\"closedLoopEventStatus\": \"ONSET\",\"closedLoopControlName\": \"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a\",\"version\": \"1.0.2\",\"direction\": \"LESS_OR_EQUAL\"}, {\"severity\": \"CRITICAL\",\"fieldPath\": \"$$.event.measurementsForVfScalingFields.vNicUsageArray[*].receivedTotalPacketsDelta\",\"thresholdValue\": \"700\",\"closedLoopEventStatus\": \"ONSET\",\"closedLoopControlName\": \"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a\",\"version\": \"1.0.2\",\"direction\": \"GREATER_OR_EQUAL\"}],\"policyName\": \"DCAE.Config_tca-hi-lo\",\"controlLoopSchemaType\": \"VNF\",\"policyScope\": \"DCAE\",\"eventName\": \"vFirewallBroadcastPackets\"}}",
+ "policyConfigType": "MicroService",
+ "policyName": "${policy_name}",
+ "onapName": "DCAE"
+} \ No newline at end of file
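
In the three config templates above, configBody is a JSON document embedded as a string, and the JSONPath expressions are written with $$ so that template substitution leaves a literal $ behind. A short sketch that fills the vFW template and unwraps the thresholds it carries (file path assumed relative to suite1):

import json

raw = open("configpolicy_vFW_R1.template").read()
filled = raw.replace("$$", "$").replace("${policy_name}", "com.CSIT_vFirewall")

policy = json.loads(filled)
config = json.loads(policy["configBody"])            # configBody is JSON inside JSON

for t in config["content"]["threshholds"]:           # 'threshholds' is the field's actual spelling
    print(t["severity"], t["fieldPath"], t["direction"], t["thresholdValue"])
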
diff --git a/test/csit/tests/policy/suite1/opspolicy_VDNS_R1.template b/test/csit/tests/policy/suite1/opspolicy_VDNS_R1.template
index b5312b500..92627cc2d 100644
--- a/test/csit/tests/policy/suite1/opspolicy_VDNS_R1.template
+++ b/test/csit/tests/policy/suite1/opspolicy_VDNS_R1.template
@@ -10,7 +10,7 @@
"RULE": {
"templateName": "ClosedLoopControlName",
"closedLoopControlName": "ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3",
- "controlLoopYaml": "controlLoop%3A%0D%0A++version%3A+2.0.0%0D%0A++controlLoopName%3A+ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3%0D%0A++services%3A%0D%0A++++-+serviceName%3A+d4738992-6497-4dca-9db9%0D%0A++++++serviceInvariantUUID%3A+dc112d6e-7e73-4777-9c6f-1a7fb5fd1b6f%0D%0A++++++serviceUUID%3A+2eea06c6-e1d3-4c3a-b9c4-478c506eeedf%0D%0A++trigger_policy%3A+unique-policy-id-1-scale-up%0D%0A++timeout%3A+1200%0D%0A+%0D%0Apolicies%3A%0D%0A++-+id%3A+unique-policy-id-1-scale-up%0D%0A++++name%3A+Create+a+new+VF+Module%0D%0A++++description%3A%0D%0A++++actor%3A+MSO%0D%0A++++recipe%3A+VF+Module+Create%0D%0A++++target%3A%0D%0A++++++type%3A+VNF%0D%0A++++retry%3A+0%0D%0A++++timeout%3A+1200%0D%0A++++success%3A+final_success%0D%0A++++failure%3A+final_failure%0D%0A++++failure_timeout%3A+final_failure_timeout%0D%0A++++failure_retries%3A+final_failure_retries%0D%0A++++failure_exception%3A+final_failure_exception%0D%0A++++failure_guard%3A+final_failure_guard"
- }
+ "controlLoopYaml": "controlLoop%3A%0D%0A++version%3A+2.0.0%0D%0A++controlLoopName%3A+ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3%0D%0A++trigger_policy%3A+unique-policy-id-1-scale-up%0D%0A++timeout%3A+1200%0D%0A++abatement%3A+false%0D%0Apolicies%3A%0D%0A++-+id%3A+unique-policy-id-1-scale-up%0D%0A++++name%3A+Create+a+new+VF+Module%0D%0A++++description%3A%0D%0A++++actor%3A+SO%0D%0A++++recipe%3A+VF+Module+Create%0D%0A++++target%3A%0D%0A++++++type%3A+VNF%0D%0A++++retry%3A+0%0D%0A++++timeout%3A+1200%0D%0A++++success%3A+final_success%0D%0A++++failure%3A+final_failure%0D%0A++++failure_timeout%3A+final_failure_timeout%0D%0A++++failure_retries%3A+final_failure_retries%0D%0A++++failure_exception%3A+final_failure_exception%0D%0A++++failure_guard%3A+final_failure_guard"
+ }
}
} \ No newline at end of file
diff --git a/test/csit/tests/policy/suite1/opspolicy_VFW_R1.template b/test/csit/tests/policy/suite1/opspolicy_VFW_R1.template
index e9893446e..a93063f70 100644
--- a/test/csit/tests/policy/suite1/opspolicy_VFW_R1.template
+++ b/test/csit/tests/policy/suite1/opspolicy_VFW_R1.template
@@ -10,8 +10,7 @@
"RULE": {
"templateName": "ClosedLoopControlName",
"closedLoopControlName": "ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a",
- "controlLoopYaml": "controlLoop%3A%0D%0A++version%3A+2.0.0%0D%0A++controlLoopName%3A+ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a%0D%0A++services%3A%0D%0A++++-+serviceInvariantUUID%3A+5cfe6f4a-41bc-4247-8674-ebd4b98e35cc%0D%0A++++++serviceUUID%3A+0f40bba5-986e-4b3c-803f-ddd1b7b25f24%0D%0A++++++serviceName%3A+57e66ea7-0ed6-45c7-970f%0D%0A++trigger_policy%3A+unique-policy-id-1-modifyConfig%0D%0A++timeout%3A+1200%0D%0A+%0D%0Apolicies%3A%0D%0A++-+id%3A+unique-policy-id-1-modifyConfig%0D%0A++++name%3A+modify+packet+gen+config%0D%0A++++description%3A%0D%0A++++actor%3A+APPC%0D%0A++++recipe%3A+ModifyConfig%0D%0A++++target%3A%0D%0A++++++resourceID%3A+Eace933104d443b496b8.nodes.heat.vpg%0D%0A++++++type%3A+VNF%0D%0A++++retry%3A+0%0D%0A++++timeout%3A+300%0D%0A++++success%3A+final_success%0D%0A++++failure%3A+final_failure%0D%0A++++failure_timeout%3A+final_failure_timeout%0D%0A++++failure_retries%3A+final_failure_retries%0D%0A++++failure_exception%3A+final_failure_exception%0D%0A++++failure_guard%3A+final_failure_guard"
+ "controlLoopYaml": "controlLoop%3A%0D%0A++version%3A+2.0.0%0D%0A++controlLoopName%3A+ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a%0D%0A++trigger_policy%3A+unique-policy-id-1-modifyConfig%0D%0A++timeout%3A+1200%0D%0A++abatement%3A+false%0D%0A+%0D%0Apolicies%3A%0D%0A++-+id%3A+unique-policy-id-1-modifyConfig%0D%0A++++name%3A+modify+packet+gen+config%0D%0A++++description%3A%0D%0A++++actor%3A+APPC%0D%0A++++recipe%3A+ModifyConfig%0D%0A++++target%3A%0D%0A++++++%23+TBD+-+Cannot+be+known+until+instantiation+is+done%0D%0A++++++resourceID%3A+Eace933104d443b496b8.nodes.heat.vpg%0D%0A++++++type%3A+VNF%0D%0A++++retry%3A+0%0D%0A++++timeout%3A+300%0D%0A++++success%3A+final_success%0D%0A++++failure%3A+final_failure%0D%0A++++failure_timeout%3A+final_failure_timeout%0D%0A++++failure_retries%3A+final_failure_retries%0D%0A++++failure_exception%3A+final_failure_exception%0D%0A++++failure_guard%3A+final_failure_guard"
}
- }
-}
-
+ }
+}
diff --git a/test/csit/tests/policy/suite1/opspolicy_vCPE_R1.template b/test/csit/tests/policy/suite1/opspolicy_vCPE_R1.template
new file mode 100644
index 000000000..36695daa4
--- /dev/null
+++ b/test/csit/tests/policy/suite1/opspolicy_vCPE_R1.template
@@ -0,0 +1,16 @@
+{
+ "policyConfigType": "BRMS_PARAM",
+ "policyName": "${policy_name}",
+ "policyDescription": "BRMS Param vCPE policy",
+ "policyScope": "com",
+ "attributes": {
+ "MATCHING": {
+ "controller" : "amsterdam"
+ },
+ "RULE": {
+ "templateName": "ClosedLoopControlName",
+ "closedLoopControlName": "ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e",
+ "controlLoopYaml": "controlLoop%3A%0D%0A++version%3A+2.0.0%0D%0A++controlLoopName%3A+ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e%0D%0A++trigger_policy%3A+unique-policy-id-1-restart%0D%0A++timeout%3A+3600%0D%0A++abatement%3A+true%0D%0A+%0D%0Apolicies%3A%0D%0A++-+id%3A+unique-policy-id-1-restart%0D%0A++++name%3A+Restart+the+VM%0D%0A++++description%3A%0D%0A++++actor%3A+APPC%0D%0A++++recipe%3A+Restart%0D%0A++++target%3A%0D%0A++++++type%3A+VM%0D%0A++++retry%3A+3%0D%0A++++timeout%3A+1200%0D%0A++++success%3A+final_success%0D%0A++++failure%3A+final_failure%0D%0A++++failure_timeout%3A+final_failure_timeout%0D%0A++++failure_retries%3A+final_failure_retries%0D%0A++++failure_exception%3A+final_failure_exception%0D%0A++++failure_guard%3A+final_failure_guard"
+ }
+ }
+} \ No newline at end of file
diff --git a/test/csit/tests/policy/suite1/opspolicy_vOLTE_R1.template b/test/csit/tests/policy/suite1/opspolicy_vOLTE_R1.template
new file mode 100644
index 000000000..ff5fc42c4
--- /dev/null
+++ b/test/csit/tests/policy/suite1/opspolicy_vOLTE_R1.template
@@ -0,0 +1,16 @@
+{
+ "policyConfigType": "BRMS_PARAM",
+ "policyName": "${policy_name}",
+ "policyDescription": "BRMS Param VOLTE policy",
+ "policyScope": "com",
+ "attributes": {
+ "MATCHING": {
+ "controller" : "amsterdam"
+ },
+ "RULE": {
+ "templateName": "ClosedLoopControlName",
+ "closedLoopControlName": "ControlLoop-VOLTE-2179b738-fd36-4843-a71a-a8c24c70c55b",
+ "controlLoopYaml": "controlLoop%3A%0D%0A++version%3A+2.0.0%0D%0A++controlLoopName%3A+ControlLoop-VOLTE-2179b738-fd36-4843-a71a-a8c24c70c55b%0D%0A++trigger_policy%3A+unique-policy-id-1-restart%0D%0A++timeout%3A+3600%0D%0A++abatement%3A+false%0D%0A+%0D%0Apolicies%3A%0D%0A++-+id%3A+unique-policy-id-1-restart%0D%0A++++name%3A+Restart+the+VM%0D%0A++++description%3A%0D%0A++++actor%3A+VFC%0D%0A++++recipe%3A+Restart%0D%0A++++target%3A%0D%0A++++++type%3A+VM%0D%0A++++retry%3A+3%0D%0A++++timeout%3A+1200%0D%0A++++success%3A+final_success%0D%0A++++failure%3A+final_failure%0D%0A++++failure_timeout%3A+final_failure_timeout%0D%0A++++failure_retries%3A+final_failure_retries%0D%0A++++failure_exception%3A+final_failure_exception%0D%0A++++failure_guard%3A+final_failure_guard"
+ }
+ }
+} \ No newline at end of file
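
The controlLoopYaml values in the two BRMS_PARAM templates above are the operational control-loop YAML, URL-encoded ('+' for spaces, %0D%0A for line breaks) so they can travel as a single JSON string. Decoding them for inspection is a one-liner; a sketch against the vCPE template:

import json
from urllib.parse import unquote_plus, quote_plus

raw = open("opspolicy_vCPE_R1.template").read().replace("${policy_name}", "com.CSIT_vCPE")
encoded = json.loads(raw)["attributes"]["RULE"]["controlLoopYaml"]

yaml_text = unquote_plus(encoded)     # '+' -> space, %0D%0A -> CR/LF, %3A -> ':'
print(yaml_text)                      # controlLoop: version, trigger_policy, APPC Restart policy, ...

# quote_plus(yaml_text) re-encodes an edited control loop in the same style.
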
diff --git a/test/csit/tests/portal-sdk/testsuites/test1.robot b/test/csit/tests/portal-sdk/testsuites/test1.robot
index ad332c5df..84579d017 100644
--- a/test/csit/tests/portal-sdk/testsuites/test1.robot
+++ b/test/csit/tests/portal-sdk/testsuites/test1.robot
@@ -6,8 +6,8 @@ Library XvfbRobot
*** Variables ***
-${PORTAL_URL} http://portal.api.simpledemo.openecomp.org:8989
-${PORTAL_ENV} /ECOMPPORTAL
+${PORTAL_URL} http://portal.api.simpledemo.onap.org:8989
+${PORTAL_ENV} /ONAPPORTAL
${PORTAL_LOGIN_URL} ${PORTAL_URL}${PORTAL_ENV}/login.htm
${PORTAL_HOME_PAGE} ${PORTAL_URL}${PORTAL_ENV}/applicationsHome
${PORTAL_MICRO_ENDPOINT} ${PORTAL_URL}${PORTAL_ENV}/commonWidgets
@@ -28,7 +28,6 @@ ${GLOBAL_VM_PRIVATE_KEY} ${EXECDIR}/robot/assets/keys/robot_ssh_private_key.pv
Portal admin Login To Portal GUI
[Documentation] Logs into Portal GUI
- ## Setup Browser Now being managed by test case
# Setup Browser
Start Virtual Display 1920 1080
Open Browser ${PORTAL_LOGIN_URL} chrome
@@ -48,9 +47,14 @@ Portal admin Login To Portal GUI
Portal Admin Navigation Application Link Tab
[Documentation] Logs into Portal GUI as Portal admin
-
Click Element xpath=.//h3[contains(text(),'xDemo App')]/following::div[1]
- Page Should Contain ONAP Portal
+ Go To ${PORTAL_HOME_PAGE}
+ Dismiss Alert accept=false
+ #Scroll Element Into View xpath=//span[@id='tab-Home']
+ #Click Element xpath=//span[@id='tab-Home']
+ #Click Element xpath=(//span[@id='tab-xDemo-App']/following::i[@class='ion-close-round'])[1]
+ Click Element xpath=.//h3[contains(text(),'xDemo App')]/following::div[1]
+
Validate SDK Sub Menu
@@ -60,8 +64,7 @@ Validate SDK Sub Menu
Page Should Contain Reports
Page Should Contain Profile
Page Should Contain Admin
- # Click Element xpath=(.//span[@id='tab-Home'])[1]
-
+
Click Sample Pages and validate sub Menu
[Documentation] Click Sample Pages
Select frame xpath=.//*[@id='tabframe-xDemo-App']
@@ -74,7 +77,6 @@ Click Sample Pages and validate sub Menu
Click Link xpath=//a[@id='parent-item-Sample-Pages']
Click Link xpath=//a[contains(@title,'Notebook')]
Element Text Should Be xpath=//h1[contains(.,'Notebook')] Notebook
- Click Link xpath=//a[@id='parent-item-Home']
Click Reports and validate sub Menu
[Documentation] Click Reports Tab
diff --git a/test/csit/tests/portal/testsuites/test1.robot b/test/csit/tests/portal/testsuites/test1.robot
index 42f88bf39..70fdcf0d6 100644
--- a/test/csit/tests/portal/testsuites/test1.robot
+++ b/test/csit/tests/portal/testsuites/test1.robot
@@ -14,9 +14,8 @@ Resource json_templater.robot
*** Variables ***
-#${PORTAL_URL} http://localhost:8989
-${PORTAL_URL} http://portal.api.simpledemo.openecomp.org:8989
-${PORTAL_ENV} /ECOMPPORTAL
+${PORTAL_URL} http://portal.api.simpledemo.onap.org:8989
+${PORTAL_ENV} /ONAPPORTAL
${PORTAL_LOGIN_URL} ${PORTAL_URL}${PORTAL_ENV}/login.htm
${PORTAL_HOME_PAGE} ${PORTAL_URL}${PORTAL_ENV}/applicationsHome
${PORTAL_MICRO_ENDPOINT} ${PORTAL_URL}${PORTAL_ENV}/commonWidgets
@@ -40,7 +39,7 @@ ${Test_LoginID} portal
${Test_Loginpwd} demo123456!
${Test_LoginPwdCheck} demo123456!
${Existing_User} portal
-${PORTAL_HEALTH_CHECK_PATH} /ECOMPPORTAL/portalApi/healthCheck
+${PORTAL_HEALTH_CHECK_PATH} /ONAPPORTAL/portalApi/healthCheck
${PORTAL_ASSETS_DIRECTORY} ${CURDIR}
${GLOBAL_APPLICATION_ID} robot-functional
${GLOBAL_PORTAL_ADMIN_USER} demo
@@ -60,7 +59,7 @@ ${GLOBAL_OPENSTACK_KEYSTONE_SERVICE_TYPE} identity
${GLOBAL_BUILD_NUMBER} 0
${GLOBAL_VM_PRIVATE_KEY} ${EXECDIR}/robot/assets/keys/robot_ssh_private_key.pvt
${jira} jira
-${RESOURCE_PATH} ECOMPPORTAL/auxapi/ticketevent
+${RESOURCE_PATH} ONAPPORTAL/auxapi/ticketevent
${portal_Template} ${CURDIR}/portal.template
@@ -790,11 +789,11 @@ Application admin Login To Portal GUI
[Documentation] Logs into Portal GUI
# Setup Browser Now being managed by test case
##Setup Browser
- Go To ${PORTAL_LOGIN_URL}
- Maximize Browser Window
- Set Selenium Speed ${GLOBAL_SELENIUM_DELAY}
- Set Browser Implicit Wait ${GLOBAL_SELENIUM_BROWSER_IMPLICIT_WAIT}
- Log Logging in to ${PORTAL_URL}${PORTAL_ENV}
+# Go To ${PORTAL_LOGIN_URL}
+# Maximize Browser Window
+# Set Selenium Speed ${GLOBAL_SELENIUM_DELAY}
+# Set Browser Implicit Wait ${GLOBAL_SELENIUM_BROWSER_IMPLICIT_WAIT}
+# Log Logging in to ${PORTAL_URL}${PORTAL_ENV}
# Handle Proxy Warning
Title Should Be Login
Input Text xpath=//input[@ng-model='loginId'] ${App_LoginID}
@@ -878,28 +877,32 @@ Application admin Delete Standard User Existing user
#Input Text xpath=//input[@id='input-table-search'] ${Existing_User}
#Is Element Visible xpath=(//*[contains(.,'Portal')] )[2]
Element Should Not Contain xpath=//*[@table-data='users.accountUsers'] Portal
- Click Image xpath=//img[@alt='Onap Logo']
- Set Selenium Implicit Wait 3000
+ #Click Image xpath=//img[@alt='Onap Logo']
+ #Set Selenium Implicit Wait 3000
Application admin Logout from Portal GUI
[Documentation] Logout from Portal GUI
Click Element xpath=//div[@id='header-user-icon']
- Click Button xpath=//button[contains(.,'Log out')]
- #Confirm Action
- Title Should Be Login
+ #Set Selenium Implicit Wait 3000
+ Click Button xpath=//button[contains(text(),'Log out')]
+ #Set Selenium Implicit Wait 3000
+ Title Should Be Login
+
+
+
Standared user Login To Portal GUI
[Documentation] Logs into Portal GUI
# Setup Browser Now being managed by test case
##Setup Browser
- Go To ${PORTAL_LOGIN_URL}
- Maximize Browser Window
- Set Selenium Speed ${GLOBAL_SELENIUM_DELAY}
- Set Browser Implicit Wait ${GLOBAL_SELENIUM_BROWSER_IMPLICIT_WAIT}
- Log Logging in to ${PORTAL_URL}${PORTAL_ENV}
+# Go To ${PORTAL_LOGIN_URL}
+# Maximize Browser Window
+# Set Selenium Speed ${GLOBAL_SELENIUM_DELAY}
+# Set Browser Implicit Wait ${GLOBAL_SELENIUM_BROWSER_IMPLICIT_WAIT}
+# Log Logging in to ${PORTAL_URL}${PORTAL_ENV}
# Handle Proxy Warning
Title Should Be Login
Input Text xpath=//input[@ng-model='loginId'] ${Sta_LoginID}
diff --git a/test/csit/tests/sdc/nightly/__init__.robot b/test/csit/tests/sdc/nightly/__init__.robot
new file mode 100644
index 000000000..8ee10d5f6
--- /dev/null
+++ b/test/csit/tests/sdc/nightly/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation Sdc - HealthCheck
diff --git a/test/csit/tests/sdc/nightly/test1.robot b/test/csit/tests/sdc/nightly/test1.robot
new file mode 100644
index 000000000..6d4dc242d
--- /dev/null
+++ b/test/csit/tests/sdc/nightly/test1.robot
@@ -0,0 +1,16 @@
+*** Settings ***
+Library Collections
+Library OperatingSystem
+Library RequestsLibrary
+Library json
+
+*** Test Cases ***
+Get Requests health check ok
+ [Tags] get
+ CreateSession sdc-be http://localhost:8080
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= Get Request sdc-be /sdc2/rest/healthCheck headers=&{headers}
+ Should Be Equal As Strings ${resp.status_code} 200
+ @{ITEMS}= Copy List ${resp.json()['componentsInfo']}
+ : FOR ${ELEMENT} IN @{ITEMS}
+ \ Log ${ELEMENT['healthCheckComponent']} ${ELEMENT['healthCheckStatus']}
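
The nightly case above only asserts the overall 200 and logs each component. A standalone probe against the same endpoint that also flags unhealthy components might look like this; the "UP" literal is an assumption about the payload, not something the suite checks:

import requests

resp = requests.get("http://localhost:8080/sdc2/rest/healthCheck",
                    headers={"Accept": "application/json", "Content-Type": "application/json"})
resp.raise_for_status()

components = resp.json()["componentsInfo"]
for comp in components:
    print(comp["healthCheckComponent"], comp["healthCheckStatus"])

down = [c["healthCheckComponent"] for c in components if c["healthCheckStatus"] != "UP"]
assert not down, "components not healthy: %s" % down
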
diff --git a/test/csit/tests/sdnc/healthcheck/data/data.json b/test/csit/tests/sdnc/healthcheck/data/data.json
new file mode 100644
index 000000000..583e26fb9
--- /dev/null
+++ b/test/csit/tests/sdnc/healthcheck/data/data.json
@@ -0,0 +1,4 @@
+{
+ "input" : {
+ }
+}
diff --git a/test/csit/tests/sdnc/healthcheck/data/preload.json b/test/csit/tests/sdnc/healthcheck/data/preload.json
new file mode 100644
index 000000000..b53afa859
--- /dev/null
+++ b/test/csit/tests/sdnc/healthcheck/data/preload.json
@@ -0,0 +1,41 @@
+{
+ "input": {
+ "vnf-topology-information": {
+ "vnf-topology-identifier": {
+ "service-type": "robot_demo",
+ "vnf-name": "vf_robot_module",
+ "vnf-type": "vf_robot_type",
+ "generic-vnf-name": "generic_vnf_name",
+ "generic-vnf-type": "generic_vnf_type"
+ },
+ "vnf-assignments": {
+ "availability-zones": [],
+ "vnf-networks": [],
+ "vnf-vms": []
+ },
+ "vnf-parameters": [
+ {
+ "vnf-parameter-name": "ngm1_management_ip_0",
+ "vnf-parameter-value":"127.0.0.1"
+ },
+ {
+ "vnf-parameter-name": "ngm2_management_ip_1",
+ "vnf-parameter-value":"127.0.0.2"
+ }
+ ]
+ },
+ "request-information": {
+ "request-id": "robot12",
+ "order-version": "1",
+ "notification-url": "openecomp.org",
+ "order-number": "1",
+ "request-action": "PreloadVNFRequest"
+ },
+ "sdnc-request-header": {
+ "svc-request-id": "robot12",
+ "svc-notification-url": "http:\/\/openecomp.org:8080\/adapters\/rest\/SDNCNotify",
+ "svc-action": "reserve"
+ }
+ }
+}
+
diff --git a/test/csit/tests/sdnc/healthcheck/test1.robot b/test/csit/tests/sdnc/healthcheck/test1.robot
index 1adb9a6b3..4bf3d25e7 100644
--- a/test/csit/tests/sdnc/healthcheck/test1.robot
+++ b/test/csit/tests/sdnc/healthcheck/test1.robot
@@ -1,16 +1,45 @@
*** Settings ***
-Library OperatingSystem
-Library Process
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+Library String
*** Variables ***
+${SDN_APIDOCS_URI} /apidoc/apis
+${SDN_HEALTHCHECK_OPERATION_PATH} /operations/SLI-API:healthcheck
+${PRELOAD_VNF_TOPOLOGY_OPERATION_PATH} /operations/VNF-API:preload-vnf-topology-operation
-${health_check} ${SCRIPTS}/health_check.sh
+*** Test Cases ***
+Healthcheck API
+ Create Session sdnc http://localhost:8282/restconf
+ ${data}= Get Binary File ${CURDIR}${/}data${/}data.json
+ &{headers}= Create Dictionary Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ== Content-Type=application/json Accept=application/json
+ ${resp}= Post Request sdnc ${SDN_HEALTHCHECK_OPERATION_PATH} data=${data} headers=${headers}
+ Should Be Equal As Strings ${resp.status_code} 200
+ Should Be Equal As Strings ${resp.json()['output']['response-code']} 200
+
+Check SLI-API
+ Create Session sdnc http://localhost:8282
+ &{headers}= Create Dictionary Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ== Content-Type=application/json Accept=application/json
+ ${resp}= Get Request sdnc ${SDN_APIDOCS_URI} headers=${headers}
+ Log ${resp.content}
+ Should Contain ${resp.content} SLI-API
-*** Test Cases ***
-Health check test case for SDNC
- [Documentation] Health check
- ${result_hc}= Run Process bash ${health_check} > log_hc.txt shell=yes
- Should Be Equal As Integers ${result_hc.rc} 0
+Check VNF-API
+ Create Session sdnc http://localhost:8282
+ &{headers}= Create Dictionary Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ== Content-Type=application/json Accept=application/json
+ ${resp}= Get Request sdnc ${SDN_APIDOCS_URI} headers=${headers}
+ Log ${resp.content}
+ Should Contain ${resp.content} VNF-API
+Test Preload
+ Create Session sdnc http://localhost:8282/restconf
+ ${data}= Get Binary File ${CURDIR}${/}data${/}preload.json
+ &{headers}= Create Dictionary Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ== Content-Type=application/json Accept=application/json
+ ${resp}= Post Request sdnc ${PRELOAD_VNF_TOPOLOGY_OPERATION_PATH} data=${data} headers=${headers}
+ Log ${resp.content}
+ Should Be Equal As Strings ${resp.status_code} 200
+ Should Be Equal As Strings ${resp.json()['output']['response-code']} 200
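
The rewritten SDNC cases all follow one pattern: basic-auth calls against port 8282, POSTing a JSON body to a RESTCONF operation and checking both the HTTP status and the output/response-code field in the reply. A compact requests version of the healthcheck call, reusing the pre-encoded admin credentials the suite embeds:

import json
import requests

BASE = "http://localhost:8282/restconf"
HEADERS = {
    "Authorization": "Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==",
    "Content-Type": "application/json",
    "Accept": "application/json",
}

resp = requests.post(BASE + "/operations/SLI-API:healthcheck",
                     data=json.dumps({"input": {}}),   # same empty-input body as data/data.json
                     headers=HEADERS)
resp.raise_for_status()
assert str(resp.json()["output"]["response-code"]) == "200"
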
diff --git a/test/csit/tests/so/sanity-check/data/createE2eservice.json b/test/csit/tests/so/sanity-check/data/createE2eservice.json
index b24cc9c28..b8bac167e 100644
--- a/test/csit/tests/so/sanity-check/data/createE2eservice.json
+++ b/test/csit/tests/so/sanity-check/data/createE2eservice.json
@@ -1,30 +1,100 @@
{
- "service": {
- "name": "service",
- "description": "so_test1",
- "serviceDefId": "182834434345",
- "templateId": "5994888392",
- "parameters": {
- "domainHost": "127.0.0.1",
- "nodeTemplateName": "model:v3",
- "nodeType": "service",
- "globalSubscriberId": "49923893499",
- "subscriberName": "NEED THIS UUI - AAI",
- "requestParameters": {
- "subscriptionServiceType": "MOG",
- "userParams": [
- {
- "name": "someUserParam",
- "value": "someValue"
- },
- {
- "name": "segments",
- "value": "[\n{\n\"domainHost\":\"localhost\",\n\"nodeTemplateName\":\"IMS_NS\",\n\"nodeType\":\"tosca.nodes.nfv.NS.IMS\",\n\"segments\":[\n\n],\n\"nsParameters\":{\n\"locationConstraints\":[\n{\n\"vnfProfileId\":\"zte-CSCF-1.0\",\n\"locationConstraints\":{\n\"vimId\":\"4050083f-465f-4838-af1e-47a545222ad0\"\n}\n}\n],\n\"additionalParamForNs\":{\n\"externalDataNetworkName\":\"Flow_out_net\",\n\"m6000_mng_ip\":\"181.18.20.2\",\n\"externalCompanyFtpDataNetworkName\":\"Flow_out_net\",\n\"externalPluginManageNetworkName\":\"plugin_net_2014\",\n\"externalManageNetworkName\":\"mng_net_2017\",\n\"sfc_data_network\":\"sfc_data_net_2016\",\n\"NatIpRange\":\"210.1.1.10-210.1.1.20\",\n\"location\":\"4050083f-465f-4838-af1e-47a545222ad0\",\n\"sdncontroller\":\"9b9f02c0-298b-458a-bc9c-be3692e4f35e\"\n}\n}\n},\n{\n\"domainHost\":\"localhost\",\n\"nodeTemplateName\":\"EPC_NS\",\n\"nodeType\":\"tosca.nodes.nfv.NS.IMS\",\n\"segments\":[\n\n],\n\"nsParameters\":{\n\"locationConstraints\":[\n{\n\"vnfProfileId\":\"zte-CSCF-1.0\",\n\"locationConstraints\":{\n\"vimId\":\"4050083f-465f-4838-af1e-47a545222ad0\"\n}\n}\n],\n\"additionalParamForNs\":{\n\"externalDataNetworkName\":\"Flow_out_net\",\n\"m6000_mng_ip\":\"181.18.20.2\",\n\"externalCompanyFtpDataNetworkName\":\"Flow_out_net\",\n\"externalPluginManageNetworkName\":\"plugin_net_2014\",\n\"externalManageNetworkName\":\"mng_net_2017\",\n\"sfc_data_network\":\"sfc_data_net_2016\",\n\"NatIpRange\":\"210.1.1.10-210.1.1.20\",\n\"location\":\"4050083f-465f-4838-af1e-47a545222ad0\",\n\"sdncontroller\":\"9b9f02c0-298b-458a-bc9c-be3692e4f35e\"\n}\n}\n}\n]"
- },
- {
- "name": "nsParameters",
- "value": "{\n \"locationConstraints\": {},\n \"additionalParamForNs\": {\n \"E2EServcie.param1\": \"value1\",\n \"E2EServcie.param2\": \"value2\"\n }\n }"
- }
- ]
- }}}
+ "service":{
+"name":"so_test5",
+"description":"so_test2",
+ "serviceDefId":"60c3e96e-0970-4871-b6e0-3b6de7561519",
+"templateId":"592f9437-a9c0-4303-b9f6-c445bb7e9814",
+ "parameters":{
+ "globalSubscriberId":"123457",
+ "subscriberName":"Customer1",
+ "serviceType":"voLTE",
+"templateName":"voLTE Service:1.0",
+ "resources":[
+ {
+"resourceName":"vIMS",
+ "resourceDefId":"60c3e96e-0970-4871-b6e0-3b6de7561516",
+ "resourceId":"60c3e96e-0970-4871-b6e0-3b6de7561512",
+"nsParameters":{
+ "locationConstraints":[
+ {
+"vnfProfileId":"zte-vBAS-1.0",
+ "locationConstraints":{
+ "vimId":"4050083f-465f-4838-af1e-47a545222ad0"
+ }
+},
+ {
+ "vnfProfileId":"zte-vMME-1.0",
+ "locationConstraints":{
+ "vimId":"4050083f-465f-4838-af1e-47a545222ad0"
+ }
+ }
+ ],
+ "additionalParamForNs":{
+
+ }
+}
+ },
+ {
+ "resourceName":"vEPC",
+ "resourceDefId":"61c3e96e-0970-4871-b6e0-3b6de7561516",
+ "resourceId":"62c3e96e-0970-4871-b6e0-3b6de7561512",
+ "nsParameters":{
+"locationConstraints":[
+ {
+"vnfProfileId":"zte-CSCF-1.0",
+"locationConstraints":{
+ "vimId":"4050083f-465f-4838-af1e-47a545222ad1"
+}
+ }
+ ],
+"additionalParamForNs":{
+
+ }
+ }
+ },
+ {
+ "resourceName":"underlayvpn",
+"resourceDefId":"60c3e96e-0970-4871-b6e0-3b6de7561513",
+ "resourceId":"60c3e96e-0970-4871-b6e0-3b6de7561514",
+"nsParameters":{
+"locationConstraints":[
+
+ ],
+"additionalParamForNs":{
+ "externalDataNetworkName":"Flow_out_net",
+"m6000_mng_ip":"181.18.20.2",
+ "externalCompanyFtpDataNetworkName":"Flow_out_net",
+ "externalPluginManageNetworkName":"plugin_net_2014",
+ "externalManageNetworkName":"mng_net_2017",
+ "sfc_data_network":"sfc_data_net_2016",
+"NatIpRange":"210.1.1.10-210.1.1.20",
+"location":"4050083f-465f-4838-af1e-47a545222ad0",
+ "sdncontroller":"9b9f02c0-298b-458a-bc9c-be3692e4f35e"
+ }
+ }
+ },
+ {
+ "resourceName":"overlayvpn",
+ "resourceDefId":"60c3e96e-0970-4871-b6e0-3b6de7561517",
+ "resourceId":"60c3e96e-0970-4871-b6e0-3b6de7561518",
+"nsParameters":{
+ "locationConstraints":[
+
+ ],
+ "additionalParamForNs":{
+"externalDataNetworkName":"Flow_out_net",
+ "m6000_mng_ip":"181.18.20.2",
+ "externalCompanyFtpDataNetworkName":"Flow_out_net",
+ "externalPluginManageNetworkName":"plugin_net_2014",
+ "externalManageNetworkName":"mng_net_2017",
+ "sfc_data_network":"sfc_data_net_2016",
+"NatIpRange":"210.1.1.10-210.1.1.20",
+"location":"4050083f-465f-4838-af1e-47a545222ad0",
+ "sdncontroller":"9b9f02c0-298b-458a-bc9c-be3692e4f35e"
+}
+ }
+}
+ ]
+}
+}
} \ No newline at end of file
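
The reworked create payload above nests each resource's placement under nsParameters.locationConstraints, keyed by vnfProfileId and vimId. A small sketch that loads the file and prints that placement view, as a quick sanity check before the suite posts it to SO (path relative to the sanity-check directory):

import json

with open("data/createE2eservice.json") as f:
    service = json.load(f)["service"]

print(service["name"], service["parameters"]["serviceType"])
for res in service["parameters"]["resources"]:
    constraints = res["nsParameters"]["locationConstraints"]
    placements = [(c["vnfProfileId"], c["locationConstraints"]["vimId"]) for c in constraints]
    print("%s: %s" % (res["resourceName"], placements or "no location constraints"))
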
diff --git a/test/csit/tests/so/sanity-check/data/deleteE2eservice.json b/test/csit/tests/so/sanity-check/data/deleteE2eservice.json
index f5a87b1d3..d423dc34f 100644
--- a/test/csit/tests/so/sanity-check/data/deleteE2eservice.json
+++ b/test/csit/tests/so/sanity-check/data/deleteE2eservice.json
@@ -1,30 +1,4 @@
{
- "service": {
- "name": "instanceName",
- "description": "so_test1",
- "serviceDefId": "modelInvariantId value from SDC?",
- "templateId": "modelVersionId value from SDC??",
- "parameters": {
- "domainHost": "localhost",
- "nodeTemplateName": "modelName+:+modelVersion",
- "nodeType": "modelType?? == service",
- "globalSubscriberId": "NEED THIS UUI - AAI",
- "subscriberName": "NEED THIS UUI - AAI",
- "requestParameters": {
- "subscriptionServiceType": "MOG",
- "userParams": [
- {
- "name": "someUserParam",
- "value": "someValue"
- },
- {
- "name": "segments",
- "value": "[\n{\n\"domainHost\":\"localhost\",\n\"nodeTemplateName\":\"IMS_NS\",\n\"nodeType\":\"tosca.nodes.nfv.NS.IMS\",\n\"segments\":[\n\n],\n\"nsParameters\":{\n\"locationConstraints\":[\n{\n\"vnfProfileId\":\"zte-CSCF-1.0\",\n\"locationConstraints\":{\n\"vimId\":\"4050083f-465f-4838-af1e-47a545222ad0\"\n}\n}\n],\n\"additionalParamForNs\":{\n\"externalDataNetworkName\":\"Flow_out_net\",\n\"m6000_mng_ip\":\"181.18.20.2\",\n\"externalCompanyFtpDataNetworkName\":\"Flow_out_net\",\n\"externalPluginManageNetworkName\":\"plugin_net_2014\",\n\"externalManageNetworkName\":\"mng_net_2017\",\n\"sfc_data_network\":\"sfc_data_net_2016\",\n\"NatIpRange\":\"210.1.1.10-210.1.1.20\",\n\"location\":\"4050083f-465f-4838-af1e-47a545222ad0\",\n\"sdncontroller\":\"9b9f02c0-298b-458a-bc9c-be3692e4f35e\"\n}\n}\n},\n{\n\"domainHost\":\"localhost\",\n\"nodeTemplateName\":\"EPC_NS\",\n\"nodeType\":\"tosca.nodes.nfv.NS.IMS\",\n\"segments\":[\n\n],\n\"nsParameters\":{\n\"locationConstraints\":[\n{\n\"vnfProfileId\":\"zte-CSCF-1.0\",\n\"locationConstraints\":{\n\"vimId\":\"4050083f-465f-4838-af1e-47a545222ad0\"\n}\n}\n],\n\"additionalParamForNs\":{\n\"externalDataNetworkName\":\"Flow_out_net\",\n\"m6000_mng_ip\":\"181.18.20.2\",\n\"externalCompanyFtpDataNetworkName\":\"Flow_out_net\",\n\"externalPluginManageNetworkName\":\"plugin_net_2014\",\n\"externalManageNetworkName\":\"mng_net_2017\",\n\"sfc_data_network\":\"sfc_data_net_2016\",\n\"NatIpRange\":\"210.1.1.10-210.1.1.20\",\n\"location\":\"4050083f-465f-4838-af1e-47a545222ad0\",\n\"sdncontroller\":\"9b9f02c0-298b-458a-bc9c-be3692e4f35e\"\n}\n}\n}\n]"
- },
- {
- "name": "nsParameters",
- "value": "{\n \"locationConstraints\": {},\n \"additionalParamForNs\": {\n \"E2EServcie.param1\": \"value1\",\n \"E2EServcie.param2\": \"value2\"\n }\n }"
- }
- ]
- }}}
+ "globalSubscriberId":"388499302",
+ "serviceType" : "VoLTE"
} \ No newline at end of file
diff --git a/test/csit/tests/so/sanity-check/sanity_test_so.robot b/test/csit/tests/so/sanity-check/sanity_test_so.robot
index 065414569..7d6ddf31c 100644
--- a/test/csit/tests/so/sanity-check/sanity_test_so.robot
+++ b/test/csit/tests/so/sanity-check/sanity_test_so.robot
@@ -207,4 +207,4 @@ Delete E2EService with invalid input data
${data}= Get Binary File ${CURDIR}${/}data${/}deleteE2eserviceInvalid.json
&{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA== Content-Type=application/json Accept=application/json
${resp}= Delete Request refrepo /ecomp/mso/infra/e2eServiceInstances/v3/ff305d54-75b4-431b-adb2-eb6b9e5ff000 data=${data} headers=${headers}
- Run Keyword If '${resp.status_code}' == '400' or '${resp.status_code}' == '404' or '${resp.status_code}' == '405' log to console \nexecuted with expected result
+ Run Keyword If '${resp.status_code}' == '400' or '${resp.status_code}' == '404' or '${resp.status_code}' == '405' log to console \nexecuted with expected result \ No newline at end of file
diff --git a/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot b/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot
index e0679fbd8..a2aa63949 100644
--- a/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot
+++ b/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot
@@ -9,7 +9,7 @@ Library HttpLibrary.HTTP
*** Variables ***
@{return_ok_list}= 200 201 202 204
-${queryswagger_url} /api/hwvnfm/v1/swagger.json
+${queryswagger_url} /api/huaweivnfmdriver/v1/swagger.json
${createauthtoken_url} /rest/plat/smapp/v1/oauth/token
#json files
diff --git a/test/csit/tests/vfc/nfvo-wfengine/workflow.robot b/test/csit/tests/vfc/nfvo-wfengine/workflow.robot
index 82cdaed7d..07bfe6979 100644
--- a/test/csit/tests/vfc/nfvo-wfengine/workflow.robot
+++ b/test/csit/tests/vfc/nfvo-wfengine/workflow.robot
@@ -1,15 +1,113 @@
-*** Settings ***
-Library Collections
-Library requests
-
-*** Test Cases ***
-Deploy BPMN File Test
- [Documentation] Check if the test bpmn file can be deployed in activiti engine
- Should Be Equal 200 200
-UnDeploy BPMN File Test
- [Documentation] Check if the test bpmn file can be undeployed in activiti engine
- Should Be Equal 404 404
-
-Exectue BPMN File Test
- [Documentation] Check if the test bpmn file can be exectued in activiti engine
- Should Be Equal 200 200 \ No newline at end of file
+*** Settings ***
+Resource ../../common.robot
+Library Collections
+Library json
+Library OperatingSystem
+Library RequestsLibrary
+Library HttpLibrary.HTTP
+
+*** Variables ***
+${MSB_IP} 127.0.0.1
+${MSB_PORT} 10550
+${ACTIVITI_IP} 127.0.0.1
+${ACTIVITI_PORT} 8804
+${MGRSERVICE_IP} 127.0.0.1
+${MGRSERVICE_PORT} 8805
+${processId} demo
+${deployid} 0
+${bmpfilepath} ${SCRIPTS}/nfvo-wfengine/demo.bpmn20.xml
+
+*** Test Cases ***
+Deploy BPMN File Test On Activiti
+ [Documentation] Check if the test bpmn file can be deployed in the Activiti engine
+ ${auth}= Create List kermit kermit
+ ${headers}= Create Dictionary Accept=application/json
+ Create Session web_session http://${ACTIVITI_IP}:${ACTIVITI_PORT} headers=${headers} auth=${auth}
+ ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
+ ${resp}= Post Request web_session /activiti-rest/service/repository/deployments files=${files}
+ Should Be Equal ${resp.status_code} ${201}
+ Log ${resp.json()}
+ ${deployedId}= Set Variable ${resp.json()["id"]}
+ Set Global Variable ${deployedId}
+
+Execute BPMN File Test On Activiti
+ [Documentation] Check if the test bpmn file can be executed in the Activiti engine
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
+ Create Session web_session http://${ACTIVITI_IP}:${ACTIVITI_PORT} headers=${headers}
+ ${body} Create Dictionary processDefinitionKey=${processId}
+ ${body} dumps ${body}
+ ${resp}= Post Request web_session /activiti-rest/service/runtime/process-instances ${body}
+ Should Be Equal ${resp.status_code} ${201}
+
+Undeploy BPMN File Test On Activiti
+ [Documentation] Check if the test bpmn file can be undeployed in the Activiti engine
+ log ${deployedId}
+ ${auth}= Create List kermit kermit
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${ACTIVITI_IP}:${ACTIVITI_PORT} headers=${headers} auth=${auth}
+ ${resp}= Delete Request web_session /activiti-rest/service/repository/deployments/${deployedId}?cascade=true
+ Should Be Equal ${resp.status_code} ${204}
+
+Deploy BPMN File Test On MgrService
+ [Documentation] Check if the test bpmn file can be deployed in the Management Service
+ ${auth}= Create List kermit kermit
+ ${headers}= Create Dictionary Accept=application/json
+ Create Session web_session http://${MGRSERVICE_IP}:${MGRSERVICE_PORT} headers=${headers} auth=${auth}
+ ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
+ ${resp}= Post Request web_session api/workflow/v1/package files=${files}
+ Should Be Equal ${resp.status_code} ${200}
+ Log ${resp.json()}
+ ${deployedId}= Set Variable ${resp.json()["deployedId"]}
+ Set Global Variable ${deployedId}
+
+Execute BPMN File Test On MgrService
+ [Documentation] Check if the test bpmn file can be executed in the Management Service
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
+ Create Session web_session http://${MGRSERVICE_IP}:${MGRSERVICE_PORT} headers=${headers}
+ ${body} Create Dictionary processDefinitionKey=${processId}
+ ${body} dumps ${body}
+ ${resp}= Post Request web_session api/workflow/v1/process/instance ${body}
+ Should Be Equal ${resp.status_code} ${200}
+ Log ${resp.json()}
+ Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId}
+
+Undeploy BPMN File Test On MgrService
+ [Documentation] Check if the test bpmn file can be undeployed in the Management Service
+ log ${deployedId}
+ ${auth}= Create List kermit kermit
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${MGRSERVICE_IP}:${MGRSERVICE_PORT} headers=${headers} auth=${auth}
+ ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId}
+ Should Be Equal ${resp.status_code} ${200}
+
+Deploy BPMN File Test On MSB
+ [Documentation] Check if the test bpmn file can be deployed via MSB
+ ${auth}= Create List kermit kermit
+ ${headers}= Create Dictionary Accept=application/json
+ Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
+ ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
+ ${resp}= Post Request web_session api/workflow/v1/package files=${files}
+ Should Be Equal ${resp.status_code} ${200}
+ Log ${resp.json()}
+ ${deployedId}= Set Variable ${resp.json()["deployedId"]}
+ Set Global Variable ${deployedId}
+
+Execute BPMN File Test On MSB
+ [Documentation] Check if the test bpmn file can be executed via MSB
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
+ Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers}
+ ${body} Create Dictionary processDefinitionKey=${processId}
+ ${body} dumps ${body}
+ ${resp}= Post Request web_session api/workflow/v1/process/instance ${body}
+ Should Be Equal ${resp.status_code} ${200}
+ Log ${resp.json()}
+ Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId}
+
+Undeploy BPMN File Test On MSB
+ [Documentation] Check if the test bpmn file can be undeployed via MSB
+ log ${deployedId}
+ ${auth}= Create List kermit kermit
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
+ ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId}
+ Should Be Equal ${resp.status_code} ${200}
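
Each trio of cases above runs the same deploy/execute/undeploy cycle against a different front end: Activiti directly on 8804, the workflow MgrService on 8805, and MSB on 10550; only the base URL, path, and expected status codes differ. A rough requests version of the Activiti leg, with the same kermit/kermit account, multipart upload, and cascade delete the suite uses (the BPMN path is assumed local):

import requests

ACTIVITI = "http://127.0.0.1:8804/activiti-rest/service"
AUTH = ("kermit", "kermit")                       # same demo account the suite uses

# Deploy the BPMN definition (multipart upload; the suite expects 201).
with open("demo.bpmn20.xml", "rb") as bpmn:
    dep = requests.post(ACTIVITI + "/repository/deployments",
                        files={"file": bpmn}, auth=AUTH)
dep.raise_for_status()
deployment_id = dep.json()["id"]

# Start a process instance of the 'demo' definition (expects 201).
run = requests.post(ACTIVITI + "/runtime/process-instances",
                    json={"processDefinitionKey": "demo"}, auth=AUTH)
run.raise_for_status()

# Undeploy, cascading to any running instances (expects 204).
requests.delete(ACTIVITI + "/repository/deployments/" + deployment_id,
                params={"cascade": "true"}, auth=AUTH).raise_for_status()
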
diff --git a/test/csit/tests/vid/login/__init__.robot b/test/csit/tests/vid/login/__init__.robot
new file mode 100644
index 000000000..10faf380f
--- /dev/null
+++ b/test/csit/tests/vid/login/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation VID - Login
diff --git a/test/csit/tests/vid/login/test1.robot b/test/csit/tests/vid/login/test1.robot
new file mode 100644
index 000000000..12b23b725
--- /dev/null
+++ b/test/csit/tests/vid/login/test1.robot
@@ -0,0 +1,78 @@
+*** Settings ***
+Documentation Logs in to VID
+Library ExtendedSelenium2Library
+Library Collections
+Library String
+Library RequestsLibrary
+#Library OSUtils
+Library OperatingSystem
+
+*** Variables ***
+${GLOBAL_APPLICATION_ID} robot-ete
+${GLOBAL_SELENIUM_BROWSER} chrome
+${GLOBAL_SELENIUM_BROWSER_CAPABILITIES} Create Dictionary
+${GLOBAL_SELENIUM_DELAY} 0
+${GLOBAL_SELENIUM_BROWSER_IMPLICIT_WAIT} 5
+${GLOBAL_SELENIUM_BROWSER_WAIT_TIMEOUT} 15
+${VID_ENV} /vid
+${VID_ENDPOINT} http://localhost:8080
+${VID_LOGIN_URL} ${VID_ENDPOINT}${VID_ENV}/login.htm
+${VID_HEALTHCHECK_PATH} ${VID_ENV}/api/v2/users
+${VID_HOME_URL} ${VID_ENDPOINT}${VID_ENV}/welcome.htm
+${GLOBAL_VID_USERNAME} demo
+${GLOBAL_VID_PASSWORD} Kp8bJ4SXszM0WX
+
+
+*** Test Cases ***
+Login To VID GUI
+ [Documentation] Logs in to VID GUI
+ # Setup Browser Now being managed by test case
+ Setup Browser
+ Go To ${VID_LOGIN_URL}
+ #Maximize Browser Window
+ Set Selenium Speed ${GLOBAL_SELENIUM_DELAY}
+ Set Browser Implicit Wait ${GLOBAL_SELENIUM_BROWSER_IMPLICIT_WAIT}
+ Log Logging in to ${VID_ENDPOINT}${VID_ENV}
+ #Handle Proxy Warning
+ Title Should Be Login
+ Input Text xpath=//input[@id='loginId'] ${GLOBAL_VID_USERNAME}
+ Input Password xpath=//input[@id='password'] ${GLOBAL_VID_PASSWORD}
+ Click Button xpath=//input[@id='loginBtn']
+ Wait Until Page Contains Welcome to VID ${GLOBAL_SELENIUM_BROWSER_WAIT_TIMEOUT}
+ Log Logged in to ${VID_ENDPOINT}${VID_ENV}
+
+
+*** Keywords ***
+Setup Browser
+ [Documentation] Sets up browser based upon the value of ${GLOBAL_SELENIUM_BROWSER}
+ Run Keyword If '${GLOBAL_SELENIUM_BROWSER}' == 'firefox' Setup Browser Firefox
+ Run Keyword If '${GLOBAL_SELENIUM_BROWSER}' == 'chrome' Setup Browser Chrome
+ Log Running with ${GLOBAL_SELENIUM_BROWSER}
+
+Setup Browser Firefox
+ ${dc} Evaluate sys.modules['selenium.webdriver'].DesiredCapabilities.FIREFOX sys, selenium.webdriver
+ Set To Dictionary ${dc} elementScrollBehavior 1
+ Create Webdriver Firefox desired_capabilities=${dc}
+ Set Global Variable ${GLOBAL_SELENIUM_BROWSER_CAPABILITIES} ${dc}
+
+Setup Browser Chrome
+ #${os}= Get Normalized Os
+ #Log Normalized OS=${os}
+ ${chrome options}= Evaluate sys.modules['selenium.webdriver'].ChromeOptions() sys
+ Call Method ${chrome options} add_argument no-sandbox
+ ${dc} Evaluate sys.modules['selenium.webdriver'].DesiredCapabilities.CHROME sys, selenium.webdriver
+ Set To Dictionary ${dc} elementScrollBehavior 1
+ Create Webdriver Chrome chrome_options=${chrome_options} desired_capabilities=${dc}
+ Set Global Variable ${GLOBAL_SELENIUM_BROWSER_CAPABILITIES} ${dc}
+
+Handle Proxy Warning
+ [Documentation] Handle Intermediate Warnings from Proxies
+ ${status} ${data}= Run Keyword And Ignore Error Variable Should Exist \${GLOBAL_PROXY_WARNING_TITLE}
+ Return From Keyword if '${status}' != 'PASS'
+ ${status} ${data}= Run Keyword And Ignore Error Variable Should Exist \${GLOBAL_PROXY_WARNING_CONTINUE_XPATH}
+ Return From Keyword if '${status}' != 'PASS'
+ Return From Keyword if "${GLOBAL_PROXY_WARNING_TITLE}" == ''
+ Return From Keyword if "${GLOBAL_PROXY_WARNING_CONTINUE_XPATH}" == ''
+ ${test} ${value}= Run keyword and ignore error Title Should Be ${GLOBAL_PROXY_WARNING_TITLE}
+ Run keyword If '${test}' == 'PASS' Click Element xpath=${GLOBAL_PROXY_WARNING_CONTINUE_XPATH}
+ \ No newline at end of file
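
The Setup Browser Chrome keyword above builds its driver through Evaluate calls into selenium.webdriver; the same setup in plain Python, using the Selenium 3-era arguments the suite itself relies on (the --no-sandbox flag is what lets Chrome run inside the CSIT container), is roughly:

from selenium import webdriver

options = webdriver.ChromeOptions()
options.add_argument("--no-sandbox")              # equivalent of the suite's add_argument no-sandbox

caps = webdriver.DesiredCapabilities.CHROME.copy()
caps["elementScrollBehavior"] = 1                 # same capability the keyword sets

driver = webdriver.Chrome(chrome_options=options, desired_capabilities=caps)
driver.get("http://localhost:8080/vid/login.htm") # VID_LOGIN_URL built from the variables above
print(driver.title)                               # the test expects the title "Login"
driver.quit()
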
diff --git a/test/csit/tests/vnfsdk-marketplace/provision/enterprise2DC.csar b/test/csit/tests/vnfsdk-marketplace/provision/enterprise2DC.csar
index f18d52d60..0960b20aa 100644
--- a/test/csit/tests/vnfsdk-marketplace/provision/enterprise2DC.csar
+++ b/test/csit/tests/vnfsdk-marketplace/provision/enterprise2DC.csar
Binary files differ
diff --git a/test/ete/labs/windriver/Integration-Stable-openrc.sh b/test/ete/labs/windriver/Integration-Jenkins-openrc.sh
index 1d68adc25..ca2e2c2cf 100644
--- a/test/ete/labs/windriver/Integration-Stable-openrc.sh
+++ b/test/ete/labs/windriver/Integration-Jenkins-openrc.sh
@@ -15,8 +15,8 @@ export OS_AUTH_URL=http://10.12.25.2:5000/v3
# With the addition of Keystone we have standardized on the term **project**
# as the entity that owns the resources.
-export OS_PROJECT_ID=3583253e932845a09cd4c8ca2f31d095
-export OS_PROJECT_NAME="Integration-Stable"
+export OS_PROJECT_ID="09d8566ea45e43aa974cf447ed591d77"
+export OS_PROJECT_NAME="Integration-Jenkins"
export OS_USER_DOMAIN_NAME="Default"
if [ -z "$OS_USER_DOMAIN_NAME" ]; then unset OS_USER_DOMAIN_NAME; fi
@@ -26,20 +26,15 @@ unset OS_TENANT_NAME
# In addition to the owning entity (tenant), OpenStack stores the entity
# performing the action as the **user**.
-export OS_USERNAME="gary_wu"
-
-# Remote Openstack clients will need to set this environment if
-# connecting to an HTTPS enabled endpoint
-CERT_MSG="Please enter a path for your CA certificate pem file, \
-or press enter if you are not using HTTPS "
-read -p "$CERT_MSG" OS_CACERT_INPUT
-if [ ! -z "$OS_CACERT_INPUT" ]
-then
- export OS_CACERT=$(readlink -f $OS_CACERT_INPUT)
+if [ -z ${OS_USERNAME_INPUT+x} ]; then
+ read -sp "Please enter your OpenStack Username for project $OS_PROJECT_NAME: " OS_USERNAME_INPUT
fi
+export OS_USERNAME=$OS_USERNAME_INPUT
# With Keystone you pass the keystone password.
-read -sp "Please enter your OpenStack Password for project $OS_PROJECT_NAME as user $OS_USERNAME: " OS_PASSWORD_INPUT
+if [ -z ${OS_PASSWORD_INPUT+x} ]; then
+ read -sp "Please enter your OpenStack Password for project $OS_PROJECT_NAME as user $OS_USERNAME: " OS_PASSWORD_INPUT
+fi
export OS_PASSWORD=$OS_PASSWORD_INPUT
# If your configuration has multiple regions, we set that information here.
diff --git a/test/ete/labs/windriver/Integration-SB-04-openrc.sh b/test/ete/labs/windriver/Integration-SB-04-openrc.sh
new file mode 100644
index 000000000..e7cee93b9
--- /dev/null
+++ b/test/ete/labs/windriver/Integration-SB-04-openrc.sh
@@ -0,0 +1,49 @@
+#!/usr/bin/env bash
+
+# To use an OpenStack cloud you need to authenticate against the Identity
+# service named keystone, which returns a **Token** and **Service Catalog**.
+# The catalog contains the endpoints for all services the user/tenant has
+# access to - such as Compute, Image Service, Identity, Object Storage, Block
+# Storage, and Networking (code-named nova, glance, keystone, swift,
+# cinder, and neutron).
+#
+# *NOTE*: Using the *Identity API* v3 does not necessarily mean any other
+# OpenStack API is version 3. For example, your cloud provider may implement
+# Image API v1.1, Block Storage API v2, and Compute API v2.0. OS_AUTH_URL is
+# only for the Identity API served through keystone.
+export OS_AUTH_URL=http://10.12.25.2:5000/v3
+
+# With the addition of Keystone we have standardized on the term **project**
+# as the entity that owns the resources.
+export OS_PROJECT_ID="d570c718cbc545029f40e50b75eb13df"
+export OS_PROJECT_NAME="Integration-SB-04"
+export OS_USER_DOMAIN_NAME="Default"
+if [ -z "$OS_USER_DOMAIN_NAME" ]; then unset OS_USER_DOMAIN_NAME; fi
+
+# unset v2.0 items in case set
+unset OS_TENANT_ID
+unset OS_TENANT_NAME
+
+# In addition to the owning entity (tenant), OpenStack stores the entity
+# performing the action as the **user**.
+if [ -z ${OS_USERNAME_INPUT+x} ]; then
+ read -sp "Please enter your OpenStack Username for project $OS_PROJECT_NAME: " OS_USERNAME_INPUT
+fi
+export OS_USERNAME=$OS_USERNAME_INPUT
+
+# With Keystone you pass the keystone password.
+if [ -z ${OS_PASSWORD_INPUT+x} ]; then
+ read -sp "Please enter your OpenStack Password for project $OS_PROJECT_NAME as user $OS_USERNAME: " OS_PASSWORD_INPUT
+fi
+export OS_PASSWORD=$OS_PASSWORD_INPUT
+
+# If your configuration has multiple regions, we set that information here.
+# OS_REGION_NAME is optional and only valid in certain environments.
+export OS_REGION_NAME="RegionOne"
+# Don't leave a blank variable, unset it if it was empty
+if [ -z "$OS_REGION_NAME" ]; then unset OS_REGION_NAME; fi
+
+export OS_INTERFACE=public
+export OS_IDENTITY_API_VERSION=3
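Editor's note, not part of the patch: once this rc file is sourced, the standard OpenStack client picks up the OS_* variables. A quick sanity check, assuming python-openstackclient and python-heatclient are installed (install_openstack_cli.sh further down in this change does that):

    source test/ete/labs/windriver/Integration-SB-04-openrc.sh
    openstack token issue     # succeeds only if Keystone accepts the credentials
    openstack stack list      # Heat stacks visible to the Integration-SB-04 project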
diff --git a/test/ete/labs/windriver/onap.env b/test/ete/labs/windriver/onap-openstack-template.env
index 5eb5d367f..72df4f654 100644
--- a/test/ete/labs/windriver/onap.env
+++ b/test/ete/labs/windriver/onap-openstack-template.env
@@ -8,12 +8,12 @@ parameters:
public_net_id: 971040b2-7059-49dc-b220-4fab50cb2ad4
+ public_net_name: external
+
ubuntu_1404_image: ubuntu-14-04-cloud-amd64
ubuntu_1604_image: ubuntu-16-04-cloud-amd64
- centos_7_image: CentOS-7
-
flavor_small: m1.small
flavor_medium: m1.medium
@@ -24,18 +24,12 @@ parameters:
flavor_xxlarge: m1.xxlarge
- security_group: default
-
vm_base_name: onap
key_name: onap_key
- dcae_key_name: dcae_key
-
pub_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKXDgoo3+WOqcUG8/5uUbk81+yczgwC4Y8ywTmuQqbNxlY1oQ0YxdMUqUnhitSXs5S/yRuAVOYHwGg2mCs20oAINrP+mxBI544AMIb9itPjCtgqtE2EWo6MmnFGbHB4Sx3XioE7F4VPsh7japsIwzOjbrQe+Mua1TGQ5d4nfEOQaaglXLLPFfuc7WbhbJbK6Q7rHqZfRcOwAMXgDoBqlyqKeiKwnumddo2RyNT8ljYmvB6buz7KnMinzo7qB0uktVT05FH9Rg0CTWH5norlG5qXgP2aukL0gk1ph8iAt7uYLf1ktp+LJI2gaF6L0/qli9EmVCSLr1uJ38Q8CBflhkh
- dcae_pub_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC94pcIva90rzXumORjMCf16qjVrn5+ykfW6uMcn+DPlifU+ihtqx4Up26v1gVZyCuY8BFmbhm4YGxnRykNfi71kYbWOasUFzY86dtptCuLVMRLsMYWwOA3sHQ5IcUYNbWmChh4LyOIfhy4p2atCmB1UUkQf7Icg8LokCIcdI3IohlNko50t9KOIhBvqC7Bnegnl6zpoYdtYQppdgp5xesdhYRrdIGmT86cH2QbImpcR3tMTty7SLsw5WdTSfUFwfm76s07ivUDfCRMCJoWcxQeh6my7P7BkfDYMVIYRyfWmoIf2iv3UrqahHTcCiL79SRiD0iCt0K2SEgGsVyCX2jf
-
nexus_repo: https://nexus.onap.org/content/sites/raw
nexus_docker_repo: nexus3.onap.org:10001
@@ -48,11 +42,13 @@ parameters:
artifacts_version: 1.1.0-SNAPSHOT
- openstack_tenant_id: SAMPLE
+ openstack_tenant_id: ${OS_PROJECT_ID}
+
+ openstack_tenant_name: ${OS_PROJECT_NAME}
- openstack_username: SAMPLE
+ openstack_username: ${OS_USERNAME}
- openstack_api_key: SAMPLE
+ openstack_api_key: ${OS_PASSWORD}
openstack_auth_method: password
@@ -71,9 +67,10 @@ parameters:
# #
######################
- dns_list: 8.8.8.8
+ dns_list: ["10.12.25.5", "8.8.8.8"]
external_dns: 8.8.8.8
oam_network_cidr: 10.0.0.0/16
+ dns_forwarder: 10.12.25.5
### Private IP addresses ###
@@ -98,11 +95,11 @@ parameters:
clamp_ip_addr: 10.0.12.1
openo_ip_addr: 10.0.14.1
- dcae_coll_float_ip: 10.12.0.32
- dcae_db_float_ip: 10.12.0.24
- dcae_hdp1_float_ip: 10.12.0.30
- dcae_hdp2_float_ip: 10.12.0.33
- dcae_hdp3_float_ip: 10.12.0.15
+# dcae_coll_float_ip: PUT DCAE COLLECTOR FLOATING IP HERE
+# dcae_db_float_ip: PUT DCAE DATABASE FLOATING IP HERE
+# dcae_hdp1_float_ip: PUT DCAE HADOOP VM1 FLOATING IP HERE
+# dcae_hdp2_float_ip: PUT DCAE HADOOP VM2 FLOATING IP HERE
+# dcae_hdp3_float_ip: PUT DCAE HADOOP VM3 FLOATING IP HERE
###########################
# #
@@ -110,20 +107,57 @@ parameters:
# #
###########################
- dcae_base_environment: 1-NIC-FLOATING-IPS
-
- dcae_zone: ZONE
-
- dcae_state: STATE
-
- nexus_repo_root: https://nexus.onap.org
-
- nexus_url_snapshot: https://nexus.onap.org/content/repositories/snapshots
-
- gitlab_branch: master
-
- dcae_code_version: 1.1.0
-
+# dcae_base_environment: 1-NIC-FLOATING-IPS
+
+# dcae_zone: ZONE
+
+# dcae_state: STATE
+
+# nexus_repo_root: https://nexus.onap.org
+
+# nexus_url_snapshot: https://nexus.onap.org/content/repositories/snapshots
+
+# gitlab_branch: master
+
+# dcae_code_version: 1.1.0
+
+ dnsaas_config_enabled: true
+ dnsaas_region: RegionOne
+ dnsaas_keystone_url: http://10.12.25.5:5000/v3
+ dnsaas_tenant_name: ${OS_PROJECT_NAME}
+ dnsaas_username: ${OS_USERNAME}
+ dnsaas_password: ${OS_PASSWORD}
+ dcae_domain: dcaeg2.onap.org
+ dcae_keystone_url: "http://10.0.14.1/api/multicloud-titanium_cloud/v0/pod25_RegionOne/identity/v2.0"
+ dcae_centos_7_image: CentOS-7
+ dcae_public_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKXDgoo3+WOqcUG8/5uUbk81+yczgwC4Y8ywTmuQqbNxlY1oQ0YxdMUqUnhitSXs5S/yRuAVOYHwGg2mCs20oAINrP+mxBI544AMIb9itPjCtgqtE2EWo6MmnFGbHB4Sx3XioE7F4VPsh7japsIwzOjbrQe+Mua1TGQ5d4nfEOQaaglXLLPFfuc7WbhbJbK6Q7rHqZfRcOwAMXgDoBqlyqKeiKwnumddo2RyNT8ljYmvB6buz7KnMinzo7qB0uktVT05FH9Rg0CTWH5norlG5qXgP2aukL0gk1ph8iAt7uYLf1ktp+LJI2gaF6L0/qli9EmVCSLr1uJ38Q8CBflhkh
+ dcae_private_key: '-----BEGIN RSA PRIVATE KEY-----\n
+MIIEpQIBAAKCAQEAylw4KKN/ljqnFBvP+blG5PNfsnM4MAuGPMsE5rkKmzcZWNaE\n
+NGMXTFKlJ4YrUl7OUv8kbgFTmB8BoNpgrNtKACDaz/psQSOeOADCG/YrT4wrYKrR\n
+NhFqOjJpxRmxweEsd14qBOxeFT7Ie42qbCMMzo260HvjLmtUxkOXeJ3xDkGmoJVy\n
+yzxX7nO1m4WyWyukO6x6mX0XDsADF4A6AapcqinoisJ7pnXaNkcjU/JY2Jrwem7s\n
++ypzIp86O6gdLpLVU9ORR/UYNAk1h+Z6K5Rual4D9mrpC9IJNaYfIgLe7mC39ZLa\n
+fiySNoGhei9P6pYvRJlQki69bid/EPAgX5YZIQIDAQABAoIBAQClDekkhI9ZqseC\n
+qFjPuKaxsizZMg+faJb6WSHLSxzyk1OSWY6F6FklgLeC8HW/fuLNYZyGOYDEsG20\n
+lMqL02Wdiy7OutS3oOS5iyzIf9a90HfFJi706el6RIpvINETcaXCS0T8tQrcS1Rd\n
+KqTaBRC6HXJGAPbBcvw3pwQSdskatU6a/Kt2a3x6DsqqinQcgEB/SbrDaJCUX9sb\n
+F2HVUwdq7aZK1Lk0ozr1FID9mrhjwWuQ6XC+vjG0FqtyXeMpR5iaQ73hex3FXQ8z\n
+OjkFbMwuHWSh1DSx70r5yFrrBqwQKnMsBqx4QDRf3fIENUnWviaL+n+gwcXA07af\n
+4kaNUFUtAoGBAPuNNRAGhZnyZ9zguns9PM56nmeMUikV5dPN2DTbQb79cpfV+7pC\n
+6PeSH/dTKFLz62d6qAM2EsNXQvewf8fipBVBRPsRqKOv+uepd01dHNy62I5B+zRm\n
+be9Kbe+EN60qdzvyPM+2hV6CnvGv1dirimS9pu6RrxD2Rmz1ectnJE+rAoGBAM3w\n
+UbSEemyZ6EKjck2RfdipzY0MNBnIZ2cUqHh8mmPXjdTLzpXb9vmPbHb01Qwo8MP+\n
+gMnTbTBOzyNAaHdIrCO9FHW6C85j3ot5Yzcr+EcBVcua+7KHU0Sgn44JNH8DisJ7\n
+Y63UP/1Xb4d1/QvHfxYy3WOvvRdVZ7pPo8JNX95jAoGAIe5CIg8/JizUZa7KeKUh\n
+9pgDleQPkQsrHQ6/AyIwFBsLwf9THSS5V+uV9D57SfUs46Bf2U8J6N90YQSlt8iS\n
+aWuManFPVgT+yxDIzt6obf2mCEpOIBtQ6N4ZRh2HhQwdWTCrkzkDdGQaHG+jYL6C\n
+xGPwiG2ON7OAfGIAM7eN5lECgYEAhoRLWlaOgRGnHKAWsYQvZ67CjTdDcPPuVu6v\n
+fMQnNMA/7JeTwV+E205L0wfpgZ/cZKmBBlQMJlnUA3q2wfO+PTnse1mjDJU/cGtB\n
+22/lJLxChlQdxGeQhGtGzUhF+hEeOhrO6WSSx7CtMRZoy6Dr6lwfMFZCdVNcBd6v\n
+YOOZk3ECgYEAseUKGb6E80XTVVNziyuiVbQCsI0ZJuRfqMZ2IIDQJU9u6AnGAway\n
+itqHbkGsmDT+4HUz01+1JKnnw42RdSrHdU/LaOonD+RIGqe2x800QXzqASKLdCXr\n
+y7RoiFqJtkdFQykzJemA+xOXvHLgKi/MXFsU90PCD0VJKLj8vwpX78Y=\n
+-----END RSA PRIVATE KEY-----'
################################
# #
@@ -137,7 +171,7 @@ parameters:
mr_branch: master
dcae_branch: master
policy_branch: master
- portal_branch: master
+ portal_branch: release-1.3.0
robot_branch: master
sdc_branch: master
sdnc_branch: master
@@ -163,7 +197,7 @@ parameters:
uui_docker: latest
esr_docker: latest
dgbuilder_docker: 0.1-STAGING-latest
- cli_docker: 1.1-STAGING-latest
+ cli_docker: v1.1.0
#####################
# #
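Editor's note: the SAMPLE values from onap.env become ${OS_*} placeholders in this template; deploy-onap.sh below renders them with envsubst against whatever the sourced rc file exported. Roughly, with illustrative credentials (the username and password here are hypothetical):

    export OS_PROJECT_ID=d570c718cbc545029f40e50b75eb13df   # Integration-SB-04, per the rc file above
    export OS_PROJECT_NAME=Integration-SB-04
    export OS_USERNAME=jenkins                # hypothetical
    export OS_PASSWORD=not-a-real-password    # hypothetical
    envsubst < onap-openstack-template.env > onap-openstack.env
    grep 'openstack_' onap-openstack.env      # placeholders now carry real values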
diff --git a/test/ete/scripts/deploy-onap.sh b/test/ete/scripts/deploy-onap.sh
index 3c69e15e8..4802e02ce 100755
--- a/test/ete/scripts/deploy-onap.sh
+++ b/test/ete/scripts/deploy-onap.sh
@@ -1,53 +1,63 @@
-#!/bin/bash
-
-if [ -z "$OS_AUTH_URL" ] || [ -z "$OS_USERNAME" ]
-then
- echo "ERROR: OpenStack environment variables not set. Please source your OpenStack RC script first."
- exit 1
-fi
-
+#!/bin/bash -x
if [ -z "$WORKSPACE" ]; then
export WORKSPACE=`git rev-parse --show-toplevel`
fi
+source $WORKSPACE/test/ete/scripts/install_openstack_cli.sh
+# Delete all existing stacks
+STACKS=$(openstack stack list -c "Stack Name" -f value)
-# Assume that if ROBOT_VENV is set, we don't need to reinstall robot
-if [ -f ${WORKSPACE}/env.properties ]; then
- source ${WORKSPACE}/env.properties
-fi
-
-if [ ! -z "$ONAP_VENV" ] && [ -f "$ONAP_VENV/bin/activate" ]; then
- source ${ONAP_VENV}/bin/activate
+if [ ! -z "${STACKS}" ]; then
+ echo "Deleting Stacks ${STACKS}"
+ openstack stack delete -y $STACKS
+ for STACK in ${STACKS}; do
+ until [ "DELETE_IN_PROGRESS" != "$(openstack stack show -c stack_status -f value $STACK)" ]; do
+ sleep 30
+ done
+ done
else
- ONAP_VENV=$(mktemp -d --suffix=_onap_venv)
- virtualenv ${ONAP_VENV}
- source ${ONAP_VENV}/bin/activate
-
- pip install --upgrade pip
- pip install --upgrade python-openstackclient python-heatclient
-
- echo "ONAP_VENV=${ONAP_VENV}" >> $WORKSPACE/env.properties
+ echo "No existing stacks to delete."
fi
-echo "ONAP_VENV=${ONAP_VENV}"
-if [ -z "$ONAP_WORKDIR" ]; then
- ONAP_WORKDIR=$(mktemp -d --suffix=_onap_workdir)
- echo "ONAP_WORKDIR=${ONAP_WORKDIR}" >> $WORKSPACE/env.properties
-fi
-echo "ONAP_WORKDIR=${ONAP_WORKDIR}"
-if [ ! -d ${ONAP_WORKDIR}/demo ]; then
- git clone http://gerrit.onap.org/r/demo ${ONAP_WORKDIR}/demo
-else
- pushd ${ONAP_WORKDIR}/demo
- git pull
- popd
-fi
STACK="ete-$(uuidgen | cut -c-8)"
-echo "Stack Name: ${STACK}"
-openstack stack create -t ${ONAP_WORKDIR}/demo/heat/ONAP/onap_openstack.yaml -e ${WORKSPACE}/test/ete/labs/windriver/onap.env $STACK
+echo "New Stack Name: ${STACK}"
+
+
+cp ${ONAP_WORKDIR}/demo/heat/ONAP/onap_openstack.env ${WORKSPACE}/test/ete/labs/windriver/onap-openstack-demo.env
+envsubst < ${WORKSPACE}/test/ete/labs/windriver/onap-openstack-template.env > ${WORKSPACE}/test/ete/labs/windriver/onap-openstack.env
+#diff ${WORKSPACE}/test/ete/labs/windriver/onap-openstack-template.env ${WORKSPACE}/test/ete/labs/windriver/onap-openstack.env
+
+openstack stack create -t ${ONAP_WORKDIR}/demo/heat/ONAP/onap_openstack.yaml -e ${WORKSPACE}/test/ete/labs/windriver/onap-openstack.env $STACK
+
+while [ "CREATE_IN_PROGRESS" == "$(openstack stack show -c stack_status -f value $STACK)" ]; do
+ sleep 15
+done
+
+STATUS=$(openstack stack show -c stack_status -f value $STACK)
+echo $STATUS
+if [ "CREATE_COMPLETE" != "$STATUS" ]; then
+ exit 1
+fi
+
+
+# wait until Robot VM initializes
+ROBOT_IP=$($WORKSPACE/test/ete/scripts/get-floating-ip.sh onap-robot)
+echo "ROBOT_IP=${ROBOT_IP}"
+
+if [ "" == "${ROBOT_IP}" ]; then
+ exit 1
+fi
+
+ssh-keygen -R ${ROBOT_IP}
+
+SSH_KEY=~/.ssh/onap_key
+until ssh -o StrictHostKeyChecking=no -i ${SSH_KEY} root@${ROBOT_IP} "docker ps | grep -q openecompete_container"
+do
+ sleep 1m
+done
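Editor's note: with the helpers added below, the intended flow appears to be source a lab rc file, deploy the stack, then run the health check once the Robot VM is ready. A plausible end-to-end invocation (the ordering is an assumption; the paths are from this change):

    source test/ete/labs/windriver/Integration-SB-04-openrc.sh
    ./test/ete/scripts/deploy-onap.sh        # deletes any old stacks, creates ete-<uuid>
    ./test/ete/scripts/run-healthcheck.sh    # ssh to onap-robot and run /opt/ete.sh health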
diff --git a/test/ete/scripts/get-floating-ip.sh b/test/ete/scripts/get-floating-ip.sh
new file mode 100755
index 000000000..55854e73f
--- /dev/null
+++ b/test/ete/scripts/get-floating-ip.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+# Get floating IP assigned to a server name
+
+PORT_ID=$(openstack server show -f json $1 | python -c 'import sys, json; print json.load(sys.stdin)["wrs-if:nics"][0]["nic1"]["port_id"]')
+FLOATING_IP=$(openstack floating ip list -f json --port $PORT_ID | python -c 'import sys, json; print json.load(sys.stdin)[0]["Floating IP Address"]')
+echo $FLOATING_IP
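Editor's note: the "wrs-if:nics" attribute parsed here looks like a Wind River (Titanium Cloud) extension, so this helper is lab-specific. Usage as seen elsewhere in this change:

    ROBOT_IP=$(./test/ete/scripts/get-floating-ip.sh onap-robot)
    echo "ROBOT_IP=${ROBOT_IP}"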
diff --git a/test/ete/scripts/install_openstack_cli.sh b/test/ete/scripts/install_openstack_cli.sh
new file mode 100755
index 000000000..a6a0438f9
--- /dev/null
+++ b/test/ete/scripts/install_openstack_cli.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+if [ -z "$OS_AUTH_URL" ] || [ -z "$OS_USERNAME" ]
+then
+ echo "ERROR: OpenStack environment variables not set. Please source your OpenStack RC script first."
+ exit 1
+fi
+
+
+if [ -z "$WORKSPACE" ]; then
+ export WORKSPACE=`git rev-parse --show-toplevel`
+fi
+
+
+
+# Assume that if ROBOT_VENV is set, we don't need to reinstall robot
+if [ -f ${WORKSPACE}/env.properties ]; then
+ source ${WORKSPACE}/env.properties
+fi
+
+if [ ! -z "$ONAP_VENV" ] && [ -f "$ONAP_VENV/bin/activate" ]; then
+ source ${ONAP_VENV}/bin/activate
+else
+ ONAP_VENV=$(mktemp -d --suffix=_onap_venv)
+ virtualenv ${ONAP_VENV}
+ source ${ONAP_VENV}/bin/activate
+
+ pip install --upgrade pip
+ pip install --upgrade python-openstackclient python-heatclient
+
+ echo "ONAP_VENV=${ONAP_VENV}" >> $WORKSPACE/env.properties
+fi
+echo "ONAP_VENV=${ONAP_VENV}"
+
+if [ -z "$ONAP_WORKDIR" ]; then
+ ONAP_WORKDIR=$(mktemp -d --suffix=_onap_workdir)
+ echo "ONAP_WORKDIR=${ONAP_WORKDIR}" >> $WORKSPACE/env.properties
+fi
+echo "ONAP_WORKDIR=${ONAP_WORKDIR}"
+if [ ! -d ${ONAP_WORKDIR}/demo ]; then
+ git clone https://gerrit.onap.org/r/demo ${ONAP_WORKDIR}/demo
+else
+ pushd ${ONAP_WORKDIR}/demo
+ git pull
+ popd
+fi
+
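Editor's note: this script is meant to be sourced, as deploy-onap.sh and run-healthcheck.sh do, so that the virtualenv activation and the ONAP_VENV / ONAP_WORKDIR settings land in the caller's shell. A minimal sketch:

    source $WORKSPACE/test/ete/scripts/install_openstack_cli.sh
    openstack --version     # now resolves to the client installed in $ONAP_VENV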
diff --git a/test/ete/scripts/remote/run-robot.sh b/test/ete/scripts/remote/run-robot.sh
new file mode 100755
index 000000000..162acbe24
--- /dev/null
+++ b/test/ete/scripts/remote/run-robot.sh
@@ -0,0 +1,20 @@
+#!/bin/bash -x
+
+cd /opt
+
+docker ps | grep -q openecompete_container
+if [ $? -ne 0 ]; then
+ echo "Robot not initialized"
+ exit 2
+fi
+
+if [ ! -d eteshare/logs/demo ]; then
+ echo $OS_PROJECT_ID > /opt/config/openstack_tenant_id.txt
+ echo $OS_USERNAME > /opt/config/openstack_username.txt
+ echo $OS_PASSWORD > /opt/config/openstack_password.txt
+ /bin/bash /opt/eteshare/config/vm_config2robot.sh
+ # set robot VM http server password
+ echo "admin" | /opt/demo.sh init_robot
+fi
+
+/opt/ete.sh health
diff --git a/test/ete/scripts/run-healthcheck.sh b/test/ete/scripts/run-healthcheck.sh
new file mode 100755
index 000000000..2f0f014d3
--- /dev/null
+++ b/test/ete/scripts/run-healthcheck.sh
@@ -0,0 +1,26 @@
+#!/bin/bash -x
+
+SSH_KEY=~/.ssh/onap_key
+
+if [ -z "$WORKSPACE" ]; then
+ export WORKSPACE=`git rev-parse --show-toplevel`
+fi
+
+source $WORKSPACE/test/ete/scripts/install_openstack_cli.sh
+
+cd $WORKSPACE/test/ete/scripts
+
+ROBOT_IP=$(./get-floating-ip.sh onap-robot)
+echo "ROBOT_IP=${ROBOT_IP}"
+
+if [ "" == "${ROBOT_IP}" ]; then
+ exit 1
+fi
+
+ssh-keygen -R ${ROBOT_IP}
+
+ssh -o StrictHostKeyChecking=no -i ${SSH_KEY} root@${ROBOT_IP} "OS_PROJECT_ID=$OS_PROJECT_ID OS_USERNAME=$OS_USERNAME OS_PASSWORD=$OS_PASSWORD bash -s" < ./remote/run-robot.sh
+
+LOG_DIR=$(ssh -o StrictHostKeyChecking=no -i ${SSH_KEY} root@${ROBOT_IP} "ls -1t /opt/eteshare/logs | head -1")
+echo "Browse Robot results at http://${ROBOT_IP}:88/logs/${LOG_DIR}/"
+rsync -e "ssh -i ${SSH_KEY}" -avPz root@${ROBOT_IP}:/opt/eteshare/logs/${LOG_DIR}/ $WORKSPACE/archives/
diff --git a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java
index 714d3a340..5977a8a2f 100644
--- a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java
+++ b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java
@@ -93,7 +93,7 @@ public class MockApplication {
// Register extension
options.extensions("org.onap.integration.test.mocks.sniroemulator.extension.Webhooks");
// Register notifier
- options.notifier(new ConsoleNotifier(true));
+ options.notifier(new ConsoleNotifier(true));
wireMockServer = new WireMockServer(options);
wireMockServer.enableRecordMappings(mappingsFileSource, filesFileSource);
diff --git a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java
index 60592b3f0..304971572 100644
--- a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java
+++ b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java
@@ -74,6 +74,11 @@ public class WebhookDefinition {
return body.isBinary() ? null : body.asString();
}
+ public String getBase64BodyAsString() {
+ return body.asString();
+ }
+
+
@JsonIgnore
public byte[] getBinaryBody() {
return body.asBytes();
diff --git a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java
index e3fc286cb..78fb735d2 100644
--- a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java
+++ b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java
@@ -19,6 +19,8 @@
*/
package org.onap.integration.test.mocks.sniroemulator.extension;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
import com.github.tomakehurst.wiremock.common.Notifier;
import com.github.tomakehurst.wiremock.core.Admin;
import com.github.tomakehurst.wiremock.extension.Parameters;
@@ -32,8 +34,11 @@ import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.util.EntityUtils;
+import com.github.tomakehurst.wiremock.common.Json;
+
import java.io.IOException;
+import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
@@ -46,6 +51,9 @@ public class Webhooks extends PostServeAction {
private final ScheduledExecutorService scheduler;
private final HttpClient httpClient;
+ private String tunnelResourceId = "NONE";
+ private String brgResourceId = "NONE";
+ private String vgResourceId = "NONE";
public Webhooks() {
scheduler = Executors.newScheduledThreadPool(10);
@@ -62,10 +70,40 @@ public class Webhooks extends PostServeAction {
final WebhookDefinition definition = parameters.as(WebhookDefinition.class);
final Notifier notifier = notifier();
+
scheduler.schedule(
new Runnable() {
@Override
public void run() {
+ JsonNode node = Json.node(serveEvent.getRequest().getBodyAsString());
+ // set callback url from SO request
+ String callBackUrl = node.get("requestInfo").get("callbackUrl").asText();
+ notifier.info("!!! Call Back Url : \n" + callBackUrl);
+ definition.withUrl(callBackUrl);
+
+ // set servicesResourceIds for each resource from SO request placement Demand
+ //System.out.println ("PI: \n" + node.textValue());
+ JsonNode placementDemandList = node.get("placementInfo").get("demandInfo").get("placementDemand");
+ if (placementDemandList !=null && placementDemandList.isArray()){
+ for (int i=0;i<placementDemandList.size();i++){
+ JsonNode resourceInfo = placementDemandList.get(i);
+ String resourceModuleName = resourceInfo.get("resourceModuleName").asText();
+ if (resourceModuleName.toLowerCase().matches("(.*)tunnel(.*)")){
+ tunnelResourceId = resourceInfo.get("serviceResourceId").asText();
+ } else if (resourceModuleName.toLowerCase().matches("(.*)brg(.*)")) {
+ brgResourceId = resourceInfo.get("serviceResourceId").asText();
+ }else {
+ vgResourceId = resourceInfo.get("serviceResourceId").asText();
+ }
+ }
+ }
+
+ String stubbedBodyStr = definition.getBase64BodyAsString();
+ String newBodyStr = stubbedBodyStr.replace("TUNNEL-RESOURCE-ID-REPLACE",tunnelResourceId).replace("VGW-RESOURCE-ID-REPLACE",vgResourceId).replace("BRG-RESOURCE-ID-REPLACE",brgResourceId);
+
+ definition.withBody(newBodyStr);
+ notifier.info("SNIRO Async Callback response:\n" + definition.getBody());
+
HttpUriRequest request = buildRequest(definition);
try {
@@ -78,14 +116,15 @@ public class Webhooks extends PostServeAction {
EntityUtils.toString(response.getEntity())
)
);
- System.out.println(String.format("Webhook %s request to %s returned status %s\n\n%s",
- definition.getMethod(),
- definition.getUrl(),
- response.getStatusLine(),
- EntityUtils.toString(response.getEntity())
- )
- );
+ //System.out.println(String.format("Webhook %s request to %s returned status %s\n\n%s",
+ // definition.getMethod(),
+ // definition.getUrl(),
+ // response.getStatusLine(),
+ // EntityUtils.toString(response.getEntity())
+ // )
+ //);
} catch (IOException e) {
+ e.printStackTrace();
throwUnchecked(e);
}
}
@@ -101,6 +140,7 @@ public class Webhooks extends PostServeAction {
definition.getUrl().toString()
);
+
for (HttpHeader header: definition.getHeaders().all()) {
request.addHeader(header.key(), header.firstValue());
}