Diffstat (limited to 'test')

 -rw-r--r--  test/csit/plans/portal-sdk/testsuite/setup.sh                |  2
 -rw-r--r--  test/csit/plans/portal/testsuite/setup.sh                    |  2
 -rw-r--r--  test/csit/plans/sdc/nightly/setup.sh                         | 40
 -rw-r--r--  test/csit/plans/sdc/nightly/teardown.sh                      | 22
 -rw-r--r--  test/csit/plans/sdc/nightly/testplan.txt                     |  3
 -rw-r--r--  test/csit/plans/sdnc/healthcheck/setup.sh                    |  3
 -rw-r--r--  test/csit/plans/sdnc/healthcheck/teardown.sh                 |  1
 -rw-r--r--  test/csit/scripts/sdc/clone_and_setup_sdc_data.sh            |  2
 -rw-r--r--  test/csit/scripts/sdc/start_sdc_containers.sh                | 69
 -rw-r--r--  test/csit/tests/portal-sdk/testsuites/test1.robot            | 10
 -rw-r--r--  test/csit/tests/portal/testsuites/test1.robot                |  2
 -rw-r--r--  test/csit/tests/sdc/nightly/__init__.robot                   |  2
 -rw-r--r--  test/csit/tests/sdc/nightly/test1.robot                      | 16
 -rw-r--r--  test/csit/tests/sdnc/healthcheck/data/data.json              |  4
 -rw-r--r--  test/csit/tests/sdnc/healthcheck/data/preload.json           | 41
 -rw-r--r--  test/csit/tests/sdnc/healthcheck/test1.robot                 | 45
 -rw-r--r--  test/ete/labs/windriver/onap-openstack-template.env (renamed from test/ete/labs/windriver/onap-openstack.env) | 70
 -rwxr-xr-x  test/ete/scripts/deploy-onap.sh                              | 13
 -rwxr-xr-x  test/ete/scripts/run-healthcheck.sh                          |  4
 -rw-r--r--  test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java |  2
 -rw-r--r--  test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java |  5
 -rw-r--r--  test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java | 54
 22 files changed, 334 insertions(+), 78 deletions(-)
diff --git a/test/csit/plans/portal-sdk/testsuite/setup.sh b/test/csit/plans/portal-sdk/testsuite/setup.sh
index 456a9343d..0c90dc66b 100644
--- a/test/csit/plans/portal-sdk/testsuite/setup.sh
+++ b/test/csit/plans/portal-sdk/testsuite/setup.sh
@@ -48,7 +48,7 @@ NEXUS_DOCKER_REPO=nexus3.onap.org:10003
CURR="$(pwd)"
-git clone http://gerrit.onap.org/r/portal
+git clone http://gerrit.onap.org/r/portal -b "release-1.3.0"
# Refresh configuration and scripts
cd portal
diff --git a/test/csit/plans/portal/testsuite/setup.sh b/test/csit/plans/portal/testsuite/setup.sh
index 456a9343d..0c90dc66b 100644
--- a/test/csit/plans/portal/testsuite/setup.sh
+++ b/test/csit/plans/portal/testsuite/setup.sh
@@ -48,7 +48,7 @@ NEXUS_DOCKER_REPO=nexus3.onap.org:10003
CURR="$(pwd)"
-git clone http://gerrit.onap.org/r/portal
+git clone http://gerrit.onap.org/r/portal -b "release-1.3.0"
# Refresh configuration and scripts
cd portal
diff --git a/test/csit/plans/sdc/nightly/setup.sh b/test/csit/plans/sdc/nightly/setup.sh
new file mode 100644
index 000000000..ac7a7f3f4
--- /dev/null
+++ b/test/csit/plans/sdc/nightly/setup.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+# Copyright 2016-2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+# Place the scripts in run order:
+
+
+mkdir ${WORKSPACE}/archives
+chmod -R 777 ${WORKSPACE}/archives
+
+source ${WORKSPACE}/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
+
+source ${WORKSPACE}/test/csit/scripts/sdc/start_sdc_containers.sh
+
+source ${WORKSPACE}/test/csit/scripts/sdc/docker_health.sh
+
+source ${WORKSPACE}/test/csit/scripts/sdc/start_sdc_sanity.sh
+
+
+BE_IP=`get-instance-ip.sh sdc-BE`
+echo BE_IP=${BE_IP}
+
+
+# Pass any variables required by Robot test suites in ROBOT_VARIABLES
+ROBOT_VARIABLES="-v BE_IP:${BE_IP}"
+
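Note: the setup script only exports ROBOT_VARIABLES; the CSIT driver that actually launches Robot is not part of this change. A minimal sketch of how that hand-off is typically consumed (the runner invocation below is an assumption, not the real driver):

# hypothetical driver call -- ROBOT_VARIABLES expands to "-v BE_IP:<address>"
robot ${ROBOT_VARIABLES} -d ${WORKSPACE}/archives test/csit/tests/sdc/nightly
# inside the suite the injected value is then read as ${BE_IP}
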
diff --git a/test/csit/plans/sdc/nightly/teardown.sh b/test/csit/plans/sdc/nightly/teardown.sh
new file mode 100644
index 000000000..a5f69819e
--- /dev/null
+++ b/test/csit/plans/sdc/nightly/teardown.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+#
+# Copyright 2016-2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+
+source ${WORKSPACE}/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh
+
+# $WORKSPACE/archives/clamp-clone deleted with archives folder when tests starts so we keep it at the end for debugging
diff --git a/test/csit/plans/sdc/nightly/testplan.txt b/test/csit/plans/sdc/nightly/testplan.txt
new file mode 100644
index 000000000..3011ad5cb
--- /dev/null
+++ b/test/csit/plans/sdc/nightly/testplan.txt
@@ -0,0 +1,3 @@
+# Test suites are relative paths under [integration.git]/test/csit/tests/.
+# Place the suites in run order.
+sdc/nightly
diff --git a/test/csit/plans/sdnc/healthcheck/setup.sh b/test/csit/plans/sdnc/healthcheck/setup.sh
index 53590264b..5e51b0e6a 100644
--- a/test/csit/plans/sdnc/healthcheck/setup.sh
+++ b/test/csit/plans/sdnc/healthcheck/setup.sh
@@ -120,6 +120,9 @@ if [ "$num_failed_bundles" -ge 1 ]; then
echo " $failed_bundles"
fi
+# Sleep additional 120 to give application time to finish
+sleep 120
+
# Pass any variables required by Robot test suites in ROBOT_VARIABLES
ROBOT_VARIABLES="-v SCRIPTS:${SCRIPTS}"
diff --git a/test/csit/plans/sdnc/healthcheck/teardown.sh b/test/csit/plans/sdnc/healthcheck/teardown.sh
index 4d99b9f31..925e7b732 100644
--- a/test/csit/plans/sdnc/healthcheck/teardown.sh
+++ b/test/csit/plans/sdnc/healthcheck/teardown.sh
@@ -21,5 +21,6 @@ kill-instance.sh sdnc_controller_container
kill-instance.sh sdnc_dgbuilder_container
kill-instance.sh sdnc_portal_container
kill-instance.sh sdnc_db_container
+kill-instance.sh sdnc_ueblistener_container
# $WORKSPACE/archives/appc deleted with archives folder when tests starts so we keep it at the end for debugging
diff --git a/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh b/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
index c78ffe37c..5dbfb5fc2 100644
--- a/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
+++ b/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
@@ -34,7 +34,7 @@ ls -lR ${WORKSPACE}/data/logs/
cd ${WORKSPACE}/data/clone
-git clone --depth 1 http://gerrit.onap.org/r/sdc -b master
+git clone --depth 1 http://gerrit.onap.org/r/sdc -b ${GERRIT_BRANCH}
chmod -R 777 ${WORKSPACE}/data/clone
diff --git a/test/csit/scripts/sdc/start_sdc_containers.sh b/test/csit/scripts/sdc/start_sdc_containers.sh
index 4db0485d5..0dd373256 100644
--- a/test/csit/scripts/sdc/start_sdc_containers.sh
+++ b/test/csit/scripts/sdc/start_sdc_containers.sh
@@ -35,6 +35,38 @@ export IP=$HOST_IP
#export PREFIX=${NEXUS_DOCKER_REPO}'/openecomp'
export PREFIX='nexus3.onap.org:10001/openecomp'
+
+function monitor_docker {
+
+echo monitor $1 Docker
+sleep 5
+TIME_OUT=800
+INTERVAL=20
+TIME=0
+while [ "$TIME" -lt "$TIME_OUT" ]; do
+
+MATCH=`docker logs --tail 30 $1 | grep "DOCKER STARTED"`
+echo MATCH is -- $MATCH
+
+if [ -n "$MATCH" ]
+ then
+ echo DOCKER start finished in $TIME seconds
+ break
+ fi
+
+ echo Sleep: $INTERVAL seconds before testing if $1 DOCKER is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
+ sleep $INTERVAL
+ TIME=$(($TIME+$INTERVAL))
+done
+
+if [ "$TIME" -ge "$TIME_OUT" ]
+ then
+ echo -e "\e[1;31mTIME OUT: DOCKER was NOT fully started in $TIME_OUT seconds... Could cause problems ...\e[0m"
+fi
+
+
+}
+
#start Elastic-Search
docker run --detach --name sdc-es --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --memory 1g --memory-swap=1g --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro -e ES_HEAP_SIZE=1024M --volume ${WORKSPACE}/data/ES:/usr/share/elasticsearch/data --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9200:9200 --publish 9300:9300 ${PREFIX}/sdc-elasticsearch:${RELEASE}
@@ -42,15 +74,7 @@ docker run --detach --name sdc-es --env ENVNAME="${DEP_ENV}" --log-driver=json-f
docker run --detach --name sdc-cs --env RELEASE="${RELEASE}" --env ENVNAME="${DEP_ENV}" --env HOST_IP=${IP} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/CS:/var/lib/cassandra --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9042:9042 --publish 9160:9160 ${PREFIX}/sdc-cassandra:${RELEASE}
echo "please wait while CS is starting..."
-echo ""
-c=120 # seconds to wait
-REWRITE="\e[25D\e[1A\e[K"
-while [ $c -gt 0 ]; do
- c=$((c-1))
- sleep 1
- echo -e "${REWRITE}$c"
-done
-echo -e ""
+monitor_docker sdc-cs
#start kibana
@@ -60,34 +84,17 @@ echo -e ""
docker run --detach --name sdc-BE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 4g --memory-swap=4g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/BE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 8443:8443 --publish 8080:8080 ${PREFIX}/sdc-backend:${RELEASE}
echo "please wait while BE is starting..."
-echo ""
-c=180 # seconds to wait
-REWRITE="\e[45D\e[1A\e[K"
-while [ $c -gt 0 ]; do
- c=$((c-1))
- sleep 1
- echo -e "${REWRITE}$c"
-done
-echo -e ""
+monitor_docker sdc-BE
#start Front-End
docker run --detach --name sdc-FE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/FE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9443:9443 --publish 8181:8181 ${PREFIX}/sdc-frontend:${RELEASE}
-echo "please wait while FE is starting..."
-echo ""
-c=160 # seconds to wait
-REWRITE="\e[45D\e[1A\e[K"
-while [ $c -gt 0 ]; do
- c=$((c-1))
- sleep 1
- echo -e "${REWRITE}$c"
-done
-echo -e ""
+echo "docker run sdc-frontend..."
+monitor_docker sdc-FE
-# WAIT 5 minutes maximum and test every 5 seconds if SDC up using HealthCheck API
-echo " WAIT 5 minutes maximum and test every 5 seconds if SDC up using HealthCheck API...."
+echo " WAIT 1 minutes maximum and test every 5 seconds if SDC up using HealthCheck API...."
-TIME_OUT=600
+TIME_OUT=60
INTERVAL=5
TIME=0
while [ "$TIME" -lt "$TIME_OUT" ]; do
diff --git a/test/csit/tests/portal-sdk/testsuites/test1.robot b/test/csit/tests/portal-sdk/testsuites/test1.robot
index f3e4017fb..84579d017 100644
--- a/test/csit/tests/portal-sdk/testsuites/test1.robot
+++ b/test/csit/tests/portal-sdk/testsuites/test1.robot
@@ -47,9 +47,14 @@ Portal admin Login To Portal GUI
Portal Admin Navigation Application Link Tab
    [Documentation]    Logs into Portal GUI as Portal admin
-
    Click Element    xpath=.//h3[contains(text(),'xDemo App')]/following::div[1]
-    Page Should Contain    ONAP Portal
+    Go To    ${PORTAL_HOME_PAGE}
+    Dismiss Alert    accept=false
+    #Scroll Element Into View    xpath=//span[@id='tab-Home']
+    #Click Element    xpath=//span[@id='tab-Home']
+    #Click Element    xpath=(//span[@id='tab-xDemo-App']/following::i[@class='ion-close-round'])[1]
+    Click Element    xpath=.//h3[contains(text(),'xDemo App')]/following::div[1]
+
Validate SDK Sub Menu
@@ -72,7 +77,6 @@ Click Sample Pages and validate sub Menu
    Click Link    xpath=//a[@id='parent-item-Sample-Pages']
    Click Link    xpath=//a[contains(@title,'Notebook')]
    Element Text Should Be    xpath=//h1[contains(.,'Notebook')]    Notebook
-    #Click Link    xpath=//a[@id='parent-item-Home']

Click Reports and validate sub Menu
    [Documentation]    Click Reports Tab
index ab5fed47e..70fdcf0d6 100644
--- a/test/csit/tests/portal/testsuites/test1.robot
+++ b/test/csit/tests/portal/testsuites/test1.robot
@@ -886,7 +886,7 @@ Application admin Logout from Portal GUI
    [Documentation]    Logout from Portal GUI
    Click Element    xpath=//div[@id='header-user-icon']
    #Set Selenium Implicit Wait    3000
-    Click Button    xpath=//button[contains(.,'Log out')]
+    Click Button    xpath=//button[contains(text(),'Log out')]
    #Set Selenium Implicit Wait    3000
    Title Should Be    Login
diff --git a/test/csit/tests/sdc/nightly/__init__.robot b/test/csit/tests/sdc/nightly/__init__.robot
new file mode 100644
index 000000000..8ee10d5f6
--- /dev/null
+++ b/test/csit/tests/sdc/nightly/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation    Sdc - HealthCheck
diff --git a/test/csit/tests/sdc/nightly/test1.robot b/test/csit/tests/sdc/nightly/test1.robot
new file mode 100644
index 000000000..6d4dc242d
--- /dev/null
+++ b/test/csit/tests/sdc/nightly/test1.robot
@@ -0,0 +1,16 @@
+*** Settings ***
+Library    Collections
+Library    OperatingSystem
+Library    RequestsLibrary
+Library    json
+
+*** Test Cases ***
+Get Requests health check ok
+    [Tags]    get
+    CreateSession    sdc-be    http://localhost:8080
+    ${headers}=    Create Dictionary    Accept=application/json    Content-Type=application/json
+    ${resp}=    Get Request    sdc-be    /sdc2/rest/healthCheck    headers=&{headers}
+    Should Be Equal As Strings    ${resp.status_code}    200
+    @{ITEMS}=    Copy List    ${resp.json()['componentsInfo']}
+    : FOR    ${ELEMENT}    IN    @{ITEMS}
+    \    Log    ${ELEMENT['healthCheckComponent']}    ${ELEMENT['healthCheckStatus']}
diff --git a/test/csit/tests/sdnc/healthcheck/data/data.json b/test/csit/tests/sdnc/healthcheck/data/data.json
new file mode 100644
index 000000000..583e26fb9
--- /dev/null
+++ b/test/csit/tests/sdnc/healthcheck/data/data.json
@@ -0,0 +1,4 @@
+{
+ "input" : {
+ }
+}
diff --git a/test/csit/tests/sdnc/healthcheck/data/preload.json b/test/csit/tests/sdnc/healthcheck/data/preload.json
new file mode 100644
index 000000000..b53afa859
--- /dev/null
+++ b/test/csit/tests/sdnc/healthcheck/data/preload.json
@@ -0,0 +1,41 @@
+{
+ "input": {
+ "vnf-topology-information": {
+ "vnf-topology-identifier": {
+ "service-type": "robot_demo",
+ "vnf-name": "vf_robot_module",
+ "vnf-type": "vf_robot_type",
+ "generic-vnf-name": "generic_vnf_name",
+ "generic-vnf-type": "generic_vnf_type"
+ },
+ "vnf-assignments": {
+ "availability-zones": [],
+ "vnf-networks": [],
+ "vnf-vms": []
+ },
+ "vnf-parameters": [
+ {
+ "vnf-parameter-name": "ngm1_management_ip_0",
+ "vnf-parameter-value":"127.0.0.1"
+ },
+ {
+ "vnf-parameter-name": "ngm2_management_ip_1",
+ "vnf-parameter-value":"127.0.0.2"
+ }
+ ]
+ },
+ "request-information": {
+ "request-id": "robot12",
+ "order-version": "1",
+ "notification-url": "openecomp.org",
+ "order-number": "1",
+ "request-action": "PreloadVNFRequest"
+ },
+ "sdnc-request-header": {
+ "svc-request-id": "robot12",
+ "svc-notification-url": "http:\/\/openecomp.org:8080\/adapters\/rest\/SDNCNotify",
+ "svc-action": "reserve"
+ }
+ }
+}
+
diff --git a/test/csit/tests/sdnc/healthcheck/test1.robot b/test/csit/tests/sdnc/healthcheck/test1.robot
index 1adb9a6b3..4bf3d25e7 100644
--- a/test/csit/tests/sdnc/healthcheck/test1.robot
+++ b/test/csit/tests/sdnc/healthcheck/test1.robot
@@ -1,16 +1,45 @@
*** Settings ***
-Library    OperatingSystem
-Library    Process
+Library    Collections
+Library    RequestsLibrary
+Library    OperatingSystem
+Library    json
+Library    String
*** Variables ***
+${SDN_APIDOCS_URI}    /apidoc/apis
+${SDN_HEALTHCHECK_OPERATION_PATH}    /operations/SLI-API:healthcheck
+${PRELOAD_VNF_TOPOLOGY_OPERATION_PATH}    /operations/VNF-API:preload-vnf-topology-operation
-${health_check}    ${SCRIPTS}/health_check.sh
+*** Test Cases ***
+Healthcheck API
+    Create Session    sdnc    http://localhost:8282/restconf
+    ${data}=    Get Binary File    ${CURDIR}${/}data${/}data.json
+    &{headers}=    Create Dictionary    Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==    Content-Type=application/json    Accept=application/json
+    ${resp}=    Post Request    sdnc    ${SDN_HEALTHCHECK_OPERATION_PATH}    data=${data}    headers=${headers}
+    Should Be Equal As Strings    ${resp.status_code}    200
+    Should Be Equal As Strings    ${resp.json()['output']['response-code']}    200
+
+Check SLI-API
+    Create Session    sdnc    http://localhost:8282
+    &{headers}=    Create Dictionary    Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==    Content-Type=application/json    Accept=application/json
+    ${resp}=    Get Request    sdnc    ${SDN_APIDOCS_URI}    headers=${headers}
+    Log    ${resp.content}
+    Should Contain    ${resp.content}    SLI-API
-*** Test Cases ***
-Health check test case for SDNC
-    [Documentation]    Health check
-    ${result_hc}=    Run Process    bash ${health_check} > log_hc.txt    shell=yes
-    Should Be Equal As Integers    ${result_hc.rc}    0
+Check VNF-API
+    Create Session    sdnc    http://localhost:8282
+    &{headers}=    Create Dictionary    Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==    Content-Type=application/json    Accept=application/json
+    ${resp}=    Get Request    sdnc    ${SDN_APIDOCS_URI}    headers=${headers}
+    Log    ${resp.content}
+    Should Contain    ${resp.content}    VNF-API
+Test Preload
+    Create Session    sdnc    http://localhost:8282/restconf
+    ${data}=    Get Binary File    ${CURDIR}${/}data${/}preload.json
+    &{headers}=    Create Dictionary    Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==    Content-Type=application/json    Accept=application/json
+    ${resp}=    Post Request    sdnc    ${PRELOAD_VNF_TOPOLOGY_OPERATION_PATH}    data=${data}    headers=${headers}
+    Log    ${resp.content}
+    Should Be Equal As Strings    ${resp.status_code}    200
+    Should Be Equal As Strings    ${resp.json()['output']['response-code']}    200
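Note: for manual debugging outside Robot, the Healthcheck API test case above can be reproduced with curl; URL, headers and payload are taken verbatim from the suite:

curl -s -X POST http://localhost:8282/restconf/operations/SLI-API:healthcheck \
     -H 'Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==' \
     -H 'Content-Type: application/json' -H 'Accept: application/json' \
     -d @test/csit/tests/sdnc/healthcheck/data/data.json
# a healthy controller answers HTTP 200 with output.response-code == 200
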
diff --git a/test/ete/labs/windriver/onap-openstack.env b/test/ete/labs/windriver/onap-openstack-template.env
index b0380a17a..b1b31ee88 100644
--- a/test/ete/labs/windriver/onap-openstack.env
+++ b/test/ete/labs/windriver/onap-openstack-template.env
@@ -8,6 +8,8 @@ parameters:
public_net_id: 971040b2-7059-49dc-b220-4fab50cb2ad4
+ public_net_name: external
+
ubuntu_1404_image: ubuntu-14-04-cloud-amd64
ubuntu_1604_image: ubuntu-16-04-cloud-amd64
@@ -40,11 +42,13 @@ parameters:
artifacts_version: 1.1.0-SNAPSHOT
- openstack_tenant_id: SAMPLE
+ openstack_tenant_id: ${OS_PROJECT_ID}
+
+ openstack_tenant_name: ${OS_PROJECT_NAME}
- openstack_username: SAMPLE
+ openstack_username: ${OS_USERNAME}
- openstack_api_key: SAMPLE
+ openstack_api_key: ${OS_PASSWORD}
openstack_auth_method: password
@@ -63,7 +67,7 @@ parameters:
# #
######################
- dns_list: 8.8.8.8
+ dns_list: ["10.12.25.5", "8.8.8.8"]
external_dns: 8.8.8.8
oam_network_cidr: 10.0.0.0/16
@@ -104,7 +108,7 @@ parameters:
# dcae_base_environment: 1-NIC-FLOATING-IPS
- dcae_zone: ZONE
+# dcae_zone: ZONE
# dcae_state: STATE
@@ -116,22 +120,44 @@ parameters:
# dcae_code_version: 1.1.0
- dcaeos_cloud_env: PUT DACE TARGET DEPLOYMENT STACK'S FLAVOR (e.g. OpenStack) HERE
- dcaeos_keystone_url: PUT DACE TARGET DEPLOYMENT STACK'S KEYSTONE URL HERE
- dcaeos_openstack_region: PUT DACE TARGET DEPLOYMENT STACK'S REGION HERE
- dcaeos_openstack_tenant_id: PUT DACE TARGET DEPLOYMENT STACK'S TENANT ID HERE
- dcaeos_openstack_username: PUT DACE TARGET DEPLOYMENT STACK'S USERNAME HERE
- dcaeos_openstack_password: PUT DACE TARGET DEPLOYMENT STACK'S PASSWORD HERE
- dcaeos_dcae_key_name: PUT DACE TARGET DEPLOYMENT STACK'S UPLOADED KEY-PAIR NAME HERE
- dcaeos_dcae_pub_key: PUT DACE TARGET DEPLOYMENT STACK'S PUBLIC KEY HERE
- dcaeos_private_key: PUT DACE TARGET DEPLOYMENT STACK'S PRIVATE KEY HERE
- dcaeos_openstack_private_network_name: PUT DACE TARGET DEPLOYMENT STACK'S INTERNAL NETWOKR ID HERE
- dcaeos_public_net_id: PUT DACE TARGET DEPLOYMENT STACK'S PUBLIC NETWORK ID HERE
- dcaeos_ubuntu_1604_image: PUT DACE TARGET DEPLOYMENT STACK'S UBUNTU1604 IMAGE ID (TO BE USED BY DCAE VMS) HERE
- dcaeos_centos_7_image: PUT DACE TARGET DEPLOYMENT STACK'S CENTOS7 IMAGE ID (TO BE USED BY DCAE VMS) HERE
- dcaeos_security_group: PUT DACE TARGET DEPLOYMENT STACK'S SECURITY GROUP ID (TO BE USED BY DCAE VMS) HERE
- dcaeos_flavor_id: PUT DACE TARGET DEPLOYMENT STACK'S VM FLAVOR ID (TO BE USED BY DCAE VMS) HERE
-
+ dnsaas_config_enabled: true
+ dnsaas_region: RegionOne
+ dnsaas_keystone_url: http://10.12.25.5:5000/v3
+ dnsaas_tenant_name: ${OS_PROJECT_NAME}
+ dnsaas_username: ${OS_USERNAME}
+ dnsaas_password: ${OS_PASSWORD}
+ dcae_keystone_url: "http://10.0.14.1/api/multicloud-titanium_cloud/v0/pod25_RegionOne/identity/v2.0"
+ dcae_centos_7_image: CentOS-7
+ dcae_security_group: default
+ dcae_key_name: 'id_lji_onap'
+ dcae_public_key: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDloKgBHx/yKRV77lr828rqa+zK+iTZpqmH3WSUU3vdNhSbEMNKkGVXR4+Gq1hNd8UNF+jMg87kOFSlQbE5jvsQMWuu1unxLKbH3AeXJd21gR1Gx4KXjkWsfl4URWMZ9WNvH0hMvqEV5SdFIDQmx07C/NOfy3R5N1pCgNsh9RT+EpDFh7jmimxrAqdxP0HnGFC2oM3rYMnzfh2/+Obkag6O3RZOkYx/WLQKbOKTi8K1C4UM5pwFzLT/vC+d9DF8pE7P9dlrbHTF9c3IGsP5oCa8CQ/WE4lVj/L9/iFNs0WsxdYaOnf11GJmPs663hltvWbQiqsFpdjX6tk/zMb3Xipz ubuntu@lusheng-sm-b781d54e-48ac-42fa-a780-3289b56e6598'
+ dcae_private_key: '-----BEGIN RSA PRIVATE KEY-----\n
+MIIEowIBAAKCAQEA5aCoAR8f8ikVe+5a/NvK6mvsyvok2aaph91klFN73TYUmxDD\n
+SpBlV0ePhqtYTXfFDRfozIPO5DhUpUGxOY77EDFrrtbp8Symx9wHlyXdtYEdRseC\n
+l45FrH5eFEVjGfVjbx9ITL6hFeUnRSA0JsdOwvzTn8t0eTdaQoDbIfUU/hKQxYe4\n
+5opsawKncT9B5xhQtqDN62DJ834dv/jm5GoOjt0WTpGMf1i0Cmzik4vCtQuFDOac\n
+Bcy0/7wvnfQxfKROz/XZa2x0xfXNyBrD+aAmvAkP1hOJVY/y/f4hTbNFrMXWGjp3\n
+9dRiZj7Out4Zbb1m0IqrBaXY1+rZP8zG914qcwIDAQABAoIBADusR1ybQQsGQC4H\n
+oB+L4qyOxWduH/trwyyqjQO6ujqfXjV54rZS1fMbhqHhjz36fPvNFdNoAXDvbpfP\n
+fYXOsVHg9fHmqb7h8qmHdXWDAp2zYu9M05QdBeAwqrQr3/gT+/YZtAk1lNkCxvTA\n
+zKyzKLWlTD1/etIxX0RfBhNKBF2N6X8faO2QFxXKFomFsSRUJgbAb3sJFWpGBQAE\n
+FcwpR3wB188y/qXXD5XY52zzKzFTsJW4Y48j/3tSirT68QzsqUm4CZl1/98oOjE+\n
+GQC3GCNZDHqh5n23KzTy0SuxhqVT30Ot9S497h1nEbgxZPjK5chHQjuIpGZIyEme\n
+TQOn5BkCgYEA+UVwel1PSV55fd3nRb9rwXEqeNg3X0Zlvx8qvPwfzdTSK5XCMTPe\n
+C6V3BS8IHvpXsuzmLkrCBUy+O3rOkbhlXhGvNAndBE9y3NY2K38hfiTM38irPwzB\n
+2Ksrc/Nu5uQaLk/5t08N3W6cZCml3aX4PVkJRCcH/K032ohGx2u8tR8CgYEA69N4\n
+sTIy56076TtgXM6A26W/HGY7X1olM82ZABnwCiSzXJWQc9QeepCiZCqeiyzxZAs2\n
+sBYb0+uKMkEYRpZUSCUtFnwoKyK9bFSo9lo8YyOUopi2e1KYfKhC2tR6HhNp+WrY\n
+YGMlwMmPdbd1NqbeTQdLnJwqJjKWhFM5mVzPLC0CgYAoFNxTMLHREFnkvUu00WTY\n
+RAQaTloI/d7abn6GyNusUy5AR+Jj2v03TjHnKXra0FJNuP4CL48nHwHvun2AvO+/\n
+woQzj+p0CPplPCSVtemCyRQQX8n5Z2m8FznzeQ86HS+AhueWbCXEl0aabH/5NfjG\n
+lIyC4uvL22aBwyvuYQqE7QKBgE6zKREBbqmQT3EbZqIyLCChJLEmkOPWYpnyIAA/\n
+p7LK4qygIS/2dyFyCS+iZXyOyBQaBesnxauobFsvDBnqa0AUYAKj9ofGtS5k5moo\n
+XQS8yAqnKibnvonDYWRECmjlE7Wv6XvpOp0m5uBjFBPkBkXwjFQ8bXiH42FPoOZE\n
+acMVAoGBAJk4+VGPw1Z7NZbJiDRtdCOY1SZBqWXf1Mth6H8JFUGPLyTfFq1cGpN8\n
++Odna/7rl4jCiiWrCJyob4F4DF0AE1t3lEa1XgWwDIdagnldH9e5z/psR3I/p1wv\n
+m3bZLBvlAVvCajosd/qeX0FkPGZlgk1dGZ8/7SyK7NITqAfgMMRS\n
+-----END RSA PRIVATE KEY-----'
################################
# #
@@ -145,7 +171,7 @@ parameters:
mr_branch: master
dcae_branch: master
policy_branch: master
- portal_branch: master
+ portal_branch: release-1.3.0
robot_branch: master
sdc_branch: master
sdnc_branch: master
diff --git a/test/ete/scripts/deploy-onap.sh b/test/ete/scripts/deploy-onap.sh
index 02943935d..69296ab17 100755
--- a/test/ete/scripts/deploy-onap.sh
+++ b/test/ete/scripts/deploy-onap.sh
@@ -8,10 +8,19 @@ source $WORKSPACE/test/ete/scripts/install_openstack_cli.sh
# Delete all existing stacks
STACKS=$(openstack stack list -c "Stack Name" -f value)
-echo "Deleting Stacks ${STACKS}"
-openstack stack delete -y $STACKS
+if [ ! -z "${STACKS}" ]; then
+ echo "Deleting Stacks ${STACKS}"
+ openstack stack delete -y $STACKS
+else
+ echo "No existing stacks to delete."
+fi
STACK="ete-$(uuidgen | cut -c-8)"
echo "New Stack Name: ${STACK}"
+
+
+cp ${ONAP_WORKDIR}/demo/heat/ONAP/onap_openstack.env ${WORKSPACE}/test/ete/labs/windriver/onap-openstack-demo.env
+envsubst < ${WORKSPACE}/test/ete/labs/windriver/onap-openstack-template.env > ${WORKSPACE}/test/ete/labs/windriver/onap-openstack.env
+
openstack stack create -t ${ONAP_WORKDIR}/demo/heat/ONAP/onap_openstack.yaml -e ${WORKSPACE}/test/ete/labs/windriver/onap-openstack.env $STACK
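Note: the ${OS_PROJECT_ID}, ${OS_PROJECT_NAME}, ${OS_USERNAME} and ${OS_PASSWORD} placeholders in onap-openstack-template.env are filled from the shell environment by the envsubst call above. A sketch of the expected flow, assuming a standard OpenStack RC file has been sourced first (openrc.sh is a placeholder name):

source openrc.sh    # exports OS_PROJECT_ID, OS_PROJECT_NAME, OS_USERNAME, OS_PASSWORD
envsubst < ${WORKSPACE}/test/ete/labs/windriver/onap-openstack-template.env \
         > ${WORKSPACE}/test/ete/labs/windriver/onap-openstack.env
grep openstack_username ${WORKSPACE}/test/ete/labs/windriver/onap-openstack.env    # sanity check
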
diff --git a/test/ete/scripts/run-healthcheck.sh b/test/ete/scripts/run-healthcheck.sh
index 45142b006..1555dad85 100755
--- a/test/ete/scripts/run-healthcheck.sh
+++ b/test/ete/scripts/run-healthcheck.sh
@@ -13,6 +13,10 @@ cd $WORKSPACE/test/ete/scripts
ROBOT_IP=$(./get-floating-ip.sh onap-robot)
echo "ROBOT_IP=${ROBOT_IP}"
+# allow direct login as root
+ssh -o StrictHostKeychecking=no -i ${SSH_KEY} ubuntu@${ROBOT_IP} 'sudo cp /home/ubuntu/.ssh/authorized_keys /root/.ssh/'
+
ssh -o StrictHostKeychecking=no -i ${SSH_KEY} root@${ROBOT_IP} "OS_PASSWORD_INPUT=$OS_PASSWORD_INPUT bash -s" < ./remote/run-robot.sh
LOG_DIR=$(ssh -o StrictHostKeychecking=no -i ${SSH_KEY} root@${ROBOT_IP} "ls -1t /opt/eteshare/logs | head -1")
+echo "Browse Robot results at http://${ROBOT_IP}:88/logs/${LOG_DIR}/"
rsync -e "ssh -i ${SSH_KEY}" -avPz root@${ROBOT_IP}:/opt/eteshare/logs/${LOG_DIR}/ $WORKSPACE/archives/
diff --git a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java
index 714d3a340..5977a8a2f 100644
--- a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java
+++ b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/MockApplication.java
@@ -93,7 +93,7 @@ public class MockApplication {
// Register extension
options.extensions("org.onap.integration.test.mocks.sniroemulator.extension.Webhooks");
// Register notifier
- options.notifier(new ConsoleNotifier(true));
+ options.notifier(new ConsoleNotifier(true));
wireMockServer = new WireMockServer(options);
wireMockServer.enableRecordMappings(mappingsFileSource, filesFileSource);
diff --git a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java
index 60592b3f0..304971572 100644
--- a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java
+++ b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/WebhookDefinition.java
@@ -74,6 +74,11 @@ public class WebhookDefinition {
return body.isBinary() ? null : body.asString();
}
+ public String getBase64BodyAsString() {
+ return body.asString();
+ }
+
+
@JsonIgnore
public byte[] getBinaryBody() {
return body.asBytes();
diff --git a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java
index e3fc286cb..78fb735d2 100644
--- a/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java
+++ b/test/mocks/sniroemulator/src/main/java/org/onap/integration/test/mocks/sniroemulator/extension/Webhooks.java
@@ -19,6 +19,8 @@
*/
package org.onap.integration.test.mocks.sniroemulator.extension;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
import com.github.tomakehurst.wiremock.common.Notifier;
import com.github.tomakehurst.wiremock.core.Admin;
import com.github.tomakehurst.wiremock.extension.Parameters;
@@ -32,8 +34,11 @@ import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.util.EntityUtils;
+import com.github.tomakehurst.wiremock.common.Json;
+
import java.io.IOException;
+import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
@@ -46,6 +51,9 @@ public class Webhooks extends PostServeAction {
private final ScheduledExecutorService scheduler;
private final HttpClient httpClient;
+ private String tunnelResourceId = "NONE";
+ private String brgResourceId = "NONE";
+ private String vgResourceId = "NONE";
public Webhooks() {
scheduler = Executors.newScheduledThreadPool(10);
@@ -62,10 +70,40 @@ public class Webhooks extends PostServeAction {
final WebhookDefinition definition = parameters.as(WebhookDefinition.class);
final Notifier notifier = notifier();
+
scheduler.schedule(
new Runnable() {
@Override
public void run() {
+ JsonNode node = Json.node(serveEvent.getRequest().getBodyAsString());
+ // set callback url from SO request
+ String callBackUrl = node.get("requestInfo").get("callbackUrl").asText();
+ notifier.info("!!! Call Back Url : \n" + callBackUrl);
+ definition.withUrl(callBackUrl);
+
+ // set servicesResourceIds for each resource from SO request placement Demand
+ //System.out.println ("PI: \n" + node.textValue());
+ JsonNode placementDemandList = node.get("placementInfo").get("demandInfo").get("placementDemand");
+ if (placementDemandList !=null && placementDemandList.isArray()){
+ for (int i=0;i<placementDemandList.size();i++){
+ JsonNode resourceInfo = placementDemandList.get(i);
+ String resourceModuleName = resourceInfo.get("resourceModuleName").asText();
+ if (resourceModuleName.toLowerCase().matches("(.*)tunnel(.*)")){
+ tunnelResourceId = resourceInfo.get("serviceResourceId").asText();
+ } else if (resourceModuleName.toLowerCase().matches("(.*)brg(.*)")) {
+ brgResourceId = resourceInfo.get("serviceResourceId").asText();
+ }else {
+ vgResourceId = resourceInfo.get("serviceResourceId").asText();
+ }
+ }
+ }
+
+ String stubbedBodyStr = definition.getBase64BodyAsString();
+ String newBodyStr = stubbedBodyStr.replace("TUNNEL-RESOURCE-ID-REPLACE",tunnelResourceId).replace("VGW-RESOURCE-ID-REPLACE",vgResourceId).replace("BRG-RESOURCE-ID-REPLACE",brgResourceId);
+
+ definition.withBody(newBodyStr);
+ notifier.info("SNIRO Async Callback response:\n" + definition.getBody());
+
HttpUriRequest request = buildRequest(definition);
try {
@@ -78,14 +116,15 @@ public class Webhooks extends PostServeAction {
EntityUtils.toString(response.getEntity())
)
);
- System.out.println(String.format("Webhook %s request to %s returned status %s\n\n%s",
- definition.getMethod(),
- definition.getUrl(),
- response.getStatusLine(),
- EntityUtils.toString(response.getEntity())
- )
- );
+ //System.out.println(String.format("Webhook %s request to %s returned status %s\n\n%s",
+ // definition.getMethod(),
+ // definition.getUrl(),
+ // response.getStatusLine(),
+ // EntityUtils.toString(response.getEntity())
+ // )
+ //);
} catch (IOException e) {
+ e.printStackTrace();
throwUnchecked(e);
}
}
@@ -101,6 +140,7 @@ public class Webhooks extends PostServeAction {
definition.getUrl().toString()
);
+
for (HttpHeader header: definition.getHeaders().all()) {
request.addHeader(header.key(), header.firstValue());
}
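
Note: the post-serve hook added above reads only a handful of fields from the incoming SO homing request: requestInfo.callbackUrl, plus resourceModuleName and serviceResourceId from each placementInfo.demandInfo.placementDemand entry. A quick jq probe against a captured payload shows the expected shape (sniro-request.json is a hypothetical file name):

# print the callback URL the emulator will post back to
jq -r '.requestInfo.callbackUrl' sniro-request.json
# list each placement demand and the serviceResourceId substituted into the stubbed response
jq -r '.placementInfo.demandInfo.placementDemand[] |
       "\(.resourceModuleName) -> \(.serviceResourceId)"' sniro-request.json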