Diffstat (limited to 'test')
-rwxr-xr-x  test/csit/plans/dmaap/mrpubsub/setup.sh                             95
-rwxr-xr-x  test/csit/plans/dmaap/mrpubsub/teardown.sh                          22
-rw-r--r--  test/csit/plans/dmaap/mrpubsub/testplan.txt                          2
-rw-r--r--  test/csit/plans/multicloud-ocata/functionality1/setup.sh             2
-rw-r--r--  test/csit/plans/sdc/healthCheck/setup.sh                            33
-rw-r--r--  test/csit/plans/sdc/healthCheck/teardown.sh                         22
-rw-r--r--  test/csit/plans/sdc/healthCheck/testplan.txt                         3
-rw-r--r--  test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh     2
-rwxr-xr-x  test/csit/run-csit.sh                                                2
-rw-r--r--  test/csit/scripts/sdc/clone_and_setup_sdc_data.sh                   52
-rw-r--r--  test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh     31
-rw-r--r--  test/csit/scripts/sdc/start_sdc_containers.sh                      108
-rwxr-xr-x  test/csit/tests/dmaap/mrpubsub/mrpubsub.robot                       73
-rw-r--r--  test/csit/tests/holmes/testcase/CommonKeywords/HttpRequest.robot     4
-rw-r--r--  test/csit/tests/holmes/testcase/RuleMgt/Rule-Keywords.robot          4
-rw-r--r--  test/csit/tests/holmes/testcase/RuleMgt/Rule-Mgt.robot               4
-rw-r--r--  test/csit/tests/sdc/healthCheck/__init__.robot                       2
-rw-r--r--  test/csit/tests/sdc/healthCheck/test1.robot                         16
-rw-r--r--  test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot                  13
19 files changed, 474 insertions(+), 16 deletions(-)
diff --git a/test/csit/plans/dmaap/mrpubsub/setup.sh b/test/csit/plans/dmaap/mrpubsub/setup.sh
new file mode 100755
index 000000000..3e8950f2b
--- /dev/null
+++ b/test/csit/plans/dmaap/mrpubsub/setup.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+#
+# ============LICENSE_START=======================================================
+# ONAP DMAAP MR
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights
+# reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END============================================
+# ===================================================================
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+# Place the scripts in run order:
+source ${SCRIPTS}/common_functions.sh
+
+# Clone DMaaP Message Router repo
+mkdir -p $WORKSPACE/archives/dmaapmr
+cd $WORKSPACE/archives/dmaapmr
+#unset http_proxy https_proxy
+git clone --depth 1 http://gerrit.onap.org/r/dmaap/messagerouter/messageservice -b master
+git pull
+cd $WORKSPACE/archives/dmaapmr/messageservice/src/main/resources/docker-compose
+cp $WORKSPACE/archives/dmaapmr/messageservice/bundleconfig-local/etc/appprops/MsgRtrApi.properties /var/tmp/
+
+
+# start DMaaP MR containers with docker compose and configuration from docker-compose.yml
+docker-compose up -d
+
+# Wait for initialization of the Docker containers for DMaaP MR, Kafka and ZooKeeper
+for i in {1..50}; do
+ if [ $(docker inspect --format '{{ .State.Running }}' dockercompose_dmaap_1) ] && \
+ [ $(docker inspect --format '{{ .State.Running }}' dockercompose_zookeeper_1) ] && \
+ [ $(docker inspect --format '{{ .State.Running }}' dockercompose_kafka_1) ]
+ then
+ echo "DMaaP Service Running"
+ break
+ else
+ echo sleep $i
+ sleep $i
+ fi
+done
+
+
+DMAAP_MR_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' dockercompose_dmaap_1)
+KAFKA_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' dockercompose_kafka_1)
+ZOOKEEPER_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' dockercompose_zookeeper_1)
+
+echo DMAAP_MR_IP=${DMAAP_MR_IP}
+echo KAFKA_IP=${KAFKA_IP}
+echo ZOOKEEPER_IP=${ZOOKEEPER_IP}
+
+# Initial docker-compose up and down is for populating the Kafka and ZooKeeper IPs in /var/tmp/MsgRtrApi.properties
+docker-compose down
+
+# Update the Kafka and ZooKeeper properties in MsgRtrApi.properties, which will be copied to the DMaaP container
+sed -i -e 's/<zookeeper_host>/'$ZOOKEEPER_IP'/' /var/tmp/MsgRtrApi.properties
+sed -i -e 's/<kafka_host>:<kafka_port>/'$KAFKA_IP':9092/' /var/tmp/MsgRtrApi.properties
+
+docker-compose build
+docker-compose up -d
+
+# Wait for initialization of Docker containers
+for i in {1..50}; do
+ if [ $(docker inspect --format '{{ .State.Running }}' dockercompose_dmaap_1) ] && \
+ [ $(docker inspect --format '{{ .State.Running }}' dockercompose_zookeeper_1) ] && \
+ [ $(docker inspect --format '{{ .State.Running }}' dockercompose_kafka_1) ]
+ then
+ echo "DMaaP Service Running"
+ break
+ else
+ echo sleep $i
+ sleep $i
+ fi
+done
+
+# Wait for initialization of docker services
+for i in {1..50}; do
+ curl -sS -m 1 ${DMAAP_MR_IP}:3904/events/TestTopic && break
+ echo sleep $i
+ sleep $i
+done
+
+#Pass any variables required by Robot test suites in ROBOT_VARIABLES
+ROBOT_VARIABLES="-v DMAAP_MR_IP:${DMAAP_MR_IP}"
diff --git a/test/csit/plans/dmaap/mrpubsub/teardown.sh b/test/csit/plans/dmaap/mrpubsub/teardown.sh
new file mode 100755
index 000000000..1b4303240
--- /dev/null
+++ b/test/csit/plans/dmaap/mrpubsub/teardown.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+#
+# Copyright 2016-2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+
+kill-instance.sh dockercompose_dmaap_1
+kill-instance.sh dockercompose_kafka_1
+kill-instance.sh dockercompose_zookeeper_1
diff --git a/test/csit/plans/dmaap/mrpubsub/testplan.txt b/test/csit/plans/dmaap/mrpubsub/testplan.txt
new file mode 100644
index 000000000..6a98eb790
--- /dev/null
+++ b/test/csit/plans/dmaap/mrpubsub/testplan.txt
@@ -0,0 +1,2 @@
+# Place the suites in run order.
+dmaap/mrpubsub
diff --git a/test/csit/plans/multicloud-ocata/functionality1/setup.sh b/test/csit/plans/multicloud-ocata/functionality1/setup.sh
index 5630849cf..75411781e 100644
--- a/test/csit/plans/multicloud-ocata/functionality1/setup.sh
+++ b/test/csit/plans/multicloud-ocata/functionality1/setup.sh
@@ -20,7 +20,7 @@ source ${SCRIPTS}/common_functions.sh
# start multicloud-ocata
docker run -d --name multicloud-ocata nexus3.onap.org:10001/onap/multicloud/openstack-ocata
SERVICE_IP=`get-instance-ip.sh multicloud-ocata`
-SERVICE_PORT=9004
+SERVICE_PORT=9006
for i in {1..50}; do
curl -sS ${SERVICE_IP}:${SERVICE_PORT} && break
diff --git a/test/csit/plans/sdc/healthCheck/setup.sh b/test/csit/plans/sdc/healthCheck/setup.sh
new file mode 100644
index 000000000..f247be656
--- /dev/null
+++ b/test/csit/plans/sdc/healthCheck/setup.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+# Copyright 2016-2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+# Place the scripts in run order:
+
+
+source ${WORKSPACE}/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
+
+source ${WORKSPACE}/test/csit/scripts/sdc/start_sdc_containers.sh
+
+
+BE_IP=`get-instance-ip.sh sdc-BE`
+echo BE_IP=${BE_IP}
+
+
+# Pass any variables required by Robot test suites in ROBOT_VARIABLES
+ROBOT_VARIABLES="-v BE_IP:${BE_IP}"
+
diff --git a/test/csit/plans/sdc/healthCheck/teardown.sh b/test/csit/plans/sdc/healthCheck/teardown.sh
new file mode 100644
index 000000000..a5f69819e
--- /dev/null
+++ b/test/csit/plans/sdc/healthCheck/teardown.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+#
+# Copyright 2016-2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+
+source ${WORKSPACE}/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh
+
+# $WORKSPACE/archives/clamp-clone is deleted with the archives folder when tests start, so we keep it at the end for debugging
diff --git a/test/csit/plans/sdc/healthCheck/testplan.txt b/test/csit/plans/sdc/healthCheck/testplan.txt
new file mode 100644
index 000000000..2b2db1ede
--- /dev/null
+++ b/test/csit/plans/sdc/healthCheck/testplan.txt
@@ -0,0 +1,3 @@
+# Test suites are relative paths under [integration.git]/test/csit/tests/.
+# Place the suites in run order.
+sdc/healthCheck
diff --git a/test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh
index 382cb7a8d..64fa5a4d0 100644
--- a/test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-driver-vnfm-svnfm/sanity-check/setup.sh
@@ -64,4 +64,4 @@ for i in {1..10}; do
done
# Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v MSB_IAG_IP:${MSB_IAG_IP} -v ZTEVMANAGERDRIVER_IP:${ZTEVMANAGERDRIVER_IP} -v MSB_IP:${MSB_IAG_IP} -v SERVICE_IP:${SERVICE_IP}"
+ROBOT_VARIABLES="-v MSB_IAG_IP:${MSB_IAG_IP} -v ZTEVMANAGERDRIVER_IP:${ZTEVMANAGERDRIVER_IP} -v MSB_IP:${MSB_IAG_IP} -v SERVICE_IP:${SERVICE_IP} -v SCRIPTS:${SCRIPTS}"
diff --git a/test/csit/run-csit.sh b/test/csit/run-csit.sh
index f499d8ed1..3070239b7 100755
--- a/test/csit/run-csit.sh
+++ b/test/csit/run-csit.sh
@@ -105,7 +105,7 @@ pip install --upgrade ${ROBOT_VENV}/src/onap/testsuite/python-testing-utils
# install chrome driver
if [ ! -x ${ROBOT_VENV}/bin/chromedriver ]; then
pushd ${ROBOT_VENV}/bin
- wget -N http://chromedriver.storage.googleapis.com/2.32/chromedriver_linux64.zip
+ wget -N http://chromedriver.storage.googleapis.com/2.27/chromedriver_linux64.zip
unzip chromedriver_linux64.zip
chmod +x chromedriver
popd
diff --git a/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh b/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
new file mode 100644
index 000000000..da421e4cf
--- /dev/null
+++ b/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+#
+# ============LICENSE_START=======================================================
+# ONAP SDC
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights
+# reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END============================================
+# ===================================================================
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+
+echo "This is ${WORKSPACE}/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh"
+
+# Clone SDC environment template
+mkdir -p ${WORKSPACE}/data/environments/
+mkdir -p ${WORKSPACE}/data/clone/
+
+cd ${WORKSPACE}/data/clone
+git clone --depth 1 http://gerrit.onap.org/r/sdc -b master
+
+
+# set environment variables
+
+ENV_NAME=CSIT
+MR_IP_ADDR=10.0.0.1
+
+if [ -e /opt/config/public_ip.txt ]
+ then
+ IP_ADDRESS=$(cat /opt/config/public_ip.txt)
+ else
+ IP_ADDRESS=$(ifconfig eth0 | grep "inet addr" | tr -s ' ' | cut -d' ' -f3 | cut -d':' -f2)
+ fi
+
+ cat ${WORKSPACE}/data/clone/sdc/sdc-os-chef/environments/Template.json | sed "s/yyy/"$IP_ADDRESS"/g" > ${WORKSPACE}/data/environments/$ENV_NAME.json
+ sed -i "s/xxx/"$ENV_NAME"/g" ${WORKSPACE}/data/environments/$ENV_NAME.json
+ sed -i "s/\"ueb_url_list\":.*/\"ueb_url_list\": \""$MR_IP_ADDR","$MR_IP_ADDR"\",/g" ${WORKSPACE}/data/environments/$ENV_NAME.json
+ sed -i "s/\"fqdn\":.*/\"fqdn\": [\""$MR_IP_ADDR"\", \""$MR_IP_ADDR"\"]/g" ${WORKSPACE}/data/environments/$ENV_NAME.json
+
+
diff --git a/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh b/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh
new file mode 100644
index 000000000..e03284248
--- /dev/null
+++ b/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+# Copyright 2016-2017 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Modifications copyright (c) 2017 AT&T Intellectual Property
+#
+
+echo "This is ${WORKSPACE}/test/csit/scripts/sdc/kill_and_remove_dataFolder.sh"
+
+#kill and remove all sdc dockers
+docker stop $(docker ps -a -q --filter="name=sdc")
+docker rm $(docker ps -a -q --filter="name=sdc")
+
+
+#delete data folder
+
+rm -rf ${WORKSPACE}/data/*
+
+
diff --git a/test/csit/scripts/sdc/start_sdc_containers.sh b/test/csit/scripts/sdc/start_sdc_containers.sh
new file mode 100644
index 000000000..31105acb0
--- /dev/null
+++ b/test/csit/scripts/sdc/start_sdc_containers.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+#
+# ============LICENSE_START=======================================================
+# ONAP SDC
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights
+# reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END============================================
+# ===================================================================
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+
+echo "This is ${WORKSPACE}/test/csit/scripts/sdc/start_sdc_containers.sh"
+
+
+RELEASE=latest
+LOCAL=false
+SKIPTESTS=false
+DEP_ENV=CSIT
+#[ -f /opt/config/nexus_username.txt ] && NEXUS_USERNAME=$(cat /opt/config/nexus_username.txt) || NEXUS_USERNAME=release
+#[ -f /opt/config/nexus_password.txt ] && NEXUS_PASSWD=$(cat /opt/config/nexus_password.txt) || NEXUS_PASSWD=sfWU3DFVdBr7GVxB85mTYgAW
+#[ -f /opt/config/nexus_docker_repo.txt ] && NEXUS_DOCKER_REPO=$(cat /opt/config/nexus_docker_repo.txt) || NEXUS_DOCKER_REPO=ecomp-nexus:${PORT}
+#[ -f /opt/config/nexus_username.txt ] && docker login -u $NEXUS_USERNAME -p $NEXUS_PASSWD $NEXUS_DOCKER_REPO
+export IP=`ifconfig eth0 | awk -F: '/inet addr/ {gsub(/ .*/,"",$2); print $2}'`
+#export PREFIX=${NEXUS_DOCKER_REPO}'/openecomp'
+export PREFIX='nexus3.onap.org:10001/openecomp'
+
+#start Elastic-Search
+docker run --detach --name sdc-es --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --memory 1g --memory-swap=1g --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro -e ES_HEAP_SIZE=1024M --volume ${WORKSPACE}/data/ES:/usr/share/elasticsearch/data --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9200:9200 --publish 9300:9300 ${PREFIX}/sdc-elasticsearch:${RELEASE}
+
+#start cassandra
+docker run --detach --name sdc-cs --env RELEASE="${RELEASE}" --env ENVNAME="${DEP_ENV}" --env HOST_IP=${IP} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/CS:/var/lib/cassandra --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9042:9042 --publish 9160:9160 ${PREFIX}/sdc-cassandra:${RELEASE}
+
+echo "please wait while CS is starting..."
+echo ""
+c=120 # seconds to wait
+REWRITE="\e[25D\e[1A\e[K"
+while [ $c -gt 0 ]; do
+ c=$((c-1))
+ sleep 1
+ echo -e "${REWRITE}$c"
+done
+echo -e ""
+
+
+#start kibana
+docker run --detach --name sdc-kbn --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 5601:5601 ${PREFIX}/sdc-kibana:${RELEASE}
+
+#start sdc-backend
+docker run --detach --name sdc-BE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 4g --memory-swap=4g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/BE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 8443:8443 --publish 8080:8080 ${PREFIX}/sdc-backend:${RELEASE}
+
+echo "please wait while BE is starting..."
+echo ""
+c=120 # seconds to wait
+REWRITE="\e[45D\e[1A\e[K"
+while [ $c -gt 0 ]; do
+ c=$((c-1))
+ sleep 1
+ echo -e "${REWRITE}$c"
+done
+echo -e ""
+
+#start Front-End
+docker run --detach --name sdc-FE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/FE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9443:9443 --publish 8181:8181 ${PREFIX}/sdc-frontend:${RELEASE}
+
+echo "please wait while FE is starting..."
+echo ""
+c=120 # seconds to wait
+REWRITE="\e[45D\e[1A\e[K"
+while [ $c -gt 0 ]; do
+ c=$((c-1))
+ sleep 1
+ echo -e "${REWRITE}$c"
+done
+echo -e ""
+
+
+
+
+#TIME=0
+#while [ "$TIME" -lt "$TIME_OUT" ]; do
+# response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://localhost:8080/restservices/clds/v1/clds/healthcheck); echo $response
+
+# if [ "$response" == "200" ]; then
+# echo Clamp and its database well started in $TIME seconds
+# break;
+# fi
+
+# echo Sleep: $INTERVAL seconds before testing if Clamp is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
+# sleep $INTERVAL
+# TIME=$(($TIME+$INTERVAL))
+#done
+
+#if [ "$TIME" -ge "$TIME_OUT" ]; then
+# echo TIME OUT: Docker containers not started in $TIME_OUT seconds... Could cause problems for tests...
+#fi
diff --git a/test/csit/tests/dmaap/mrpubsub/mrpubsub.robot b/test/csit/tests/dmaap/mrpubsub/mrpubsub.robot
new file mode 100755
index 000000000..c711bd754
--- /dev/null
+++ b/test/csit/tests/dmaap/mrpubsub/mrpubsub.robot
@@ -0,0 +1,73 @@
+*** Settings ***
+Library OperatingSystem
+Library RequestsLibrary
+Library requests
+Library Collections
+Library String
+
+*** Variables ***
+${TARGETURL_PUBLISH} http://${DMAAP_MR_IP}:3904/events/TestTopic1
+${TARGETURL_TOPICS} http://${DMAAP_MR_IP}:3904/topics
+${TARGETURL_SUBSCR} http://${DMAAP_MR_IP}:3904/events/TestTopic1/CG1/C1?timeout=1000
+${TEST_DATA} {"topicName": "TestTopic1"}
+${TOPIC_DATA} {"topicName":"FirstTopic","topicDescription":"This is a TestTopic","partitionCount":"1","replicationCount":"3","transactionEnabled":"true"}
+
+*** Test Cases ***
+Run Topic Creation and Publish
+ [Documentation] Topic Creation
+ [Timeout] 1 minute
+ ${resp}= PostCall ${TARGETURL_PUBLISH} ${TEST_DATA}
+ log ${TARGETURL_PUBLISH}
+ log ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ ${count}= Evaluate $resp.json().get('count')
+ log 'JSON Response Code:'${resp}
+
+Run Subscribing a message status
+ [Documentation] Subscribe message status
+ [Timeout] 1 minute
+ ${resp}= GetCall ${TARGETURL_SUBSCR}
+ log ${TARGETURL_SUBSCR}
+ Should Be Equal As Strings ${resp.status_code} 200
+ log 'JSON Response Code :'${resp}
+
+Run check topics are existing
+ [Documentation] Get the count of the Topics
+ [Timeout] 1 minute
+ ${resp}= GetCall ${TARGETURL_TOPICS}
+ log ${TARGETURL_TOPICS}
+ Should Be Equal As Strings ${resp.status_code} 200
+ log 'JSON Response Code :'${resp}
+ ${topics}= Evaluate $resp.json().get('topics')
+ log ${topics}
+ ${ListLength}= Get Length ${topics}
+ log ${ListLength}
+ List Should Contain Value ${topics} TestTopic1
+
+Run Publish and Subscribe a message
+ [Documentation] Publish and Subscribe the message
+ [Timeout] 1 minute
+ ${resp}= PostCall ${TARGETURL_PUBLISH} ${TEST_DATA}
+ log ${TARGETURL_PUBLISH}
+ log ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ ${sub_resp}= GetCall ${TARGETURL_SUBSCR}
+ log ${TARGETURL_SUBSCR}
+ Should Be Equal As Strings ${sub_resp.status_code} 200
+ log 'JSON Response Code :'${sub_resp}
+ ${ListLength}= Get Length ${sub_resp.json()}
+ log ${ListLength}
+ List Should Contain Value ${sub_resp.json()} {"topicName":"TestTopic1"} case_insensitive=yes
+
+*** Keywords ***
+PostCall
+ [Arguments] ${url} ${data}
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= Evaluate requests.post('${url}',data='${data}', headers=${headers},verify=False) requests
+ [Return] ${resp}
+
+GetCall
+ [Arguments] ${url}
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= Evaluate requests.get('${url}', headers=${headers}, verify=False) requests
+ [Return] ${resp}
diff --git a/test/csit/tests/holmes/testcase/CommonKeywords/HttpRequest.robot b/test/csit/tests/holmes/testcase/CommonKeywords/HttpRequest.robot
index 5b8417404..45bec5ef5 100644
--- a/test/csit/tests/holmes/testcase/CommonKeywords/HttpRequest.robot
+++ b/test/csit/tests/holmes/testcase/CommonKeywords/HttpRequest.robot
@@ -30,8 +30,8 @@ httpPost
[Return] ${postResponse}
httpDelete
- [Arguments] ${restHost} ${restUrl} ${data}
+ [Arguments] ${restHost} ${restUrl}
${headers} create dictionary Content-Type=application/json Accept=application/json
create session microservices ${restHost} ${headers}
- ${deleteResponse} delete request microservices ${restUrl} ${data}
+ ${deleteResponse} delete request microservices ${restUrl}
[Return] ${deleteResponse}
diff --git a/test/csit/tests/holmes/testcase/RuleMgt/Rule-Keywords.robot b/test/csit/tests/holmes/testcase/RuleMgt/Rule-Keywords.robot
index 03f840904..182737f54 100644
--- a/test/csit/tests/holmes/testcase/RuleMgt/Rule-Keywords.robot
+++ b/test/csit/tests/holmes/testcase/RuleMgt/Rule-Keywords.robot
@@ -72,8 +72,8 @@ modifyRule
[Return] ${response}
deleteRule
- [Arguments] ${jsonParam} ${codeFlag}=1
- ${response} httpDelete ${ruleMgtHost} ${ruleMgtUrl} ${jsonParam}
+ [Arguments] ${ruleId} ${codeFlag}=1
+ ${response} httpDelete ${ruleMgtHost} ${ruleMgtUrl}/${ruleId}
log ${response.content}
run keyword if ${codeFlag}==1 Should be equal as strings ${response.status_code} 200
run keyword if ${codeFlag}!=1 Should be equal as strings ${response.status_code} 499
diff --git a/test/csit/tests/holmes/testcase/RuleMgt/Rule-Mgt.robot b/test/csit/tests/holmes/testcase/RuleMgt/Rule-Mgt.robot
index 03ee70849..ad2a540fd 100644
--- a/test/csit/tests/holmes/testcase/RuleMgt/Rule-Mgt.robot
+++ b/test/csit/tests/holmes/testcase/RuleMgt/Rule-Mgt.robot
@@ -120,8 +120,8 @@ modify_rule_with_description
delete_existing_rule
[Documentation] Delete an existing rule.
should not be empty ${RULEID}
- deleteRule {"ruleid":"${RULEID}"}
+ deleteRule ${RULEID}
delete_non_existing_rule
[Documentation] Delete a non-existing rule.
- deleteRule {"ruleid":"${RULEID}"} -1
+ deleteRule ${RULEID} -1
diff --git a/test/csit/tests/sdc/healthCheck/__init__.robot b/test/csit/tests/sdc/healthCheck/__init__.robot
new file mode 100644
index 000000000..8ee10d5f6
--- /dev/null
+++ b/test/csit/tests/sdc/healthCheck/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation Sdc - HealthCheck
diff --git a/test/csit/tests/sdc/healthCheck/test1.robot b/test/csit/tests/sdc/healthCheck/test1.robot
new file mode 100644
index 000000000..6d4dc242d
--- /dev/null
+++ b/test/csit/tests/sdc/healthCheck/test1.robot
@@ -0,0 +1,16 @@
+*** Settings ***
+Library Collections
+Library OperatingSystem
+Library RequestsLibrary
+Library json
+
+*** Test Cases ***
+Get Requests health check ok
+ [Tags] get
+ CreateSession sdc-be http://localhost:8080
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= Get Request sdc-be /sdc2/rest/healthCheck headers=&{headers}
+ Should Be Equal As Strings ${resp.status_code} 200
+ @{ITEMS}= Copy List ${resp.json()['componentsInfo']}
+ : FOR ${ELEMENT} IN @{ITEMS}
+ \ Log ${ELEMENT['healthCheckComponent']} ${ELEMENT['healthCheckStatus']}
diff --git a/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot b/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot
index b1aea584a..e0679fbd8 100644
--- a/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot
+++ b/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot
@@ -1,4 +1,5 @@
*** settings ***
+Resource ../../common.robot
Library Collections
Library RequestsLibrary
Library simplejson
@@ -9,7 +10,7 @@ Library HttpLibrary.HTTP
*** Variables ***
@{return_ok_list}= 200 201 202 204
${queryswagger_url} /api/hwvnfm/v1/swagger.json
-${createauthtoken_url} /rest/vnfmmed/v2/auth/tokens
+${createauthtoken_url} /rest/plat/smapp/v1/oauth/token
#json files
${hwvnfm_createtoken_json} ${SCRIPTS}/../tests/vfc/nfvo-driver-svnfm/jsoninput/hwvnfm_createtoken.json
@@ -18,7 +19,7 @@ ${hwvnfm_createtoken_json} ${SCRIPTS}/../tests/vfc/nfvo-driver-svnfm/jsoninpu
SwaggerFuncTest
[Documentation] query swagger info rest test
${headers} Create Dictionary Content-Type=application/json Accept=application/json
- Create Session web_session http://${MSB_IP}:80 headers=${headers}
+ Create Session web_session http://${SERVICE_IP}:8482 headers=${headers}
${resp}= Get Request web_session ${queryswagger_url}
${responese_code}= Convert To String ${resp.status_code}
List Should Contain Value ${return_ok_list} ${responese_code}
@@ -31,10 +32,8 @@ AuthTokenFuncTest
${json_value}= json_from_file ${hwvnfm_createtoken_json}
${json_string}= string_from_json ${json_value}
${headers} Create Dictionary Content-Type=application/json Accept=application/json
- Create Session web_session http://${MSB_IP}:80 headers=${headers}
+ Create Session web_session http://${SERVICE_IP}:8482 headers=${headers}
Set Request Body ${json_string}
- ${resp}= Post Request web_session ${createauthtoken_url} ${json_string}
+ ${resp}= Put Request web_session ${createauthtoken_url} ${json_string}
${responese_code}= Convert To String ${resp.status_code}
- List Should Contain Value ${return_ok_list} ${responese_code}
- ${response_json} json.loads ${resp.content}
- Dictionary Should Contain Key ${response_json} token
\ No newline at end of file
+ List Should Contain Value ${return_ok_list} ${responese_code}
\ No newline at end of file