Diffstat (limited to 'test')
23 files changed, 858 insertions, 34 deletions
diff --git a/test/csit/plans/appc/healthcheck/bundle_query.sh b/test/csit/plans/appc/healthcheck/bundle_query.sh index 7224ae9c5..f163ce5c2 100755 --- a/test/csit/plans/appc/healthcheck/bundle_query.sh +++ b/test/csit/plans/appc/healthcheck/bundle_query.sh @@ -24,8 +24,8 @@ failed_bundles=$(docker exec appc_controller_container /opt/opendaylight/current echo "There are $num_failed_bundles failed bundles out of $num_bundles installed bundles." -if [ "$num_failed_bundles" -ge 1 ] || [ $num_bundles -le 393 ]; then - echo "There are $num_bundles bundles out of 394 with $num_failed_bundles in a failed state. " +if [ "$num_failed_bundles" -ge 1 ] || [ $num_bundles -le 400 ]; then + echo "There are $num_bundles bundles with $num_failed_bundles in a failed state. " echo "The following bundle(s) are in a failed state: " echo " $failed_bundles" exit 1; diff --git a/test/csit/plans/appc/healthcheck/db_query.sh b/test/csit/plans/appc/healthcheck/db_query.sh index 87e0ac397..70829a13a 100755 --- a/test/csit/plans/appc/healthcheck/db_query.sh +++ b/test/csit/plans/appc/healthcheck/db_query.sh @@ -41,20 +41,20 @@ else exit 1; fi -if [ "$NODE_TYPES" -eq "0" ]; then - echo "There is no data in table NODE_TYPES. " - exit 1; -fi +#if [ "$NODE_TYPES" -eq "0" ]; then +# echo "There is no data in table NODE_TYPES. " +# exit 1; +#fi -if [ "$SVC_LOGIC" -eq "0" ] ; then - echo "There is no data in table SVC_LOGIC. " - exit 1; -fi +#if [ "$SVC_LOGIC" -eq "0" ] ; then +# echo "There is no data in table SVC_LOGIC. " +# exit 1; +#fi -if [ "$VNF_DG_MAPPING" -eq "0" ]; then - echo "There is no data in table VNF_DG_MAPPING. " - exit 1; -fi +#if [ "$VNF_DG_MAPPING" -eq "0" ]; then +# echo "There is no data in table VNF_DG_MAPPING. " +# exit 1; +#fi echo "Expected table data is present." 
exit 0 ) diff --git a/test/csit/plans/appc/healthcheck/setup.sh b/test/csit/plans/appc/healthcheck/setup.sh index 3c57cefac..eaf488a65 100755 --- a/test/csit/plans/appc/healthcheck/setup.sh +++ b/test/csit/plans/appc/healthcheck/setup.sh @@ -20,6 +20,12 @@ SCRIPTS="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" source ${WORKSPACE}/test/csit/scripts/appc/script1.sh +export NEXUS_USERNAME=docker +export NEXUS_PASSWD=docker +export NEXUS_DOCKER_REPO=nexus3.onap.org:10001 +export DMAAP_TOPIC=AUTO +export DOCKER_IMAGE_VERSION=1.1.0-SNAPSHOT-latest + export MTU=$(/sbin/ifconfig | grep MTU | sed 's/.*MTU://' | sed 's/ .*//' | sort -n | head -1) if [ "$MTU" == "" ]; then @@ -36,14 +42,14 @@ git pull unset http_proxy https_proxy cd $WORKSPACE/archives/appc/docker-compose -sed -i "s/DMAAP_TOPIC_ENV=.*/DMAAP_TOPIC_ENV="AUTO"/g" docker-compose.yml -docker login -u docker -p docker nexus3.onap.org:10001 +sed -i "s/DMAAP_TOPIC_ENV=.*/DMAAP_TOPIC_ENV="$DMAAP_TOPIC"/g" docker-compose.yml +docker login -u $NEXUS_USERNAME -p $NEXUS_PASSWD $NEXUS_DOCKER_REPO -docker pull nexus3.onap.org:10001/openecomp/appc-image:1.1-STAGING-latest -docker tag nexus3.onap.org:10001/openecomp/appc-image:1.1-STAGING-latest openecomp/appc-image:latest +docker pull $NEXUS_DOCKER_REPO/openecomp/appc-image:$DOCKER_IMAGE_VERSION +docker tag $NEXUS_DOCKER_REPO/openecomp/appc-image:$DOCKER_IMAGE_VERSION openecomp/appc-image:latest -docker pull nexus3.onap.org:10001/openecomp/dgbuilder-sdnc-image:1.1-STAGING-latest -docker tag nexus3.onap.org:10001/openecomp/dgbuilder-sdnc-image:1.1-STAGING-latest openecomp/dgbuilder-sdnc-image:latest +docker pull $NEXUS_DOCKER_REPO/onap/ccsdk-dgbuilder-image:latest +docker tag $NEXUS_DOCKER_REPO/onap/ccsdk-dgbuilder-image:latest onap/ccsdk-dgbuilder-image:latest # start APPC containers with docker compose and configuration from docker-compose.yml docker-compose up -d @@ -71,7 +77,7 @@ fi #sleep 800 -TIME_OUT=1500 +TIME_OUT=1000 INTERVAL=60 TIME=0 while [ "$TIME" -lt "$TIME_OUT" ]; do @@ -79,7 +85,7 @@ while [ "$TIME" -lt "$TIME_OUT" ]; do response=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf system:start-level) num_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | tail -1 | cut -d\| -f1) - if [ "$response" == "Level 100" ] && [ "$num_bundles" -ge 394 ]; then + if [ "$response" == "Level 100" ] && [ "$num_bundles" -ge 400 ]; then echo APPC karaf started in $TIME seconds break; fi @@ -96,7 +102,7 @@ fi response=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf system:start-level) num_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | tail -1 | cut -d\| -f1) - if [ "$response" == "Level 100" ] && [ "$num_bundles" -ge 394 ]; then + if [ "$response" == "Level 100" ] && [ "$num_bundles" -ge 400 ]; then num_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | tail -1 | cut -d\| -f1) num_failed_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | grep Failure | wc -l) failed_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | grep Failure) @@ -111,3 +117,9 @@ fi # Pass any variables required by Robot test suites in ROBOT_VARIABLES ROBOT_VARIABLES="-v SCRIPTS:${SCRIPTS}" +if [ "$response" == "" ] || [ "$num_bundles" == "" ]; then + echo "Docker container 
appc_controller_container is not available. Exiting." + exit 1 +fi + + diff --git a/test/csit/plans/appc/healthcheck/testplan.txt b/test/csit/plans/appc/healthcheck/testplan.txt index 2a8c1ea84..fbf2319a7 100644 --- a/test/csit/plans/appc/healthcheck/testplan.txt +++ b/test/csit/plans/appc/healthcheck/testplan.txt @@ -1,4 +1,5 @@ # Test suites are relative paths under [integration.git]/test/csit/tests/. # Place the suites in run order. appc/healthcheck +#appc/testsuite diff --git a/test/csit/plans/sdc/healthCheck/setup.sh b/test/csit/plans/sdc/healthCheck/setup.sh new file mode 100644 index 000000000..f247be656 --- /dev/null +++ b/test/csit/plans/sdc/healthCheck/setup.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# +# Copyright 2016-2017 Huawei Technologies Co., Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Modifications copyright (c) 2017 AT&T Intellectual Property +# +# Place the scripts in run order: + + +source ${WORKSPACE}/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh + +source ${WORKSPACE}/test/csit/scripts/sdc/start_sdc_containers.sh + + +BE_IP=`get-instance-ip.sh sdc-BE` +echo BE_IP=${BE_IP} + + +# Pass any variables required by Robot test suites in ROBOT_VARIABLES +ROBOT_VARIABLES="-v BE_IP:${BE_IP}" + diff --git a/test/csit/plans/sdc/healthCheck/teardown.sh b/test/csit/plans/sdc/healthCheck/teardown.sh new file mode 100644 index 000000000..a5f69819e --- /dev/null +++ b/test/csit/plans/sdc/healthCheck/teardown.sh @@ -0,0 +1,22 @@ +#!/bin/bash +# +# Copyright 2016-2017 Huawei Technologies Co., Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Modifications copyright (c) 2017 AT&T Intellectual Property +# + +source ${WORKSPACE}/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh + +# $WORKSPACE/archives/clamp-clone deleted with archives folder when tests starts so we keep it at the end for debugging diff --git a/test/csit/plans/sdc/healthCheck/testplan.txt b/test/csit/plans/sdc/healthCheck/testplan.txt new file mode 100644 index 000000000..2b2db1ede --- /dev/null +++ b/test/csit/plans/sdc/healthCheck/testplan.txt @@ -0,0 +1,3 @@ +# Test suites are relative paths under [integration.git]/test/csit/tests/. +# Place the suites in run order. 
+sdc/healthCheck diff --git a/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh b/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh new file mode 100644 index 000000000..da421e4cf --- /dev/null +++ b/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh @@ -0,0 +1,52 @@ +#!/bin/bash +# +# ============LICENSE_START======================================================= +# ONAP CLAMP +# ================================================================================ +# Copyright (C) 2017 AT&T Intellectual Property. All rights +# reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END============================================ +# =================================================================== +# ECOMP is a trademark and service mark of AT&T Intellectual Property. +# + +echo "This is ${WORKSPACE}/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh" + +# Clone sdc enviroment template +mkdir -p ${WORKSPACE}/data/environments/ +mkdir -p ${WORKSPACE}/data/clone/ + +cd ${WORKSPACE}/data/clone +git clone --depth 1 http://gerrit.onap.org/r/sdc -b master + + +# set enviroment variables + +ENV_NAME=CSIT +MR_IP_ADDR=10.0.0.1 + +if [ -e /opt/config/public_ip.txt ] + then + IP_ADDRESS=$(cat /opt/config/public_ip.txt) + else + IP_ADDRESS=$(ifconfig eth0 | grep "inet addr" | tr -s ' ' | cut -d' ' -f3 | cut -d':' -f2) + fi + + cat ${WORKSPACE}/data/clone/sdc/sdc-os-chef/environments/Template.json | sed "s/yyy/"$IP_ADDRESS"/g" > ${WORKSPACE}/data/environments/$ENV_NAME.json + sed -i "s/xxx/"$ENV_NAME"/g" ${WORKSPACE}/data/environments/$ENV_NAME.json + sed -i "s/\"ueb_url_list\":.*/\"ueb_url_list\": \""$MR_IP_ADDR","$MR_IP_ADDR"\",/g" ${WORKSPACE}/data/environments/$ENV_NAME.json + sed -i "s/\"fqdn\":.*/\"fqdn\": [\""$MR_IP_ADDR"\", \""$MR_IP_ADDR"\"]/g" ${WORKSPACE}/data/environments/$ENV_NAME.json + + diff --git a/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh b/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh new file mode 100644 index 000000000..e03284248 --- /dev/null +++ b/test/csit/scripts/sdc/kill_containers_and_remove_dataFolders.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# +# Copyright 2016-2017 Huawei Technologies Co., Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Modifications copyright (c) 2017 AT&T Intellectual Property +# + +echo "This is ${WORKSPACE}/test/csit/scripts/sdc/kill_and_remove_dataFolder.sh" + +#kill and remove all sdc dockers +docker stop $(docker ps -a -q --filter="name=sdc") +docker rm $(docker ps -a -q --filter="name=sdc") + + +#delete data folder + +rm -rf ${WORKSPACE}/data/* + + diff --git a/test/csit/scripts/sdc/start_sdc_containers.sh b/test/csit/scripts/sdc/start_sdc_containers.sh new file mode 100644 index 000000000..31105acb0 --- /dev/null +++ b/test/csit/scripts/sdc/start_sdc_containers.sh @@ -0,0 +1,108 @@ +#!/bin/bash +# +# ============LICENSE_START======================================================= +# ONAP CLAMP +# ================================================================================ +# Copyright (C) 2017 AT&T Intellectual Property. All rights +# reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END============================================ +# =================================================================== +# ECOMP is a trademark and service mark of AT&T Intellectual Property. +# + +echo "This is ${WORKSPACE}/test/csit/scripts/sdc/start_sdc_containers.sh" + + +RELEASE=latest +LOCAL=false +SKIPTESTS=false +DEP_ENV=CSIT +#[ -f /opt/config/nexus_username.txt ] && NEXUS_USERNAME=$(cat /opt/config/nexus_username.txt) || NEXUS_USERNAME=release +#[ -f /opt/config/nexus_password.txt ] && NEXUS_PASSWD=$(cat /opt/config/nexus_password.txt) || NEXUS_PASSWD=sfWU3DFVdBr7GVxB85mTYgAW +#[ -f /opt/config/nexus_docker_repo.txt ] && NEXUS_DOCKER_REPO=$(cat /opt/config/nexus_docker_repo.txt) || NEXUS_DOCKER_REPO=ecomp-nexus:${PORT} +#[ -f /opt/config/nexus_username.txt ] && docker login -u $NEXUS_USERNAME -p $NEXUS_PASSWD $NEXUS_DOCKER_REPO +export IP=`ifconfig eth0 | awk -F: '/inet addr/ {gsub(/ .*/,"",$2); print $2}'` +#export PREFIX=${NEXUS_DOCKER_REPO}'/openecomp' +export PREFIX='nexus3.onap.org:10001/openecomp' + +#start Elastic-Search +docker run --detach --name sdc-es --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --memory 1g --memory-swap=1g --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro -e ES_HEAP_SIZE=1024M --volume ${WORKSPACE}/data/ES:/usr/share/elasticsearch/data --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9200:9200 --publish 9300:9300 ${PREFIX}/sdc-elasticsearch:${RELEASE} + +#start cassandra +docker run --detach --name sdc-cs --env RELEASE="${RELEASE}" --env ENVNAME="${DEP_ENV}" --env HOST_IP=${IP} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/CS:/var/lib/cassandra --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9042:9042 --publish 9160:9160 ${PREFIX}/sdc-cassandra:${RELEASE} + +echo "please 
wait while CS is starting..." +echo "" +c=120 # seconds to wait +REWRITE="\e[25D\e[1A\e[K" +while [ $c -gt 0 ]; do + c=$((c-1)) + sleep 1 + echo -e "${REWRITE}$c" +done +echo -e "" + + +#start kibana +docker run --detach --name sdc-kbn --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 5601:5601 ${PREFIX}/sdc-kibana:${RELEASE} + +#start sdc-backend +docker run --detach --name sdc-BE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 4g --memory-swap=4g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/BE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 8443:8443 --publish 8080:8080 ${PREFIX}/sdc-backend:${RELEASE} + +echo "please wait while BE is starting..." +echo "" +c=120 # seconds to wait +REWRITE="\e[45D\e[1A\e[K" +while [ $c -gt 0 ]; do + c=$((c-1)) + sleep 1 + echo -e "${REWRITE}$c" +done +echo -e "" + +#start Front-End +docker run --detach --name sdc-FE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/FE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9443:9443 --publish 8181:8181 ${PREFIX}/sdc-frontend:${RELEASE} + +echo "please wait while FE is starting..." +echo "" +c=120 # seconds to wait +REWRITE="\e[45D\e[1A\e[K" +while [ $c -gt 0 ]; do + c=$((c-1)) + sleep 1 + echo -e "${REWRITE}$c" +done +echo -e "" + + + + +#TIME=0 +#while [ "$TIME" -lt "$TIME_OUT" ]; do +# response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://localhost:8080/restservices/clds/v1/clds/healthcheck); echo $response + +# if [ "$response" == "200" ]; then +# echo Clamp and its database well started in $TIME seconds +# break; +# fi + +# echo Sleep: $INTERVAL seconds before testing if Clamp is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds +# sleep $INTERVAL +# TIME=$(($TIME+$INTERVAL)) +#done + +#if [ "$TIME" -ge "$TIME_OUT" ]; then +# echo TIME OUT: Docker containers not started in $TIME_OUT seconds... Could cause problems for tests... 
+#fi diff --git a/test/csit/tests/sdc/healthCheck/__init__.robot b/test/csit/tests/sdc/healthCheck/__init__.robot new file mode 100644 index 000000000..8ee10d5f6 --- /dev/null +++ b/test/csit/tests/sdc/healthCheck/__init__.robot @@ -0,0 +1,2 @@ +*** Settings *** +Documentation Sdc - HealthCheck diff --git a/test/csit/tests/sdc/healthCheck/test1.robot b/test/csit/tests/sdc/healthCheck/test1.robot new file mode 100644 index 000000000..6d4dc242d --- /dev/null +++ b/test/csit/tests/sdc/healthCheck/test1.robot @@ -0,0 +1,16 @@ +*** Settings *** +Library Collections +Library OperatingSystem +Library RequestsLibrary +Library json + +*** Test Cases *** +Get Requests health check ok + [Tags] get + CreateSession sdc-be http://localhost:8080 + ${headers}= Create Dictionary Accept=application/json Content-Type=application/json + ${resp}= Get Request sdc-be /sdc2/rest/healthCheck headers=&{headers} + Should Be Equal As Strings ${resp.status_code} 200 + @{ITEMS}= Copy List ${resp.json()['componentsInfo']} + : FOR ${ELEMENT} IN @{ITEMS} + \ Log ${ELEMENT['healthCheckComponent']} ${ELEMENT['healthCheckStatus']} diff --git a/test/csit/tests/so/sanity-check/sanity_test_so.robot b/test/csit/tests/so/sanity-check/sanity_test_so.robot index 2e05c50f1..73a9f3f1f 100644 --- a/test/csit/tests/so/sanity-check/sanity_test_so.robot +++ b/test/csit/tests/so/sanity-check/sanity_test_so.robot @@ -13,59 +13,59 @@ Create ServiceInstance for invalid input Create Session refrepo http://${REPO_IP}:8080 ${data}= Get Binary File ${CURDIR}${/}data${/}createService.json &{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA== Content-Type=application/json Accept=application/json - ${resp}= Post Request refrepo /ecomp/mso/infra/serviceInstances/v2 data=${data} headers=${headers} + ${resp}= Post Request refrepo /ecomp/mso/infra/serviceInstances/v3 data=${data} headers=${headers} Run Keyword If '${resp.status_code}' == '400' or '${resp.status_code}' == '404' or '${resp.status_code}' == '405' log to console \nexecuted with expected result Create ServiceInstance for invalid user Create Session refrepo http://${REPO_IP}:8080 ${data}= Get Binary File ${CURDIR}${/}data${/}createService.json &{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQxOnBhc3N3b3JkMTI= Content-Type=application/json Accept=application/json - ${resp}= Post Request refrepo /ecomp/mso/infra/serviceInstances/v2 data=${data} headers=${headers} + ${resp}= Post Request refrepo /ecomp/mso/infra/serviceInstances/v3 data=${data} headers=${headers} Run Keyword If '${resp.status_code}' == '400' or '${resp.status_code}' == '404' or '${resp.status_code}' == '405' log to console \nexecuted with expected result Delete ServiceInstance for invalid input Create Session refrepo http://${REPO_IP}:8080 ${data}= Get Binary File ${CURDIR}${/}data${/}deleteService.json &{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA== Content-Type=application/json Accept=application/json - ${resp}= Delete Request refrepo /ecomp/mso/infra/serviceInstances/v2/ff305d54-75b4-431b-adb2-eb6b9e5ff000 data=${data} headers=${headers} + ${resp}= Delete Request refrepo /ecomp/mso/infra/serviceInstances/v3/ff305d54-75b4-431b-adb2-eb6b9e5ff000 data=${data} headers=${headers} Run Keyword If '${resp.status_code}' == '400' or '${resp.status_code}' == '404' or '${resp.status_code}' == '405' log to console \nexecuted with expected result Delete ServiceInstance for invalid user Create Session refrepo http://${REPO_IP}:8080 
${data}= Get Binary File ${CURDIR}${/}data${/}deleteService.json &{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQxOnBhc3N3b3JkMTI== Content-Type=application/json Accept=application/json - ${resp}= Delete Request refrepo /ecomp/mso/infra/serviceInstances/v2/ff305d54-75b4-431b-adb2-eb6b9e5ff000 data=${data} headers=${headers} + ${resp}= Delete Request refrepo /ecomp/mso/infra/serviceInstances/v3/ff305d54-75b4-431b-adb2-eb6b9e5ff000 data=${data} headers=${headers} Run Keyword If '${resp.status_code}' == '400' or '${resp.status_code}' == '404' or '${resp.status_code}' == '405' log to console \nexecuted with expected result SO ServiceInstance health check Create Session refrepo http://${REPO_IP}:8080 &{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA== Content-Type=application/json Accept=application/json - ${resp}= Get Request refrepo /ecomp/mso/infra/orchestrationRequests/v2/rq1234d1-5a33-55df-13ab-12abad84e333 headers=${headers} + ${resp}= Get Request refrepo /ecomp/mso/infra/orchestrationRequests/v3/rq1234d1-5a33-55df-13ab-12abad84e333 headers=${headers} Should Not Contain ${resp.content} null Create VnfInstance for invalid input Create Session refrepo http://${REPO_IP}:8080 ${data}= Get Binary File ${CURDIR}${/}data${/}createVnf.json &{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA== Content-Type=application/json Accept=application/json - ${resp}= Post Request refrepo /ecomp/mso/infra/serviceInstances/v2/ff305d54-75b4-431b-adb2-eb6b9e5ff000/vnfs data=${data} headers=${headers} + ${resp}= Post Request refrepo /ecomp/mso/infra/serviceInstances/v3/ff305d54-75b4-431b-adb2-eb6b9e5ff000/vnfs data=${data} headers=${headers} Run Keyword If '${resp.status_code}' == '400' or '${resp.status_code}' == '404' or '${resp.status_code}' == '405' log to console \nexecuted with expected result Create VnfInstance for invalid credential Create Session refrepo http://${REPO_IP}:8080 ${data}= Get Binary File ${CURDIR}${/}data${/}createVnf.json &{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQxOnBhc3N3b3JkMTI= Content-Type=application/json Accept=application/json - ${resp}= Post Request refrepo /ecomp/mso/infra/serviceInstances/v2/ff305d54-75b4-431b-adb2-eb6b9e5ff000/vnfs data=${data} headers=${headers} + ${resp}= Post Request refrepo /ecomp/mso/infra/serviceInstances/v3/ff305d54-75b4-431b-adb2-eb6b9e5ff000/vnfs data=${data} headers=${headers} Run Keyword If '${resp.status_code}' == '400' or '${resp.status_code}' == '404' or '${resp.status_code}' == '405' log to console \nexecuted with expected result Delete VnfInstance for invalid input Create Session refrepo http://${REPO_IP}:8080 ${data}= Get Binary File ${CURDIR}${/}data${/}deleteVnf.json &{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA== Content-Type=application/json Accept=application/json - ${resp}= Delete Request refrepo /ecomp/mso/infra/serviceInstances/v2/ff305d54-75b4-431b-adb2-eb6b9e5ff000/vnfs/aca51b0a-710d-4155-bc7c-7cef19d9a94e data=${data} headers=${headers} + ${resp}= Delete Request refrepo /ecomp/mso/infra/serviceInstances/v3/ff305d54-75b4-431b-adb2-eb6b9e5ff000/vnfs/aca51b0a-710d-4155-bc7c-7cef19d9a94e data=${data} headers=${headers} Run Keyword If '${resp.status_code}' == '400' or '${resp.status_code}' == '404' or '${resp.status_code}' == '405' log to console \nexecuted with expected result Get Orchestration Requests Create Session refrepo http://${REPO_IP}:8080 &{headers}= 
Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA== Content-Type=application/json Accept=application/json - ${resp}= Get Request refrepo /ecomp/mso/infra/orchestrationRequests/v2 headers=${headers} + ${resp}= Get Request refrepo /ecomp/mso/infra/orchestrationRequests/v3 headers=${headers} Should Not Contain ${resp.content} null
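The suite above moves every SO request from the v2 to the v3 API paths. As a rough manual equivalent of the orchestration-request check, assuming SO is reachable on port 8080 at ${REPO_IP} exactly as in the Robot suite (a sketch, not part of the change):

    # Hypothetical manual probe mirroring "Get Orchestration Requests"; adjust REPO_IP for your deployment.
    REPO_IP=localhost
    curl -s \
      -H "Authorization: Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA==" \
      -H "Content-Type: application/json" -H "Accept: application/json" \
      "http://${REPO_IP}:8080/ecomp/mso/infra/orchestrationRequests/v3"
    # The Robot test only asserts that the response body is not "null".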
\ No newline at end of file diff --git a/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot b/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot index 3277e7782..9bb85b891 100644 --- a/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot +++ b/test/csit/tests/vfc/nfvo-driver-svnfm/huawei.robot @@ -1,4 +1,5 @@ *** settings *** +Resource ../../common.robot Library Collections Library RequestsLibrary Library simplejson @@ -7,10 +8,34 @@ Library json Library HttpLibrary.HTTP *** Variables *** -@{return_ok_list}= 200 201 202 +@{return_ok_list}= 200 201 202 204 ${queryswagger_url} /api/hwvnfm/v1/swagger.json +${createauthtoken_url} /rest/vnfmmed/v2/auth/tokens + +#json files +${hwvnfm_createtoken_json} ${SCRIPTS}/../tests/vfc/nfvo-driver-svnfm/jsoninput/hwvnfm_createtoken.json *** Test Cases *** SwaggerFuncTest [Documentation] query swagger info rest test - Should Be Equal 2.0 2.0 + ${headers} Create Dictionary Content-Type=application/json Accept=application/json + Create Session web_session http://${SERVICE_IP}:8482 headers=${headers} + ${resp}= Get Request web_session ${queryswagger_url} + ${responese_code}= Convert To String ${resp.status_code} + List Should Contain Value ${return_ok_list} ${responese_code} + ${response_json} json.loads ${resp.content} + ${swagger_version}= Convert To String ${response_json['swagger']} + Should Be Equal ${swagger_version} 2.0 + +AuthTokenFuncTest + [Documentation] create auth token rest test + ${json_value}= json_from_file ${hwvnfm_createtoken_json} + ${json_string}= string_from_json ${json_value} + ${headers} Create Dictionary Content-Type=application/json Accept=application/json + Create Session web_session http://${SERVICE_IP}:8482 headers=${headers} + Set Request Body ${json_string} + ${resp}= Post Request web_session ${createauthtoken_url} ${json_string} + ${responese_code}= Convert To String ${resp.status_code} + List Should Contain Value ${return_ok_list} ${responese_code} + ${response_json} json.loads ${resp.content} + Dictionary Should Contain Key ${response_json} token
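SwaggerFuncTest above now really queries the driver's swagger descriptor instead of comparing constants. A minimal shell sketch of the same probe, assuming the huawei VNFM driver listens on port 8482 at ${SERVICE_IP} as in the suite:

    # Hypothetical manual check mirroring SwaggerFuncTest.
    SERVICE_IP=localhost
    curl -s "http://${SERVICE_IP}:8482/api/hwvnfm/v1/swagger.json" | grep -o '"swagger"[^,]*'
    # Expected: "swagger": "2.0", matching the assertion in the test.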
\ No newline at end of file
diff --git a/test/csit/tests/vfc/nfvo-driver-svnfm/jsoninput/hwvnfm_createtoken.json b/test/csit/tests/vfc/nfvo-driver-svnfm/jsoninput/hwvnfm_createtoken.json
new file mode 100644
index 000000000..e9a6c3e92
--- /dev/null
+++ b/test/csit/tests/vfc/nfvo-driver-svnfm/jsoninput/hwvnfm_createtoken.json
@@ -0,0 +1,13 @@
+{
+ "auth": {
+ "identity": {
+ "methods": ["password"],
+ "password": {
+ "user": {
+ "name": "admin",
+ "password": "User@12345"
+ }
+ }
+ }
+ }
+}
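AuthTokenFuncTest posts the JSON above to the driver's token endpoint. A command-line sketch of the same request, assuming the driver container is reachable on port 8482 at ${SERVICE_IP} as in the Robot suite:

    # Hypothetical manual run of the request exercised by AuthTokenFuncTest.
    SERVICE_IP=localhost
    curl -s -X POST \
      -H "Content-Type: application/json" -H "Accept: application/json" \
      -d @test/csit/tests/vfc/nfvo-driver-svnfm/jsoninput/hwvnfm_createtoken.json \
      "http://${SERVICE_IP}:8482/rest/vnfmmed/v2/auth/tokens"
    # The suite accepts any of 200/201/202/204 and expects a "token" key in the response body.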
\ No newline at end of file diff --git a/test/mock/pom.xml b/test/mock/pom.xml new file mode 100644 index 000000000..56aad78f2 --- /dev/null +++ b/test/mock/pom.xml @@ -0,0 +1,142 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + <groupId>org.onap.integration</groupId> + <artifactId>mock</artifactId> + <version>1.0-SNAPSHOT</version> + <name>mock</name> + <description>onap emulator project based on Spring Boot</description> + +<parent> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-parent</artifactId> + <version>1.5.7.RELEASE</version> + <relativePath/> <!-- lookup parent from repository --> +</parent> + +<properties> + <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> + <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> + <java.version>1.8</java.version> + <versions.jackson>2.8.9</versions.jackson> + <jetty.version>9.2.22.v20170606</jetty.version> +</properties> + +<dependencies> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter</artifactId> + </dependency> + + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-test</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.springframework.cloud</groupId> + <artifactId>spring-cloud-contract-wiremock</artifactId> + <version>1.1.3.RELEASE</version> + <scope>compile</scope> + </dependency> + <dependency> + <groupId>net.sf.jopt-simple</groupId> + <artifactId>jopt-simple</artifactId> + <version>5.0.3</version> + </dependency> + <dependency> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-server</artifactId> + <version>9.2.22.v20170606</version> + </dependency> + <dependency> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-servlet</artifactId> + <version>9.2.22.v20170606</version> + </dependency> + <dependency> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-servlets</artifactId> + <version>9.2.22.v20170606</version> + </dependency> + <dependency> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-webapp</artifactId> + <version>9.2.22.v20170606</version> + </dependency> + <dependency> + <groupId>com.google.guava</groupId> + <artifactId>guava</artifactId> + <version>20.0</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-core</artifactId> + <version>${versions.jackson}</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-annotations</artifactId> + <version>${versions.jackson}</version> + </dependency> + <dependency> + <groupId>com.fasterxml.jackson.core</groupId> + <artifactId>jackson-databind</artifactId> + <version>${versions.jackson}</version> + </dependency> + <dependency> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpclient</artifactId> + </dependency> + <dependency> + <groupId>org.xmlunit</groupId> + <artifactId>xmlunit-core</artifactId> + <version>2.3.0</version> + </dependency> + <dependency> + <groupId>org.xmlunit</groupId> + <artifactId>xmlunit-legacy</artifactId> + <version>2.3.0</version> + </dependency> + <dependency> + <groupId>com.jayway.jsonpath</groupId> + <artifactId>json-path</artifactId> + <version>2.4.0</version> + </dependency> + 
<dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + <version>1.7.12</version> + </dependency> + <dependency> + <groupId>org.apache.commons</groupId> + <artifactId>commons-lang3</artifactId> + <version>3.6</version> + </dependency> + <dependency> + <groupId>com.flipkart.zjsonpatch</groupId> + <artifactId>zjsonpatch</artifactId> + <version>0.3.0</version> + </dependency> + <dependency> + <groupId>com.github.jknack</groupId> + <artifactId>handlebars</artifactId> + <version>4.0.6</version> + </dependency> +</dependencies> + +<build> + <finalName>${project.artifactId}</finalName> + <plugins> + <plugin> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-maven-plugin</artifactId> + </plugin> + <plugin> + <artifactId>maven-dependency-plugin</artifactId> + </plugin> + </plugins> +</build> +</project> diff --git a/test/mock/src/main/docker/Dockerfile b/test/mock/src/main/docker/Dockerfile new file mode 100644 index 000000000..b1bf4d93c --- /dev/null +++ b/test/mock/src/main/docker/Dockerfile @@ -0,0 +1,19 @@ +FROM openjdk:8-jre + +MAINTAINER Geora Barsky <georab@amdocs.com> + +RUN mkdir -p /var/wiremock/lib/ + +ADD mock.jar /var/wiremock/lib/app.jar + +WORKDIR /home/wiremock + +COPY docker-entrypoint.sh / +RUN chmod 700 /docker-entrypoint.sh + +VOLUME /home/wiremock +EXPOSE 8080 8081 9999 + +ENTRYPOINT ["/docker-entrypoint.sh"] + +CMD ["java", "-jar","/var/wiremock/lib/app.jar"]
\ No newline at end of file
diff --git a/test/mock/src/main/docker/docker-entrypoint.sh b/test/mock/src/main/docker/docker-entrypoint.sh
new file mode 100644
index 000000000..47364a270
--- /dev/null
+++ b/test/mock/src/main/docker/docker-entrypoint.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+touch /app.jar
+
+java -Xms1024m -Xmx1024m -jar /var/wiremock/lib/app.jar
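The Dockerfile adds the Spring Boot jar as mock.jar and starts it through docker-entrypoint.sh. A sketch of building and running the image by hand, assuming the jar comes from the pom.xml above; the image tag and the manual copy of the jar into the Docker build context are illustrative, not part of this change:

    # Hypothetical local build/run; "onap/test-mock" is a made-up tag.
    cd test/mock
    mvn clean package                              # spring-boot repackage, finalName "mock" -> target/mock.jar
    cp target/mock.jar src/main/docker/mock.jar    # Dockerfile ADDs mock.jar from its build context
    docker build -t onap/test-mock src/main/docker
    docker run -d --name test-mock -p 8080:8080 -p 8081:8081 -p 9999:9999 onap/test-mock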
\ No newline at end of file diff --git a/test/mock/src/main/java/org/onap/integration/test/mock/MockApplication.java b/test/mock/src/main/java/org/onap/integration/test/mock/MockApplication.java new file mode 100644 index 000000000..115cb252a --- /dev/null +++ b/test/mock/src/main/java/org/onap/integration/test/mock/MockApplication.java @@ -0,0 +1,122 @@ +package org.onap.integration.test.mock; + +import static com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder.responseDefinition; +import static com.github.tomakehurst.wiremock.client.WireMock.anyUrl; +import static com.github.tomakehurst.wiremock.core.WireMockApp.FILES_ROOT; +import static com.github.tomakehurst.wiremock.core.WireMockApp.MAPPINGS_ROOT; +import static com.github.tomakehurst.wiremock.http.RequestMethod.ANY; +import static com.github.tomakehurst.wiremock.matching.RequestPatternBuilder.newRequestPattern; +import static java.lang.System.out; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.common.ConsoleNotifier; +import com.github.tomakehurst.wiremock.common.FatalStartupException; +import com.github.tomakehurst.wiremock.common.FileSource; +import com.github.tomakehurst.wiremock.core.WireMockConfiguration; +import com.github.tomakehurst.wiremock.http.ResponseDefinition; +import com.github.tomakehurst.wiremock.matching.RequestPattern; +import com.github.tomakehurst.wiremock.standalone.MappingsLoader; +import com.github.tomakehurst.wiremock.stubbing.StubMapping; +import com.github.tomakehurst.wiremock.stubbing.StubMappings; + +@SpringBootApplication +public class MockApplication { + + + private static final String BANNER= " \n" + +" ******** **** **** ## \n" + +" **######** ###* *### ## \n" + +" *##******##* ##*** ***## ##\n" + +" **#* *#** ##*#* *#*## ## \n" + +" *#* *#* ##****** ******* ##****** ##*#* *#*## ******* ****** ## *** \n" + +" *#* *#* ##*####* *######* ##*####** ##*#* *#*## **#####** **####** ## *#** \n" + +" *#* *#* ##****#* *#****#* ##** **#* ## *** *** ## *#** **#* *#****#* ## **#** \n" + +" *# #* ##* *#* #* ##* *#* ## *#* *#* ## *#* *#* *#* *#* ##**#** \n" + +" *#* *#* ##* ## ****## ##* *#* ## *#* *#* ## *#* *#* *#* ##*##* \n" + +" *#* *#* ## ## **###### ## #* ## *#* *#* ## *# #* *# ##**#** \n" + +" *#* *#* ## ## *#****## ##* *#* ## *#*#* ## *#* *#* *#* ##**##* \n" + +" **#* *#** ## ## *#* *## ##* *#* ## *#*#* ## *#* *#* *#* *#* ## *#** \n" + +" *##******##* ## ## *#* **##* ##** **#* ## *#*#* ## *#** **#* *#****#* ## **#* \n" + +" **######** ## ## *#######* ##*####* ## *###* ## **#####** **####** ## *#** \n" + +" ******** ## ## *******#* ##****** ## *#* ## ******* ****** ## *#* \n" + +" ## \n" + +" ## \n" + +" ## \n" + +" ** \n" ; + + static { + System.setProperty("org.mortbay.log.class", "com.github.tomakehurst.wiremock.jetty.LoggerAdapter"); + } + + private WireMockServer wireMockServer; + + public static void main(String[] args) { + SpringApplication.run(MockApplication.class, args); + //new WireMockServerRunner().run("--port 9999"); + new MockApplication().run(args); + } + + public void run(String... 
args) { + + WireMockConfiguration options = WireMockConfiguration.options(); + options.port(9999); + FileSource fileSource = options.filesRoot(); + fileSource.createIfNecessary(); + FileSource filesFileSource = fileSource.child(FILES_ROOT); + filesFileSource.createIfNecessary(); + FileSource mappingsFileSource = fileSource.child(MAPPINGS_ROOT); + mappingsFileSource.createIfNecessary(); + + // Register extension + options.extensions("org.onap.integration.test.mock.extension.Webhooks"); + // Register notifier + options.notifier(new ConsoleNotifier(true)); + wireMockServer = new WireMockServer(options); + + wireMockServer.enableRecordMappings(mappingsFileSource, filesFileSource); + + //if (options.specifiesProxyUrl()) { + // addProxyMapping(options.proxyUrl()); + //} + + try { + wireMockServer.start(); + out.println(BANNER); + out.println(); + out.println(options); + } catch (FatalStartupException e) { + System.err.println(e.getMessage()); + System.exit(1); + } + } + + private void addProxyMapping(final String baseUrl) { + wireMockServer.loadMappingsUsing(new MappingsLoader() { + @Override + public void loadMappingsInto(StubMappings stubMappings) { + RequestPattern requestPattern = newRequestPattern(ANY, anyUrl()).build(); + ResponseDefinition responseDef = responseDefinition() + .proxiedFrom(baseUrl) + .build(); + + StubMapping proxyBasedMapping = new StubMapping(requestPattern, responseDef); + proxyBasedMapping.setPriority(10); // Make it low priority so that existing stubs will take precedence + stubMappings.addMapping(proxyBasedMapping); + } + }); + } + + public void stop() { + wireMockServer.stop(); + } + + public boolean isRunning() { + return wireMockServer.isRunning(); + } + + public int port() { return wireMockServer.port(); } + +} diff --git a/test/mock/src/main/java/org/onap/integration/test/mock/extension/WebhookDefinition.java b/test/mock/src/main/java/org/onap/integration/test/mock/extension/WebhookDefinition.java new file mode 100644 index 000000000..dff99fd41 --- /dev/null +++ b/test/mock/src/main/java/org/onap/integration/test/mock/extension/WebhookDefinition.java @@ -0,0 +1,101 @@ +package org.onap.integration.test.mock.extension; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.github.tomakehurst.wiremock.http.Body; +import com.github.tomakehurst.wiremock.http.HttpHeader; +import com.github.tomakehurst.wiremock.http.HttpHeaders; +import com.github.tomakehurst.wiremock.http.RequestMethod; + +import java.net.URI; +import java.util.List; + +import static com.google.common.collect.Lists.newArrayList; + +public class WebhookDefinition { + + private RequestMethod method; + private URI url; + private List<HttpHeader> headers; + private Body body = Body.none(); + + @JsonCreator + public WebhookDefinition(@JsonProperty("method") RequestMethod method, + @JsonProperty("url") URI url, + @JsonProperty("headers") HttpHeaders headers, + @JsonProperty("body") String body, + @JsonProperty("base64Body") String base64Body) { + this.method = method; + this.url = url; + this.headers = newArrayList(headers.all()); + this.body = Body.fromOneOf(null, body, null, base64Body); + } + + public WebhookDefinition() { + } + + public RequestMethod getMethod() { + return method; + } + + public URI getUrl() { + return url; + } + + public HttpHeaders getHeaders() { + return new HttpHeaders(headers); + } + + public String getBase64Body() { + return body.isBinary() ? 
body.asBase64() : null; + } + + public String getBody() { + return body.isBinary() ? null : body.asString(); + } + + @JsonIgnore + public byte[] getBinaryBody() { + return body.asBytes(); + } + + public WebhookDefinition withMethod(RequestMethod method) { + this.method = method; + return this; + } + + public WebhookDefinition withUrl(URI url) { + this.url = url; + return this; + } + + public WebhookDefinition withUrl(String url) { + withUrl(URI.create(url)); + return this; + } + + public WebhookDefinition withHeaders(List<HttpHeader> headers) { + this.headers = headers; + return this; + } + + public WebhookDefinition withHeader(String key, String... values) { + if (headers == null) { + headers = newArrayList(); + } + + headers.add(new HttpHeader(key, values)); + return this; + } + + public WebhookDefinition withBody(String body) { + this.body = new Body(body); + return this; + } + + public WebhookDefinition withBinaryBody(byte[] body) { + this.body = new Body(body); + return this; + } +} diff --git a/test/mock/src/main/java/org/onap/integration/test/mock/extension/Webhooks.java b/test/mock/src/main/java/org/onap/integration/test/mock/extension/Webhooks.java new file mode 100644 index 000000000..cb17ba658 --- /dev/null +++ b/test/mock/src/main/java/org/onap/integration/test/mock/extension/Webhooks.java @@ -0,0 +1,100 @@ +package org.onap.integration.test.mock.extension; + +import com.github.tomakehurst.wiremock.common.Notifier; +import com.github.tomakehurst.wiremock.core.Admin; +import com.github.tomakehurst.wiremock.extension.Parameters; +import com.github.tomakehurst.wiremock.extension.PostServeAction; +import com.github.tomakehurst.wiremock.http.HttpClientFactory; +import com.github.tomakehurst.wiremock.http.HttpHeader; +import com.github.tomakehurst.wiremock.stubbing.ServeEvent; +import org.apache.http.HttpResponse; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; +import org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.util.EntityUtils; + +import java.io.IOException; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +import static com.github.tomakehurst.wiremock.common.Exceptions.throwUnchecked; +import static com.github.tomakehurst.wiremock.common.LocalNotifier.notifier; +import static com.github.tomakehurst.wiremock.http.HttpClientFactory.getHttpRequestFor; +import static java.util.concurrent.TimeUnit.SECONDS; + +public class Webhooks extends PostServeAction { + + private final ScheduledExecutorService scheduler; + private final HttpClient httpClient; + + public Webhooks() { + scheduler = Executors.newScheduledThreadPool(10); + httpClient = HttpClientFactory.createClient(); + } + + @Override + public String getName() { + return "webhook"; + } + + @Override + public void doAction(ServeEvent serveEvent, Admin admin, Parameters parameters) { + final WebhookDefinition definition = parameters.as(WebhookDefinition.class); + final Notifier notifier = notifier(); + + scheduler.schedule( + new Runnable() { + @Override + public void run() { + HttpUriRequest request = buildRequest(definition); + + try { + HttpResponse response = httpClient.execute(request); + notifier.info( + String.format("Webhook %s request to %s returned status %s\n\n%s", + definition.getMethod(), + definition.getUrl(), + response.getStatusLine(), + EntityUtils.toString(response.getEntity()) + ) + ); + System.out.println(String.format("Webhook 
%s request to %s returned status %s\n\n%s", + definition.getMethod(), + definition.getUrl(), + response.getStatusLine(), + EntityUtils.toString(response.getEntity()) + ) + ); + } catch (IOException e) { + throwUnchecked(e); + } + } + }, + 0L, + SECONDS + ); + } + + private static HttpUriRequest buildRequest(WebhookDefinition definition) { + HttpUriRequest request = getHttpRequestFor( + definition.getMethod(), + definition.getUrl().toString() + ); + + for (HttpHeader header: definition.getHeaders().all()) { + request.addHeader(header.key(), header.firstValue()); + } + + if (definition.getMethod().hasEntity()) { + HttpEntityEnclosingRequestBase entityRequest = (HttpEntityEnclosingRequestBase) request; + entityRequest.setEntity(new ByteArrayEntity(definition.getBinaryBody())); + } + + return request; + } + + public static WebhookDefinition webhook() { + return new WebhookDefinition(); + } +} diff --git a/test/mock/src/main/resources/application.properties b/test/mock/src/main/resources/application.properties new file mode 100644 index 000000000..51ad5ebf4 --- /dev/null +++ b/test/mock/src/main/resources/application.properties @@ -0,0 +1 @@ +server.port=9090 diff --git a/test/mock/src/test/java/org/onap/integration/test/mock/MockApplicationTests.java b/test/mock/src/test/java/org/onap/integration/test/mock/MockApplicationTests.java new file mode 100644 index 000000000..8d2a04625 --- /dev/null +++ b/test/mock/src/test/java/org/onap/integration/test/mock/MockApplicationTests.java @@ -0,0 +1,16 @@ +package org.onap.integration.test.mock; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest +public class MockApplicationTests { + + @Test + public void contextLoads() { + } + +} |
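The Webhooks extension registers itself under the name "webhook", so a recorded or hand-written stub can fire an outbound HTTP call after WireMock serves a response; WebhookDefinition supplies the method, url, headers and body of that call. A rough sketch of registering such a stub through WireMock's admin API, assuming the mock is running locally with WireMock on port 9999 as configured in MockApplication; the stub path, the callback target and the exact postServeActions layout (which varies by WireMock version) are assumptions:

    # Hypothetical stub with a webhook post-serve action; /callbacks/notify and the target URL are made up.
    curl -s -X POST http://localhost:9999/__admin/mappings -d '{
      "request":  { "method": "POST", "urlPath": "/callbacks/notify" },
      "response": { "status": 200 },
      "postServeActions": {
        "webhook": {
          "method": "POST",
          "url": "http://localhost:9090/listener",
          "headers": { "Content-Type": "application/json" },
          "body": "{ \"status\": \"done\" }"
        }
      }
    }'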