Diffstat (limited to 'test/csit')
-rw-r--r--  test/csit/plans/vvp/sanity/setup.sh                             |  2
-rw-r--r--  test/csit/scripts/vvp/clone_and_setup_vvp_data.sh               | 92
-rw-r--r--  test/csit/scripts/vvp/docker_health.sh                          |  5
-rw-r--r--  test/csit/scripts/vvp/kill_containers_and_remove_dataFolders.sh | 10
-rw-r--r--  test/csit/scripts/vvp/start_vvp_containers.sh                   | 36
-rw-r--r--  test/csit/tests/vvp/sanity/test1.robot                          |  4
6 files changed, 110 insertions(+), 39 deletions(-)
diff --git a/test/csit/plans/vvp/sanity/setup.sh b/test/csit/plans/vvp/sanity/setup.sh
index e7758462c..cab074c06 100644
--- a/test/csit/plans/vvp/sanity/setup.sh
+++ b/test/csit/plans/vvp/sanity/setup.sh
@@ -28,7 +28,7 @@ source ${WORKSPACE}/test/csit/scripts/vvp/docker_health.sh
source ${WORKSPACE}/test/csit/scripts/vvp/start_vvp_sanity.sh
-VVP_IP=`get-instance-ip.sh vvp-engagementmgr`
+VVP_IP=`${WORKSPACE}/test/csit/scripts/get-instance-ip.sh vvp-engagementmgr`
echo VVP_IP=${VVP_IP}
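
The fix above invokes get-instance-ip.sh by its full path under ${WORKSPACE}/test/csit/scripts, so setup no longer depends on the helper being on PATH. Helpers of this kind typically just wrap docker inspect; a minimal sketch of such a script, which may differ from the actual one in the repo:

    #!/bin/bash
    # Print the IP address of the named container on its attached network(s).
    docker inspect --format '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$1"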
diff --git a/test/csit/scripts/vvp/clone_and_setup_vvp_data.sh b/test/csit/scripts/vvp/clone_and_setup_vvp_data.sh
index aae8d94fe..a39ed083f 100644
--- a/test/csit/scripts/vvp/clone_and_setup_vvp_data.sh
+++ b/test/csit/scripts/vvp/clone_and_setup_vvp_data.sh
@@ -27,6 +27,7 @@ echo "This is ${WORKSPACE}/test/csit/scripts/vvp/clone_and_setup_vvp_data.sh"
# Clone vvp environment template
mkdir -p ${WORKSPACE}/data/environments/
mkdir -p ${WORKSPACE}/data/clone/
+mkdir -p /opt/configmaps/settings/
cd ${WORKSPACE}/data/clone
git clone --depth 1 http://gerrit.onap.org/r/vvp/engagementmgr -b master
@@ -34,33 +35,74 @@ git clone --depth 1 http://gerrit.onap.org/r/vvp/engagementmgr -b master
chmod -R 775 ${WORKSPACE}/data/
# copy settings file from tox environment infrastructure:
-cp -rf ${WORKSPACE}/data/clone/engagementmgr/django/vvp/settings/tox_settings.py ${WORKSPACE}/data/clone/engagementmgr/django/vvp/settings/__init__.py
+cp -f ${WORKSPACE}/data/clone/engagementmgr/django/vvp/settings/tox_settings.py /opt/configmaps/settings/__init__.py
+
+# uwsgi.ini file creation
+echo "[uwsgi]
+http = :80
+plugin = python
+chdir = /srv
+module = vvp.wsgi:application
+master = True
+pidfile = /tmp/project-master.pid
+vacuum = True
+max-requests = 5000
+enable-threads = True
+stats = 0.0.0.0:9000
+stats-http = True" > /opt/configmaps/settings/uwsgi.ini
+
+# storage.py file creation
+echo "from storages.backends.s3boto import S3BotoStorage
+from django.conf import settings
+class S3StaticStorage(S3BotoStorage):
+ custom_domain = '%s/%s' % (settings.AWS_S3_HOST, settings.STATIC_BUCKET)
+ bucket_name = settings.STATIC_BUCKET
+class S3MediaStorage(S3BotoStorage):
+ custom_domain = '%s/%s' % (settings.AWS_S3_HOST, settings.MEDIA_BUCKET)
+ bucket_name = settings.MEDIA_BUCKET" > /opt/configmaps/settings/storage.py
+
+# envbool.py file creation
+echo "import os
+def envbool(key, default=False, unknown=True):
+ return {'true': True, '1': True, 'false': False, '0': False,
+ '': default,}.get(os.getenv(key, '').lower(), unknown)" > /opt/configmaps/settings/envbool.py
+
+# vvp_env.list file creation
echo "# set enviroment variables
-DJANGO_SETTINGS_MODULE='vvp.settings.tox_settings'
+OAUTHLIB_INSECURE_TRANSPORT=1
+HOST_IP=${IP}
+ENVNAME=${ENVIRONMENT}
+http_proxy=${http_proxy}
+https_proxy=${https_proxy}
+no_proxy=${no_proxy}
+DJANGO_SETTINGS_MODULE=vvp.settings
# export PYTHONPATH={pwd}
-SECRET_KEY='6mo22&_gtjf#wktqf1#ve^7=w6kx)uq0u*4ksk^aq8lte&)yul'
-ENVIRONMENT='development'
-PROGRAM_NAME_URL_PREFIX='vvp'
-EMAIL_HOST='localhost'
-EMAIL_HOST_PASSWORD=''
-EMAIL_HOST_USER=''
-EMAIL_PORT='25'
-PGDATABASE='icedb'
-PGUSER='iceuser'
-PGPASSWORD='Aa123456'
-PGHOST='localhost'
-PGPORT='5433'
-SECRET_WEBHOOK_TOKEN='Aiwi8se4ien0foW6eimahch2zahshaGi'
-SECRET_GITLAB_AUTH_TOKEN='ieNgathapoo4zohvee9a'
-SECRET_JENKINS_PASSWORD='xaiyie0wuoqueuBu'
-SECRET_CMS_APP_CLIENT_ID='MHmJo0ccDheVVsIiQHZnY6LXPAC6H6HAMzhCCM16'
-SECRET_CMS_APP_CLIENT_SECRET='nI8QCFrKMpnw5nTs'
-SLACK_API_TOKEN=''
-S3_HOST='dev-s3.d2ice.att.io'
-S3_PORT='443'
-AWS_ACCESS_KEY_ID='FD21HBU2KRN3UVD1MWRN'
-AWS_SECRET_ACCESS_KEY='TKoiwxziUWG9cTYUknUkFGmmyuQ27nP2lCiutEsD'
-STATIC_ROOT='/app/htdocs'" > ${WORKSPACE}/data/environments/vvp_env
+SECRET_KEY=6mo22&FAKEFALEFALEFKEuq0u*4ksk^aq8lte&)yul
+ENVIRONMENT=development
+SERVICE_PROVIDER=ExampleProvider
+PROGRAM_NAME=VVP
+PROGRAM_NAME_URL_PREFIX=vvp
+SERVICE_PROVIDER_DOMAIN=example-domain.com
+EMAIL_HOST=localhost
+EMAIL_HOST_PASSWORD=
+EMAIL_HOST_USER=
+EMAIL_PORT=25
+PGDATABASE=icedb
+PGUSER=iceuser
+PGPASSWORD=Aa123456
+PGHOST=localhost
+PGPORT=5433
+SECRET_WEBHOOK_TOKEN=AiwiFAKEFAKEFAKEmahch2zahshaGi
+SECRET_GITLAB_AUTH_TOKEN=ieNgFAKEFAKE4zohvee9a
+SECRET_JENKINS_PASSWORD=xaiyiFAKEFAKEqueuBu
+SECRET_CMS_APP_CLIENT_ID=MHmJo0ccDhFAKEFAKEFAKEPAC6H6HAMzhCCM16
+SECRET_CMS_APP_CLIENT_SECRET=nI8QFAKEEEpnw5nTs
+SLACK_API_TOKEN=
+S3_HOST=localhost
+S3_PORT=443
+AWS_ACCESS_KEY_ID=FD2FAKEFAKEFAKEVD1MWRN
+AWS_SECRET_ACCESS_KEY=TKoiwxzFAKEFAKEFAKEFAKEFAKEQ27nP2lCiutEsD
+STATIC_ROOT=/app/htdocs" > ${WORKSPACE}/data/environments/vvp_env.list
ifconfig
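
The vvp_env.list generated above uses docker --env-file syntax: plain KEY=value pairs with no quoting and no export, which is why the quotes from the old tox-style file were dropped. A quick sanity check before handing the file to docker run (hypothetical, assuming a small image such as alpine is available):

    # Confirm the env file parses and a couple of expected keys come through.
    docker run --rm --env-file ${WORKSPACE}/data/environments/vvp_env.list \
        alpine env | grep -E '^(PGHOST|DJANGO_SETTINGS_MODULE)='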
diff --git a/test/csit/scripts/vvp/docker_health.sh b/test/csit/scripts/vvp/docker_health.sh
index d511a378f..520b2dc3a 100644
--- a/test/csit/scripts/vvp/docker_health.sh
+++ b/test/csit/scripts/vvp/docker_health.sh
@@ -25,9 +25,8 @@
echo "VVP-Engagement-Manager health-Check:"
echo ""
echo ""
-res=`curl -s -X GET -H "Accept: application/json" -H "Content-Type: application/json" -H "http://localhost:8000/vvp/v1/engmgr/vendors" | wc -l`
-if [[ ${res} == 0 ]]
-then
+res=`curl -s -X GET -H "Accept: application/json" -H "Content-Type: application/json" "http://localhost:9090/vvp/v1/engmgr/vendors" | wc -w`
+if [ "${res}" -eq 0 ]; then
echo "Error [${res}] while performing vvp engagement manager vendor existence check"
exit 1
fi
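
The reworked probe counts words in the response body (wc -w), so an empty reply, or a connection failure where curl -s prints nothing, yields 0 and trips the error branch; the old version also mistakenly passed the URL as a header via -H. An alternative, sketched here as an assumption rather than what the suite does, is to assert on the HTTP status instead of the body:

    # Alternative probe: fail unless the endpoint answers 200.
    code=$(curl -s -o /dev/null -w '%{http_code}' "http://localhost:9090/vvp/v1/engmgr/vendors")
    if [ "$code" != "200" ]; then
        echo "Health check failed with HTTP status [$code]"
        exit 1
    fi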
diff --git a/test/csit/scripts/vvp/kill_containers_and_remove_dataFolders.sh b/test/csit/scripts/vvp/kill_containers_and_remove_dataFolders.sh
index 5b91c67ec..a6c108ebc 100644
--- a/test/csit/scripts/vvp/kill_containers_and_remove_dataFolders.sh
+++ b/test/csit/scripts/vvp/kill_containers_and_remove_dataFolders.sh
@@ -19,11 +19,13 @@
echo "This is ${WORKSPACE}/test/csit/scripts/vvp/kill_and_remove_dataFolder.sh"
+
+CONTAINER_NAME="vvp-engagementmgr"
+
#kill and remove all vvp dockers
-docker stop $(docker ps -a -q --filter="name=vvp")
-docker rm $(docker ps -a -q --filter="name=vvp")
+docker stop $CONTAINER_NAME
+docker rm -f $CONTAINER_NAME
#delete data folder
-#TODO: Remove the comment:
-#rm -rf ${WORKSPACE}/data/*
+rm -rf ${WORKSPACE}/data/*
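
Note that docker rm -f already stops a running container, so the explicit docker stop is redundant but harmless; both commands do fail if the container was never created. A hedged variant that keeps teardown idempotent across repeated or aborted runs:

    # Tolerate a missing container so a re-run of teardown does not fail the job.
    docker rm -f "$CONTAINER_NAME" 2>/dev/null || true
    rm -rf ${WORKSPACE}/data/*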
diff --git a/test/csit/scripts/vvp/start_vvp_containers.sh b/test/csit/scripts/vvp/start_vvp_containers.sh
index d6cb88ebf..5f905b6ad 100644
--- a/test/csit/scripts/vvp/start_vvp_containers.sh
+++ b/test/csit/scripts/vvp/start_vvp_containers.sh
@@ -29,8 +29,23 @@ export PREFIX='nexus3.onap.org:10001/openecomp/vvp'
export RELEASE='latest'
#start Engagement Manager pod:
-docker run --detach --name vvp-engagementmgr --env HOST_IP=${IP} --env ENVNAME="${ENVIRONMENT}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --env-file ${WORKSPACE}/data/environments/vvp_env --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 4g --memory-swap=4g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/engagementmgr/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --volume ${WORKSPACE}/data/clone/engagementmgr/django/vvp/settings:/opt/configmaps/settings/ --publish 8443:8443 --publish 8000:8000 ${PREFIX}/engagementmgr:${RELEASE}
+docker run \
+--detach \
+--entrypoint="" \
+--name vvp-engagementmgr \
+--env-file ${WORKSPACE}/data/environments/vvp_env.list \
+--log-driver=json-file \
+--log-opt max-size=100m \
+--log-opt max-file=10 \
+--ulimit memlock=-1:-1 \
+--memory 4g \
+--memory-swap=4g \
+--ulimit nofile=4096:100000 \
+--volume /etc/localtime:/etc/localtime:ro \
+--volume /opt/configmaps/settings:/opt/configmaps/settings/ \
+--publish 9090:80 ${PREFIX}/engagementmgr:${RELEASE}
+docker cp /opt/configmaps/settings/uwsgi.ini vvp-engagementmgr:/srv/vvp/settings/
echo "please wait while Engagement Manager is starting..."
echo ""
@@ -43,11 +58,23 @@ while [ $c -gt 0 ]; do
done
echo -e ""
+#run migration again:
+docker exec -d vvp-engagementmgr sh -c "python3 /srv/manage.py migrate"
+
+#run initial populate db again:
+docker exec -d vvp-engagementmgr sh -c "python3 /srv/manage.py initial_populate_db"
+
+
+echo "Will copy the generated DB sqlite3 file into the application directory in 30 seconds..."
+sleep 30
+#copy the generated DB sqlite3 file into the application directory:
+docker exec -d vvp-engagementmgr sh -c "cp emdb.db /srv/emdb.db -f"
+
TIME_OUT=600
INTERVAL=5
TIME=0
while [ "$TIME" -lt "$TIME_OUT" ]; do
- response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://localhost:8000/vvp/v1/engmgr/vendors); echo $response
+ response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://localhost:9090/vvp/v1/engmgr/vendors); echo $response
if [ "$response" == "200" ]; then
echo VVP-Engagement-Manager started successfully in $TIME seconds
@@ -61,5 +88,6 @@ done
if [ "$TIME" -ge "$TIME_OUT" ]; then
echo TIME OUT: Docker containers not started in $TIME_OUT seconds... Could cause problems for tests...
-
-
+else
+ echo "Done starting vvp containers!"
+fi
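
Because docker exec -d detaches immediately, the migrate and initial_populate_db steps above run in the background, and the fixed 30-second sleep is a race-condition workaround rather than a guarantee. If the image tolerates it (an assumption, not verified here), running the same commands in the foreground blocks until each finishes and removes the need for the sleep:

    # Foreground variant: each step completes before the next starts.
    docker exec vvp-engagementmgr sh -c "python3 /srv/manage.py migrate"
    docker exec vvp-engagementmgr sh -c "python3 /srv/manage.py initial_populate_db"
    docker exec vvp-engagementmgr sh -c "cp -f emdb.db /srv/emdb.db"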
diff --git a/test/csit/tests/vvp/sanity/test1.robot b/test/csit/tests/vvp/sanity/test1.robot
index 21e1b5e7a..27612fdb8 100644
--- a/test/csit/tests/vvp/sanity/test1.robot
+++ b/test/csit/tests/vvp/sanity/test1.robot
@@ -4,13 +4,13 @@ Library OperatingSystem
Library RequestsLibrary
Library json
-# http://localhost:8000/vvp/v1/engmgr/vendors
+# http://localhost:9090/vvp/v1/engmgr/vendors
# vvp-engagementmgr
*** Test Cases ***
Get Requests health check ok
[Tags] get
- CreateSession vvp-engagementmgr http://localhost:8000
+ CreateSession vvp-engagementmgr http://localhost:9090
${headers}= Create Dictionary Accept=application/json Content-Type=application/json
${resp}= Get Request vvp-engagementmgr /vvp/v1/engmgr/vendors headers=&{headers}
Should Be Equal As Strings ${resp.status_code} 200
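
To exercise this suite outside CI, assuming Robot Framework and RequestsLibrary are installed and the vvp-engagementmgr container is listening on port 9090:

    robot test/csit/tests/vvp/sanity/test1.robot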