aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.gitattributes7
-rw-r--r--deployment/heat/onap-oom/env/gwu/onap.env2
-rw-r--r--deployment/heat/onap-oom/env/huawei/onap-beijing-oom.env2
-rw-r--r--deployment/heat/onap-oom/env/tlab/ETE-HEAT-Test.env2
-rw-r--r--deployment/heat/onap-oom/env/tlab/ETE-OOM-Test.env2
-rw-r--r--deployment/heat/onap-oom/env/tlab/EXTONAP_DEV.env2
-rw-r--r--deployment/heat/onap-oom/env/windriver/Integration-Jenkins.env2
-rw-r--r--deployment/heat/onap-oom/env/windriver/Integration-SB-03.env2
-rw-r--r--deployment/heat/onap-oom/env/windriver/Integration-SB-04.env2
-rw-r--r--deployment/heat/onap-oom/env/windriver/Integration-SB-05.env2
-rw-r--r--deployment/heat/onap-oom/env/windriver/Integration-SB-06.env2
-rw-r--r--deployment/heat/onap-oom/env/windriver/Integration-SB-07.env2
-rw-r--r--deployment/heat/onap-oom/k8s_vm_entrypoint.sh91
-rwxr-xr-xdeployment/heat/onap-oom/scripts/deploy.sh5
-rw-r--r--test/csit/plans/aaf/aafapi/setup.sh22
-rwxr-xr-xtest/csit/plans/aaf/sms-test-plan/setup.sh71
-rw-r--r--test/csit/plans/aaf/sms-test-plan/teardown.sh25
-rw-r--r--test/csit/plans/aaf/sms-test-plan/testplan.txt3
-rw-r--r--test/csit/plans/aai/resources/aai-resources/appconfig/janusgraph-cached.properties (renamed from test/csit/plans/aai/resources/aai-resources/appconfig/titan-cached.properties)6
-rw-r--r--test/csit/plans/aai/resources/aai-resources/appconfig/janusgraph-realtime.properties (renamed from test/csit/plans/aai/traversal/aai-resources/appconfig/titan-realtime.properties)6
-rw-r--r--test/csit/plans/aai/resources/aai-traversal/appconfig/janusgraph-cached.properties (renamed from test/csit/plans/aai/traversal/aai-resources/appconfig/titan-cached.properties)6
-rw-r--r--test/csit/plans/aai/resources/aai-traversal/appconfig/janusgraph-realtime.properties (renamed from test/csit/plans/aai/resources/aai-resources/appconfig/titan-realtime.properties)6
-rw-r--r--test/csit/plans/aai/resources/docker-compose.yml27
-rw-r--r--test/csit/plans/aai/resources/setup.sh8
-rw-r--r--test/csit/plans/aai/traversal/aai-resources/appconfig/janusgraph-cached.properties (renamed from test/csit/plans/aai/resources/aai-traversal/appconfig/titan-cached.properties)6
-rw-r--r--test/csit/plans/aai/traversal/aai-resources/appconfig/janusgraph-realtime.properties (renamed from test/csit/plans/aai/traversal/aai-traversal/appconfig/titan-realtime.properties)6
-rw-r--r--test/csit/plans/aai/traversal/aai-traversal/appconfig/janusgraph-cached.properties (renamed from test/csit/plans/aai/traversal/aai-traversal/appconfig/titan-cached.properties)6
-rw-r--r--test/csit/plans/aai/traversal/aai-traversal/appconfig/janusgraph-realtime.properties (renamed from test/csit/plans/aai/resources/aai-traversal/appconfig/titan-realtime.properties)6
-rw-r--r--test/csit/plans/aai/traversal/docker-compose.yml27
-rw-r--r--test/csit/plans/aai/traversal/setup.sh10
-rwxr-xr-xtest/csit/plans/appc/healthcheck/bundle_query.sh8
-rwxr-xr-xtest/csit/plans/appc/healthcheck/health_check.sh2
-rwxr-xr-xtest/csit/plans/appc/healthcheck/setup.sh20
-rwxr-xr-xtest/csit/plans/dmaap/mrpubsub/setup.sh2
-rw-r--r--test/csit/plans/holmes-rule-management/sanity-check/setup.sh6
-rw-r--r--test/csit/plans/multicloud-vmware/functionality1/testplan.txt4
-rwxr-xr-xtest/csit/plans/multicloud/functionality1/setup.sh4
-rwxr-xr-xtest/csit/plans/multicloud/functionality1/teardown.sh1
-rw-r--r--test/csit/plans/music/music-distributed-kv-store-test-plan/setup.sh60
-rw-r--r--test/csit/plans/music/music-distributed-kv-store-test-plan/teardown.sh21
-rw-r--r--test/csit/plans/music/music-distributed-kv-store-test-plan/testplan.txt3
-rwxr-xr-xtest/csit/plans/music/music-test-plan/setup.sh30
-rw-r--r--test/csit/plans/portal-sdk/testsuite/.env29
-rw-r--r--test/csit/plans/portal-sdk/testsuite/docker-compose.yml127
-rw-r--r--test/csit/plans/portal-sdk/testsuite/setup.sh24
-rw-r--r--test/csit/plans/portal/testsuite/.env29
-rw-r--r--test/csit/plans/portal/testsuite/docker-compose.yml127
-rw-r--r--test/csit/plans/portal/testsuite/setup.sh26
-rw-r--r--test/csit/plans/sdc/uiSanity/setup.sh2
-rw-r--r--test/csit/plans/so/integration-testing/setup.sh (renamed from test/csit/plans/so/sanity-check/setup.sh)0
-rw-r--r--test/csit/plans/so/integration-testing/teardown.sh (renamed from test/csit/plans/so/sanity-check/teardown.sh)0
-rw-r--r--test/csit/plans/so/integration-testing/testplan.txt (renamed from test/csit/plans/so/sanity-check/testplan.txt)0
-rw-r--r--test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/setup.sh44
-rw-r--r--test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/teardown.sh22
-rw-r--r--test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/testplan.txt3
-rw-r--r--test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt6
-rw-r--r--test/csit/plans/vnfsdk-ice/sanity-check/setup.sh2
-rwxr-xr-xtest/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh2
-rwxr-xr-xtest/csit/scripts/clamp/start_clamp_containers.sh2
-rw-r--r--test/csit/scripts/optf-has/has/has-properties/conductor.conf.onap6
-rwxr-xr-xtest/csit/scripts/optf-has/has/has_script.sh26
-rwxr-xr-xtest/csit/scripts/optf-has/has/has_teardown_script.sh5
-rwxr-xr-xtest/csit/scripts/optf-has/has/music_script.sh27
-rwxr-xr-xtest/csit/scripts/optf-has/has/music_teardown_script.sh2
-rwxr-xr-xtest/csit/scripts/optf-has/has/simulator_script.sh26
-rwxr-xr-xtest/csit/scripts/optf-has/has/simulator_teardown_script.sh3
-rwxr-xr-xtest/csit/scripts/policy/script1.sh17
-rw-r--r--test/csit/tests/aaf/aaf-sms-suite/__init__.robot2
-rw-r--r--test/csit/tests/aaf/aaf-sms-suite/aaf-sms-test.robot94
-rw-r--r--test/csit/tests/aaf/aaf-sms-suite/data/create_domain.json3
-rw-r--r--test/csit/tests/aaf/aaf-sms-suite/data/create_secret.json12
-rw-r--r--test/csit/tests/clamp/APIs/01__Create_CL_Holmes.robot11
-rw-r--r--test/csit/tests/clamp/APIs/02__Create_CL_TCA.robot11
-rw-r--r--test/csit/tests/clamp/APIs/03__VariousApis.robot11
-rw-r--r--test/csit/tests/clamp/APIs/04__Verify_API_Models.robot14
-rw-r--r--test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot8
-rw-r--r--test/csit/tests/clamp/UIs/02__Create_TCA_model.robot22
-rw-r--r--test/csit/tests/clamp/UIs/03__Verify_UI_Models.robot10
-rw-r--r--test/csit/tests/clamp/UIs/04__Submit_deploy_chain_Holmes.robot130
-rw-r--r--test/csit/tests/clamp/UIs/05__Submit_deploy_chain_TCA.robot130
-rw-r--r--test/csit/tests/dcaegen2/testcases/resources/DMaaP.py844
-rw-r--r--test/csit/tests/dcaegen2/testcases/resources/DcaeLibrary.py318
-rw-r--r--test/csit/tests/dcaegen2/testcases/resources/dcae_keywords.robot266
-rw-r--r--test/csit/tests/dcaegen2/testcases/resources/dcae_properties.robot30
-rw-r--r--test/csit/tests/multicloud-vmware/hosts/sanity-host.robot24
-rw-r--r--test/csit/tests/multicloud-vmware/images/sanity-image.robot24
-rw-r--r--test/csit/tests/multicloud-vmware/networks/sanity-network.robot24
-rw-r--r--test/csit/tests/multicloud-vmware/provision/jsoninput/image_file.json7
-rw-r--r--test/csit/tests/multicloud-vmware/provision/sanity_test_image.robot34
-rw-r--r--test/csit/tests/multicloud-vmware/samples/sanity-sample.robot25
-rw-r--r--test/csit/tests/multicloud/provision/data/capacity.json6
-rw-r--r--test/csit/tests/multicloud/provision/sanity_test_multivim.robot11
-rw-r--r--test/csit/tests/music/music-distributed-kv-store-suite/__init__.robot2
-rw-r--r--test/csit/tests/music/music-distributed-kv-store-suite/data/register_domain.json3
-rw-r--r--test/csit/tests/music/music-distributed-kv-store-suite/music-distributed-kv-store-test.robot53
-rw-r--r--test/csit/tests/optf-has/has/data/healthcheck.json19
-rw-r--r--test/csit/tests/optf-has/has/data/onboard.json6
-rw-r--r--test/csit/tests/optf-has/has/data/plan_with_lati_and_longi.json41
-rw-r--r--test/csit/tests/optf-has/has/data/plan_with_short_distance_constraint.json64
-rw-r--r--test/csit/tests/optf-has/has/data/plan_with_wrong_distance_constraint.json63
-rw-r--r--test/csit/tests/optf-has/has/data/plan_with_wrong_version.json202
-rw-r--r--test/csit/tests/optf-has/has/data/plan_without_demand_section.json120
-rw-r--r--test/csit/tests/optf-has/has/optf_has_test.robot182
-rw-r--r--test/csit/tests/policy/suite1/Policy-CSIT.robot50
-rw-r--r--test/csit/tests/policy/suite1/getoofpolicy.template6
-rw-r--r--test/csit/tests/policy/suite1/oofpolicy_HPA_R1.template6
-rw-r--r--test/csit/tests/portal-sdk/testsuites/test1.robot204
-rw-r--r--test/csit/tests/portal/testsuites/test1.robot337
-rw-r--r--test/csit/tests/sdc/uiSanity/__init__.robot2
-rw-r--r--test/csit/tests/sdc/uiSanity/test1.robot16
-rw-r--r--test/csit/tests/vfc/nfvo-multivimproxy/test.robot24
-rw-r--r--test/csit/tests/vfc/nfvo-wfengine/workflow.robot226
-rw-r--r--test/csit/tests/vnfsdk-pkgtools/tosca-metadata/csar/test_entry.mf4
-rw-r--r--test/ete/labs/tlab/onap-openstack-template.env5
-rw-r--r--test/ete/labs/windriver/onap-openstack-template.env5
-rwxr-xr-xtest/ete/scripts/deploy-onap.sh8
-rwxr-xr-xtest/vcpe/config_sdnc_so.py89
-rwxr-xr-xtest/vcpe/csar_parser.py231
-rwxr-xr-xtest/vcpe/get_info.py26
-rwxr-xr-xtest/vcpe/healthcheck.py30
-rwxr-xr-xtest/vcpe/loop.py37
-rwxr-xr-xtest/vcpe/preload.py216
-rwxr-xr-xtest/vcpe/soutils.py318
-rwxr-xr-xtest/vcpe/vcpe.py207
-rwxr-xr-xtest/vcpe/vcpe_custom_service.py80
-rwxr-xr-xtest/vcpe/vcpecommon.py414
-rw-r--r--version-manifest/pom.xml10
-rw-r--r--version-manifest/src/main/resources/docker-manifest.csv125
-rw-r--r--version-manifest/src/main/resources/java-manifest.csv136
-rwxr-xr-xversion-manifest/src/main/scripts/check-sorted.sh12
-rw-r--r--version.properties2
131 files changed, 4931 insertions, 1805 deletions
diff --git a/.gitattributes b/.gitattributes
index 4048784af..6313b56c5 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,6 +1 @@
-*.java text eol=lf
-*.xml text eol=lf
-*.sh text eol=lf
-*.csv text eol=lf
-*.yaml text eol=lf
-*.json text eol=lf
+* text=auto eol=lf
diff --git a/deployment/heat/onap-oom/env/gwu/onap.env b/deployment/heat/onap-oom/env/gwu/onap.env
index 3ca447d70..1a9aa9a82 100644
--- a/deployment/heat/onap-oom/env/gwu/onap.env
+++ b/deployment/heat/onap-oom/env/gwu/onap.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 192.168.1.51:3142
docker_proxy: 192.168.1.51:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m2.xxlarge
public_net_id: 024582bd-ef9b-48b9-9e70-e6732559d9df
diff --git a/deployment/heat/onap-oom/env/huawei/onap-beijing-oom.env b/deployment/heat/onap-oom/env/huawei/onap-beijing-oom.env
index 0365c751c..88d6b7492 100644
--- a/deployment/heat/onap-oom/env/huawei/onap-beijing-oom.env
+++ b/deployment/heat/onap-oom/env/huawei/onap-beijing-oom.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 10.145.122.118:3142
docker_proxy: 10.145.122.118:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m2.xxlarge
public_net_id: 3a6247f1-fac6-4167-a49f-33cc8415ccf4
diff --git a/deployment/heat/onap-oom/env/tlab/ETE-HEAT-Test.env b/deployment/heat/onap-oom/env/tlab/ETE-HEAT-Test.env
index 442d9fd76..405008380 100644
--- a/deployment/heat/onap-oom/env/tlab/ETE-HEAT-Test.env
+++ b/deployment/heat/onap-oom/env/tlab/ETE-HEAT-Test.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 192.168.31.204:3142
docker_proxy: 192.168.31.204:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m2.xxlarge
public_net_id: fbe8fd92-6636-4e63-ab28-bb6a5b0888a9
diff --git a/deployment/heat/onap-oom/env/tlab/ETE-OOM-Test.env b/deployment/heat/onap-oom/env/tlab/ETE-OOM-Test.env
index 3cd226113..63d2f671a 100644
--- a/deployment/heat/onap-oom/env/tlab/ETE-OOM-Test.env
+++ b/deployment/heat/onap-oom/env/tlab/ETE-OOM-Test.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 192.168.31.204:3142
docker_proxy: 192.168.31.204:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m2.xxlarge
public_net_id: fbe8fd92-6636-4e63-ab28-bb6a5b0888a9
diff --git a/deployment/heat/onap-oom/env/tlab/EXTONAP_DEV.env b/deployment/heat/onap-oom/env/tlab/EXTONAP_DEV.env
index 2d360eaca..cb9244299 100644
--- a/deployment/heat/onap-oom/env/tlab/EXTONAP_DEV.env
+++ b/deployment/heat/onap-oom/env/tlab/EXTONAP_DEV.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 192.168.31.204:3142
docker_proxy: 192.168.31.204:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m2.xxlarge
public_net_id: fbe8fd92-6636-4e63-ab28-bb6a5b0888a9
diff --git a/deployment/heat/onap-oom/env/windriver/Integration-Jenkins.env b/deployment/heat/onap-oom/env/windriver/Integration-Jenkins.env
index 846d77b6e..22a5bdeb2 100644
--- a/deployment/heat/onap-oom/env/windriver/Integration-Jenkins.env
+++ b/deployment/heat/onap-oom/env/windriver/Integration-Jenkins.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 10.12.5.2:3142
docker_proxy: 10.12.5.2:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m1.xxlarge
public_net_id: 971040b2-7059-49dc-b220-4fab50cb2ad4
diff --git a/deployment/heat/onap-oom/env/windriver/Integration-SB-03.env b/deployment/heat/onap-oom/env/windriver/Integration-SB-03.env
index 98c6c5f25..350bd475a 100644
--- a/deployment/heat/onap-oom/env/windriver/Integration-SB-03.env
+++ b/deployment/heat/onap-oom/env/windriver/Integration-SB-03.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 10.12.5.2:3142
docker_proxy: 10.12.5.2:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m1.xxlarge
public_net_id: 971040b2-7059-49dc-b220-4fab50cb2ad4
diff --git a/deployment/heat/onap-oom/env/windriver/Integration-SB-04.env b/deployment/heat/onap-oom/env/windriver/Integration-SB-04.env
index ea7aa7ddb..4e6e1cdfd 100644
--- a/deployment/heat/onap-oom/env/windriver/Integration-SB-04.env
+++ b/deployment/heat/onap-oom/env/windriver/Integration-SB-04.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 10.12.5.2:3142
docker_proxy: 10.12.5.2:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m1.xxlarge
public_net_id: 971040b2-7059-49dc-b220-4fab50cb2ad4
diff --git a/deployment/heat/onap-oom/env/windriver/Integration-SB-05.env b/deployment/heat/onap-oom/env/windriver/Integration-SB-05.env
index 5369182b4..47147ed3a 100644
--- a/deployment/heat/onap-oom/env/windriver/Integration-SB-05.env
+++ b/deployment/heat/onap-oom/env/windriver/Integration-SB-05.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 10.12.5.2:3142
docker_proxy: 10.12.5.2:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m1.xxlarge
public_net_id: 971040b2-7059-49dc-b220-4fab50cb2ad4
diff --git a/deployment/heat/onap-oom/env/windriver/Integration-SB-06.env b/deployment/heat/onap-oom/env/windriver/Integration-SB-06.env
index 4203c79d8..6cf405386 100644
--- a/deployment/heat/onap-oom/env/windriver/Integration-SB-06.env
+++ b/deployment/heat/onap-oom/env/windriver/Integration-SB-06.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 10.12.5.2:3142
docker_proxy: 10.12.5.2:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m1.xxlarge
public_net_id: 971040b2-7059-49dc-b220-4fab50cb2ad4
diff --git a/deployment/heat/onap-oom/env/windriver/Integration-SB-07.env b/deployment/heat/onap-oom/env/windriver/Integration-SB-07.env
index efefa3888..10b691e89 100644
--- a/deployment/heat/onap-oom/env/windriver/Integration-SB-07.env
+++ b/deployment/heat/onap-oom/env/windriver/Integration-SB-07.env
@@ -13,7 +13,7 @@ parameters:
apt_proxy: 10.12.5.2:3142
docker_proxy: 10.12.5.2:5000
- rancher_vm_flavor: m1.small
+ rancher_vm_flavor: m1.large
k8s_vm_flavor: m1.xxlarge
public_net_id: 971040b2-7059-49dc-b220-4fab50cb2ad4
diff --git a/deployment/heat/onap-oom/k8s_vm_entrypoint.sh b/deployment/heat/onap-oom/k8s_vm_entrypoint.sh
index 119e40a04..d483e73df 100644
--- a/deployment/heat/onap-oom/k8s_vm_entrypoint.sh
+++ b/deployment/heat/onap-oom/k8s_vm_entrypoint.sh
@@ -19,7 +19,7 @@ Acquire::https::Proxy "DIRECT";
EOF
fi
apt-get -y update
-apt-get -y install linux-image-extra-$(uname -r) jq
+apt-get -y install linux-image-extra-$(uname -r) jq make
cd ~
@@ -34,13 +34,9 @@ sudo mv ./kubectl /usr/local/bin/kubectl
mkdir ~/.kube
# install helm
-wget -q http://storage.googleapis.com/kubernetes-helm/helm-v2.6.1-linux-amd64.tar.gz
-tar -zxvf helm-v2.6.1-linux-amd64.tar.gz
+wget -q http://storage.googleapis.com/kubernetes-helm/helm-v2.7.2-linux-amd64.tar.gz
+tar -zxvf helm-v2.7.2-linux-amd64.tar.gz
sudo mv linux-amd64/helm /usr/local/bin/helm
-# verify version
-helm version
-# Rancher 1.6.14 installs 2.6.1 - if you upgrade to 2.8.0 - you will need to upgrade helm on the server to the version to level of client
-helm init --upgrade
# Fix virtual memory allocation for onap-log:elasticsearch:
echo "vm.max_map_count=262144" >> /etc/sysctl.conf
@@ -88,18 +84,6 @@ done
RANCHER_AGENT_CMD=$(jq -r .command token.json)
eval $RANCHER_AGENT_CMD
-# download rancher CLI
-wget -q https://github.com/rancher/cli/releases/download/v0.6.7/rancher-linux-amd64-v0.6.7.tar.xz
-unxz rancher-linux-amd64-v0.6.7.tar.xz
-tar xvf rancher-linux-amd64-v0.6.7.tar
-
-# Clone OOM:
-cd ~
-git clone -b master http://gerrit.onap.org/r/oom
-
-# Update values.yaml to point to docker-proxy instead of nexus3:
-cd ~/oom/kubernetes
-perl -p -i -e 's/nexus3.onap.org:10001/__docker_proxy__/g' `find ./ -name values.yaml` oneclick/setenv.bash
KUBETOKEN=$(echo -n 'Basic '$(echo -n "$RANCHER_ACCESS_KEY:$RANCHER_SECRET_KEY" | base64 -w 0) | base64 -w 0)
@@ -128,53 +112,32 @@ EOF
export KUBECONFIG=/root/.kube/config
kubectl config view
-# Update ~/oom/kubernetes/kube2msb/values.yaml kubeMasterAuthToken to use the token from ~/.kube/config
-sed -i "s/kubeMasterAuthToken:.*/kubeMasterAuthToken: $KUBETOKEN/" ~/oom/kubernetes/kube2msb/values.yaml
-
-# Put your onap_key ssh private key in ~/.ssh/onap_key
-
-# Create or edit ~/oom/kubernetes/config/onap-parameters.yaml
-cat > ~/oom/kubernetes/config/onap-parameters.yaml <<EOF
-OPENSTACK_UBUNTU_14_IMAGE: "__ubuntu_1404_image__"
-OPENSTACK_PUBLIC_NET_ID: "__public_net_id__"
-OPENSTACK_OAM_NETWORK_ID: "__oam_network_id__"
-OPENSTACK_OAM_SUBNET_ID: "__oam_subnet_id__"
-OPENSTACK_OAM_NETWORK_CIDR: "__oam_network_cidr__"
-OPENSTACK_USERNAME: "__openstack_username__"
-OPENSTACK_API_KEY: "__openstack_api_key__"
-OPENSTACK_TENANT_NAME: "__openstack_tenant_name__"
-OPENSTACK_TENANT_ID: "__openstack_tenant_id__"
-OPENSTACK_REGION: "RegionOne"
-OPENSTACK_KEYSTONE_URL: "__keystone_url__"
-OPENSTACK_FLAVOUR_MEDIUM: "m1.medium"
-OPENSTACK_SERVICE_TENANT_NAME: "service"
-DMAAP_TOPIC: "AUTO"
-DEMO_ARTIFACTS_VERSION: "1.1.1"
-EOF
-cat ~/oom/kubernetes/config/onap-parameters.yaml
-
-
# wait for kubernetes to initialze
sleep 100
until [ $(kubectl get pods --namespace kube-system | tail -n +2 | grep -c Running) -ge 6 ]; do
sleep 10
done
-# Source the environment file:
-cd ~/oom/kubernetes/oneclick/
-source setenv.bash
-# run the config pod creation
-cd ~/oom/kubernetes/config
-./createConfig.sh -n onap
+# Install using OOM
+export HOME=/root
-# Wait until the config container completes.
-sleep 20
-until [ $(kubectl get pods --namespace onap -a | tail -n +2 | grep -c Completed) -eq 1 ]; do
- sleep 10
-done
+# Clone OOM:
+cd ~
+git clone -b master http://gerrit.onap.org/r/oom
+git log -1
-# version control the config to see what's happening
+# Update values.yaml to point to docker-proxy instead of nexus3:
+cd ~/oom/kubernetes
+#perl -p -i -e 's/nexus3.onap.org:10001/__docker_proxy__/g' `find ./ -name values.yaml`
+sed -i 's/nexus3.onap.org:10001/__docker_proxy__/g' onap/values.yaml
+sed -i 's/#repository:/repository:/g' onap/values.yaml
+sed -i 's/#repositorySecret:/repositorySecret:/g' onap/values.yaml
+git diff
+
+
+# version control the persistence volume to see what's happening
+mkdir -p /dockerdata-nfs/
cd /dockerdata-nfs/
git init
git config user.email "root@k8s"
@@ -183,8 +146,18 @@ git add -A
git commit -m "initial commit"
# Run ONAP:
-cd ~/oom/kubernetes/oneclick/
-./createAll.bash -n onap
+cd ~/oom/kubernetes/
+# verify version
+helm version
+helm init --client-only
+helm init --upgrade
+helm serve &
+sleep 3
+helm repo add local http://127.0.0.1:8879
+helm repo list
+make all
+helm search -l | grep local
+helm install local/onap -n dev --namespace onap
# Check ONAP status:
sleep 3
diff --git a/deployment/heat/onap-oom/scripts/deploy.sh b/deployment/heat/onap-oom/scripts/deploy.sh
index 60ceab663..c81e65c8b 100755
--- a/deployment/heat/onap-oom/scripts/deploy.sh
+++ b/deployment/heat/onap-oom/scripts/deploy.sh
@@ -45,6 +45,7 @@ for n in $(seq 1 10); do
fi
sleep 15m
done
-LOG_DIR=$(ssh -o StrictHostKeychecking=no -i ~/.ssh/onap_key ubuntu@$K8S_IP "ls -1t /dockerdata-nfs/onap/robot/eteshare/logs | head -1")
-rsync -e "ssh -i ~/.ssh/onap_key" -avPz ubuntu@$K8S_IP:/dockerdata-nfs/onap/robot/eteshare/logs/${LOG_DIR}/ $WORKSPACE/archives/
+ROBOT_POD=$(ssh -o StrictHostKeychecking=no -i ~/.ssh/onap_key ubuntu@$K8S_IP 'sudo su -c "kubectl --namespace onap get pods"' | grep robot | sed 's/ .*//')
+LOG_DIR=$(ssh -o StrictHostKeychecking=no -i ~/.ssh/onap_key ubuntu@$K8S_IP "sudo su -c \"kubectl exec $ROBOT_POD --namespace onap -- ls -1t /share/logs | head -1\"")
+wget --user=robot --password=robot -r -np -nH --cut-dirs=2 -R "index.html*" -P $WORKSPACE/archives/ http://$K8S_IP:30209/logs/$LOG_DIR/
exit 0
diff --git a/test/csit/plans/aaf/aafapi/setup.sh b/test/csit/plans/aaf/aafapi/setup.sh
index bfaff925c..4a312704f 100644
--- a/test/csit/plans/aaf/aafapi/setup.sh
+++ b/test/csit/plans/aaf/aafapi/setup.sh
@@ -30,24 +30,24 @@ cd $WORKSPACE/archives/aafcsit
#unset http_proxy https_proxy
git clone --depth 1 http://gerrit.onap.org/r/aaf/authz -b master
git pull
-cd $WORKSPACE/archives/aafcsit/authz/authz-service/src/main/resources/docker-compose
+cd $WORKSPACE/archives/aafcsit/authz/auth/auth-service/src/main/resources/docker-compose
pwd
-chmod -R 777 $WORKSPACE/archives/aafcsit/authz/authz-service/src/main/resources/docker-compose
+chmod -R 777 $WORKSPACE/archives/aafcsit/authz/auth/auth-service/src/main/resources/docker-compose
# start aaf containers with docker compose and configuration from docker-compose.yml
docker-compose up -d
# Wait for initialization of Docker contaienr for AAF & Cassandra
-for i in {1..50}; do
+for i in {1..12}; do
if [ $(docker inspect --format '{{ .State.Running }}' dockercompose_aaf_container_1) ] && \
[ $(docker inspect --format '{{ .State.Running }}' dockercompose_cassandra_container_1) ] && \
- [ $(docker inspect --format '{{ .State.Running }}' dockercompose_aaf_container_1) ]
+ [ $(docker inspect --format '{{ .State.Running }}' dockercompose_aaf_container_1) ]
then
- echo "AAF Service Running"
- break
- else
- echo sleep $i
+ echo "AAF Service Running"
+ break
+ else
+ echo sleep $i
sleep $i
fi
done
@@ -62,11 +62,11 @@ echo CASSANDRA_IP=${CASSANDRA_IP}
# Wait for initialization of docker services
-for i in {1..50}; do
- curl -sS -m 1 ${AAF_IP}:8101 && break
+for i in {1..12}; do
+ curl -sS -m 1 ${AAF_IP}:8101 && break
echo sleep $i
sleep $i
done
#Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v AAF_IP:${AAF_IP}"
+ROBOT_VARIABLES="-v AAF_IP:${AAF_IP}"
diff --git a/test/csit/plans/aaf/sms-test-plan/setup.sh b/test/csit/plans/aaf/sms-test-plan/setup.sh
new file mode 100755
index 000000000..9f77b698e
--- /dev/null
+++ b/test/csit/plans/aaf/sms-test-plan/setup.sh
@@ -0,0 +1,71 @@
+#!/bin/bash
+#
+# Copyright 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Not sure why this is needed.
+source ${SCRIPTS}/common_functions.sh
+
+CONFIG_FILE=$(pwd)/config/smsconfig.json
+
+mkdir -p $(pwd)/config
+
+docker login -u docker -p docker nexus3.onap.org:10001
+docker pull nexus3.onap.org:10001/onap/aaf/sms
+docker pull docker.io/vault:0.9.5
+
+#
+# Running vault in dev server mode here for CSIT
+# In HELM it runs in production mode
+#
+docker run -e "VAULT_DEV_ROOT_TOKEN_ID=aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" \
+ -e SKIP_SETCAP=true \
+ --name vault -d -p 8200:8200 vault:0.9.5
+
+SMSDB_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' vault)
+cat << EOF > $CONFIG_FILE
+{
+ "cafile": "auth/selfsignedca.pem",
+ "servercert": "auth/server.cert",
+ "serverkey": "auth/server.key",
+
+ "smsdbaddress": "http://$SMSDB_IP:8200",
+ "vaulttoken": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
+ "disable_tls": true
+}
+EOF
+
+cat $CONFIG_FILE
+
+docker run --workdir /sms -v $CONFIG_FILE:/sms/smsconfig.json \
+ --name sms -d -p 10443:10443 nexus3.onap.org:10001/onap/aaf/sms
+
+SMS_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' sms)
+
+echo "###### WAITING FOR ALL CONTAINERS TO COME UP"
+sleep 20
+for i in {1..20}; do
+ curl -sS -m 1 http://${SMSDB_IP}:8200/v1/sys/seal-status && break
+ echo sleep $i
+ sleep $i
+done
+
+#
+# add here all ROBOT_VARIABLES settings
+#
+echo "# sms robot variables settings";
+ROBOT_VARIABLES="-v SMS_HOSTNAME:http://${SMS_IP} -v SMS_PORT:10443"
+
+echo ${ROBOT_VARIABLES}
diff --git a/test/csit/plans/aaf/sms-test-plan/teardown.sh b/test/csit/plans/aaf/sms-test-plan/teardown.sh
new file mode 100644
index 000000000..d6fa32924
--- /dev/null
+++ b/test/csit/plans/aaf/sms-test-plan/teardown.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+#
+# Copyright 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+docker cp sms:/sms/sms.log .
+cat sms.log
+rm sms.log
+rm -rf config
+docker stop sms vault
+docker rm sms vault
+docker rmi nexus3.onap.org:10001/onap/aaf/sms
+docker rmi docker.io/vault:0.9.5
diff --git a/test/csit/plans/aaf/sms-test-plan/testplan.txt b/test/csit/plans/aaf/sms-test-plan/testplan.txt
new file mode 100644
index 000000000..c2b3b7b1b
--- /dev/null
+++ b/test/csit/plans/aaf/sms-test-plan/testplan.txt
@@ -0,0 +1,3 @@
+# Test suites are relative paths under [integration.git]/test/csit/tests/.
+# Place the suites in run order.
+aaf/aaf-sms-suite \ No newline at end of file
diff --git a/test/csit/plans/aai/resources/aai-resources/appconfig/titan-cached.properties b/test/csit/plans/aai/resources/aai-resources/appconfig/janusgraph-cached.properties
index 97bb81863..c2110f77c 100644
--- a/test/csit/plans/aai/resources/aai-resources/appconfig/titan-cached.properties
+++ b/test/csit/plans/aai/resources/aai-resources/appconfig/janusgraph-cached.properties
@@ -21,10 +21,14 @@
#
query.fast-property=true
+query.smart-limit=false
+
# the following parameters are not reloaded automatically and require a manual bounce
-storage.backend=hbase
+storage.backend=cassandra
storage.hostname=aai.hbase.simpledemo.onap.org
+storage.cassandra.keyspace=aaigraph
+
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev1.dev
diff --git a/test/csit/plans/aai/traversal/aai-resources/appconfig/titan-realtime.properties b/test/csit/plans/aai/resources/aai-resources/appconfig/janusgraph-realtime.properties
index 0c97b753a..4791431a1 100644
--- a/test/csit/plans/aai/traversal/aai-resources/appconfig/titan-realtime.properties
+++ b/test/csit/plans/aai/resources/aai-resources/appconfig/janusgraph-realtime.properties
@@ -21,10 +21,14 @@
#
query.fast-property=true
+query.smart-limit=false
+
# the following parameters are not reloaded automatically and require a manual bounce
-storage.backend=hbase
+storage.backend=cassandra
storage.hostname=aai.hbase.simpledemo.onap.org
+storage.cassandra.keyspace=aaigraph
+
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev1.dev
diff --git a/test/csit/plans/aai/traversal/aai-resources/appconfig/titan-cached.properties b/test/csit/plans/aai/resources/aai-traversal/appconfig/janusgraph-cached.properties
index 97bb81863..c2110f77c 100644
--- a/test/csit/plans/aai/traversal/aai-resources/appconfig/titan-cached.properties
+++ b/test/csit/plans/aai/resources/aai-traversal/appconfig/janusgraph-cached.properties
@@ -21,10 +21,14 @@
#
query.fast-property=true
+query.smart-limit=false
+
# the following parameters are not reloaded automatically and require a manual bounce
-storage.backend=hbase
+storage.backend=cassandra
storage.hostname=aai.hbase.simpledemo.onap.org
+storage.cassandra.keyspace=aaigraph
+
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev1.dev
diff --git a/test/csit/plans/aai/resources/aai-resources/appconfig/titan-realtime.properties b/test/csit/plans/aai/resources/aai-traversal/appconfig/janusgraph-realtime.properties
index 0c97b753a..4791431a1 100644
--- a/test/csit/plans/aai/resources/aai-resources/appconfig/titan-realtime.properties
+++ b/test/csit/plans/aai/resources/aai-traversal/appconfig/janusgraph-realtime.properties
@@ -21,10 +21,14 @@
#
query.fast-property=true
+query.smart-limit=false
+
# the following parameters are not reloaded automatically and require a manual bounce
-storage.backend=hbase
+storage.backend=cassandra
storage.hostname=aai.hbase.simpledemo.onap.org
+storage.cassandra.keyspace=aaigraph
+
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev1.dev
diff --git a/test/csit/plans/aai/resources/docker-compose.yml b/test/csit/plans/aai/resources/docker-compose.yml
index 3f465c3ec..6f62e6ef2 100644
--- a/test/csit/plans/aai/resources/docker-compose.yml
+++ b/test/csit/plans/aai/resources/docker-compose.yml
@@ -6,11 +6,12 @@ services:
environment:
- LOCAL_USER_ID=${USER_ID}
- LOCAL_GROUP_ID=${GROUP_ID}
+ - SKIP_CREATE_DB_SCHEMA_AT_STARTUP=true
ports:
- 8447:8447
volumes:
- - ${CURRENT_PWD}/aai-resources/appconfig/titan-realtime.properties:/opt/app/aai-resources/resources/etc/appprops/titan-realtime.properties
- - ${CURRENT_PWD}/aai-resources/appconfig/titan-cached.properties:/opt/app/aai-resources/resources/etc/appprops/titan-cached.properties
+ - ${CURRENT_PWD}/aai-resources/appconfig/janusgraph-realtime.properties:/opt/app/aai-resources/resources/etc/appprops/janusgraph-realtime.properties
+ - ${CURRENT_PWD}/aai-resources/appconfig/janusgraph-cached.properties:/opt/app/aai-resources/resources/etc/appprops/janusgraph-cached.properties
- ${CURRENT_PWD}/aai-resources/appconfig/aaiconfig.properties:/opt/app/aai-resources/resources/etc/appprops/aaiconfig.properties
- ${CURRENT_PWD}/aai-resources/appconfig/application.properties:/opt/app/aai-resources/resources/application.properties
- ${CURRENT_PWD}/aai-resources/appconfig/logback.xml:/opt/app/aai-resources/resources/logback.xml
@@ -30,8 +31,8 @@ services:
- LOCAL_GROUP_ID=${GROUP_ID}
- DISABLE_UPDATE_QUERY=true
volumes:
- - ${CURRENT_PWD}/aai-traversal/appconfig/titan-realtime.properties:/opt/app/aai-traversal/resources/etc/appprops/titan-realtime.properties
- - ${CURRENT_PWD}/aai-traversal/appconfig/titan-cached.properties:/opt/app/aai-traversal/resources/etc/appprops/titan-cached.properties
+ - ${CURRENT_PWD}/aai-traversal/appconfig/janusgraph-realtime.properties:/opt/app/aai-traversal/resources/etc/appprops/janusgraph-realtime.properties
+ - ${CURRENT_PWD}/aai-traversal/appconfig/janusgraph-cached.properties:/opt/app/aai-traversal/resources/etc/appprops/janusgraph-cached.properties
- ${CURRENT_PWD}/aai-traversal/appconfig/aaiconfig.properties:/opt/app/aai-traversal/resources/etc/appprops/aaiconfig.properties
- ${CURRENT_PWD}/aai-traversal/appconfig/application.properties:/opt/app/aai-traversal/resources/application.properties
- ${CURRENT_PWD}/aai-traversal/appconfig/logback.xml:/opt/app/aai-traversal/resources/logback.xml
@@ -61,16 +62,18 @@ services:
max-size: "30m"
max-file: "5"
aai.hbase.simpledemo.onap.org:
- image: ${HBASE_IMAGE}:${HBASE_VERSION}
+ image: cassandra:2.1
hostname: aai.hbase.simpledemo.onap.org
ports:
- - 2181:2181
- - 8080:8080
- - 8085:8085
- - 9090:9090
- - 16000:16000
- - 16010:16010
- - 16201:16201
+ - 7000:7000
+ - 7001:7001
+ - 7199:7199
+ - 9042:9042
+ environment:
+ - CASSANDRA_SEEDS=aai.hbase.simpledemo.onap.org
+ - CASSANDRA_DC=Heat
+ - CASSANDRA_RACK=Rack1
+ - CASSANDRA_AUTO_BOOTSTRAP=true
logging:
driver: "json-file"
options:
diff --git a/test/csit/plans/aai/resources/setup.sh b/test/csit/plans/aai/resources/setup.sh
index 14b567108..bd5cb5214 100644
--- a/test/csit/plans/aai/resources/setup.sh
+++ b/test/csit/plans/aai/resources/setup.sh
@@ -73,10 +73,8 @@ ${DOCKER_COMPOSE_CMD} stop
${DOCKER_COMPOSE_CMD} rm -f -v
# Start the hbase where the data will be stored
-HBASE_CONTAINER_NAME=$(${DOCKER_COMPOSE_CMD} up -d aai.hbase.simpledemo.onap.org 2>&1 | grep 'Creating' | grep -v 'volume' | grep -v 'network' | awk '{ print $2; }' | head -1);
-wait_for_container ${HBASE_CONTAINER_NAME} ' Started SelectChannelConnector@0.0.0.0:8085';
-wait_for_container ${HBASE_CONTAINER_NAME} ' Started SelectChannelConnector@0.0.0.0:8080';
-wait_for_container ${HBASE_CONTAINER_NAME} ' Started SelectChannelConnector@0.0.0.0:9095';
+CASSANDRA_CONTAINER_NAME=$(${DOCKER_COMPOSE_CMD} up -d aai.hbase.simpledemo.onap.org 2>&1 | grep 'Creating' | grep -v 'volume' | grep -v 'network' | awk '{ print $2; }' | head -1);
+wait_for_container $CASSANDRA_CONTAINER_NAME 'Listening for thrift clients';
USER_EXISTS=$(check_if_user_exists aaiadmin);
@@ -102,6 +100,8 @@ else
export USER_ID=$(id -u aaiadmin);
fi;
+$DOCKER_COMPOSE_CMD run --rm aai-resources.api.simpledemo.onap.org createDBSchema.sh
+
RESOURCES_CONTAINER_NAME=$(${DOCKER_COMPOSE_CMD} up -d aai-resources.api.simpledemo.onap.org 2>&1 | grep 'Creating' | grep -v 'volume' | grep -v 'network' | awk '{ print $2; }' | head -1);
wait_for_container $RESOURCES_CONTAINER_NAME 'Resources Microservice Started';
diff --git a/test/csit/plans/aai/resources/aai-traversal/appconfig/titan-cached.properties b/test/csit/plans/aai/traversal/aai-resources/appconfig/janusgraph-cached.properties
index 97bb81863..c2110f77c 100644
--- a/test/csit/plans/aai/resources/aai-traversal/appconfig/titan-cached.properties
+++ b/test/csit/plans/aai/traversal/aai-resources/appconfig/janusgraph-cached.properties
@@ -21,10 +21,14 @@
#
query.fast-property=true
+query.smart-limit=false
+
# the following parameters are not reloaded automatically and require a manual bounce
-storage.backend=hbase
+storage.backend=cassandra
storage.hostname=aai.hbase.simpledemo.onap.org
+storage.cassandra.keyspace=aaigraph
+
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev1.dev
diff --git a/test/csit/plans/aai/traversal/aai-traversal/appconfig/titan-realtime.properties b/test/csit/plans/aai/traversal/aai-resources/appconfig/janusgraph-realtime.properties
index 0c97b753a..4791431a1 100644
--- a/test/csit/plans/aai/traversal/aai-traversal/appconfig/titan-realtime.properties
+++ b/test/csit/plans/aai/traversal/aai-resources/appconfig/janusgraph-realtime.properties
@@ -21,10 +21,14 @@
#
query.fast-property=true
+query.smart-limit=false
+
# the following parameters are not reloaded automatically and require a manual bounce
-storage.backend=hbase
+storage.backend=cassandra
storage.hostname=aai.hbase.simpledemo.onap.org
+storage.cassandra.keyspace=aaigraph
+
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev1.dev
diff --git a/test/csit/plans/aai/traversal/aai-traversal/appconfig/titan-cached.properties b/test/csit/plans/aai/traversal/aai-traversal/appconfig/janusgraph-cached.properties
index 97bb81863..c2110f77c 100644
--- a/test/csit/plans/aai/traversal/aai-traversal/appconfig/titan-cached.properties
+++ b/test/csit/plans/aai/traversal/aai-traversal/appconfig/janusgraph-cached.properties
@@ -21,10 +21,14 @@
#
query.fast-property=true
+query.smart-limit=false
+
# the following parameters are not reloaded automatically and require a manual bounce
-storage.backend=hbase
+storage.backend=cassandra
storage.hostname=aai.hbase.simpledemo.onap.org
+storage.cassandra.keyspace=aaigraph
+
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev1.dev
diff --git a/test/csit/plans/aai/resources/aai-traversal/appconfig/titan-realtime.properties b/test/csit/plans/aai/traversal/aai-traversal/appconfig/janusgraph-realtime.properties
index 0c97b753a..4791431a1 100644
--- a/test/csit/plans/aai/resources/aai-traversal/appconfig/titan-realtime.properties
+++ b/test/csit/plans/aai/traversal/aai-traversal/appconfig/janusgraph-realtime.properties
@@ -21,10 +21,14 @@
#
query.fast-property=true
+query.smart-limit=false
+
# the following parameters are not reloaded automatically and require a manual bounce
-storage.backend=hbase
+storage.backend=cassandra
storage.hostname=aai.hbase.simpledemo.onap.org
+storage.cassandra.keyspace=aaigraph
+
#schema.default=none
storage.lock.wait-time=300
storage.hbase.table=aaigraph-dev1.dev
diff --git a/test/csit/plans/aai/traversal/docker-compose.yml b/test/csit/plans/aai/traversal/docker-compose.yml
index 3f465c3ec..6f62e6ef2 100644
--- a/test/csit/plans/aai/traversal/docker-compose.yml
+++ b/test/csit/plans/aai/traversal/docker-compose.yml
@@ -6,11 +6,12 @@ services:
environment:
- LOCAL_USER_ID=${USER_ID}
- LOCAL_GROUP_ID=${GROUP_ID}
+ - SKIP_CREATE_DB_SCHEMA_AT_STARTUP=true
ports:
- 8447:8447
volumes:
- - ${CURRENT_PWD}/aai-resources/appconfig/titan-realtime.properties:/opt/app/aai-resources/resources/etc/appprops/titan-realtime.properties
- - ${CURRENT_PWD}/aai-resources/appconfig/titan-cached.properties:/opt/app/aai-resources/resources/etc/appprops/titan-cached.properties
+ - ${CURRENT_PWD}/aai-resources/appconfig/janusgraph-realtime.properties:/opt/app/aai-resources/resources/etc/appprops/janusgraph-realtime.properties
+ - ${CURRENT_PWD}/aai-resources/appconfig/janusgraph-cached.properties:/opt/app/aai-resources/resources/etc/appprops/janusgraph-cached.properties
- ${CURRENT_PWD}/aai-resources/appconfig/aaiconfig.properties:/opt/app/aai-resources/resources/etc/appprops/aaiconfig.properties
- ${CURRENT_PWD}/aai-resources/appconfig/application.properties:/opt/app/aai-resources/resources/application.properties
- ${CURRENT_PWD}/aai-resources/appconfig/logback.xml:/opt/app/aai-resources/resources/logback.xml
@@ -30,8 +31,8 @@ services:
- LOCAL_GROUP_ID=${GROUP_ID}
- DISABLE_UPDATE_QUERY=true
volumes:
- - ${CURRENT_PWD}/aai-traversal/appconfig/titan-realtime.properties:/opt/app/aai-traversal/resources/etc/appprops/titan-realtime.properties
- - ${CURRENT_PWD}/aai-traversal/appconfig/titan-cached.properties:/opt/app/aai-traversal/resources/etc/appprops/titan-cached.properties
+ - ${CURRENT_PWD}/aai-traversal/appconfig/janusgraph-realtime.properties:/opt/app/aai-traversal/resources/etc/appprops/janusgraph-realtime.properties
+ - ${CURRENT_PWD}/aai-traversal/appconfig/janusgraph-cached.properties:/opt/app/aai-traversal/resources/etc/appprops/janusgraph-cached.properties
- ${CURRENT_PWD}/aai-traversal/appconfig/aaiconfig.properties:/opt/app/aai-traversal/resources/etc/appprops/aaiconfig.properties
- ${CURRENT_PWD}/aai-traversal/appconfig/application.properties:/opt/app/aai-traversal/resources/application.properties
- ${CURRENT_PWD}/aai-traversal/appconfig/logback.xml:/opt/app/aai-traversal/resources/logback.xml
@@ -61,16 +62,18 @@ services:
max-size: "30m"
max-file: "5"
aai.hbase.simpledemo.onap.org:
- image: ${HBASE_IMAGE}:${HBASE_VERSION}
+ image: cassandra:2.1
hostname: aai.hbase.simpledemo.onap.org
ports:
- - 2181:2181
- - 8080:8080
- - 8085:8085
- - 9090:9090
- - 16000:16000
- - 16010:16010
- - 16201:16201
+ - 7000:7000
+ - 7001:7001
+ - 7199:7199
+ - 9042:9042
+ environment:
+ - CASSANDRA_SEEDS=aai.hbase.simpledemo.onap.org
+ - CASSANDRA_DC=Heat
+ - CASSANDRA_RACK=Rack1
+ - CASSANDRA_AUTO_BOOTSTRAP=true
logging:
driver: "json-file"
options:
diff --git a/test/csit/plans/aai/traversal/setup.sh b/test/csit/plans/aai/traversal/setup.sh
index 118a1bc9c..70dda84b6 100644
--- a/test/csit/plans/aai/traversal/setup.sh
+++ b/test/csit/plans/aai/traversal/setup.sh
@@ -72,10 +72,8 @@ ${DOCKER_COMPOSE_CMD} stop
${DOCKER_COMPOSE_CMD} rm -f -v
# Start the hbase where the data will be stored
-HBASE_CONTAINER_NAME=$(${DOCKER_COMPOSE_CMD} up -d aai.hbase.simpledemo.onap.org 2>&1 | grep 'Creating' | grep -v 'volume' | grep -v 'network' | awk '{ print $2; }' | head -1);
-wait_for_container ${HBASE_CONTAINER_NAME} ' Started SelectChannelConnector@0.0.0.0:8085';
-wait_for_container ${HBASE_CONTAINER_NAME} ' Started SelectChannelConnector@0.0.0.0:8080';
-wait_for_container ${HBASE_CONTAINER_NAME} ' Started SelectChannelConnector@0.0.0.0:9095';
+CASSANDRA_CONTAINER_NAME=$(${DOCKER_COMPOSE_CMD} up -d aai.hbase.simpledemo.onap.org 2>&1 | grep 'Creating' | grep -v 'volume' | grep -v 'network' | awk '{ print $2; }' | head -1);
+wait_for_container $CASSANDRA_CONTAINER_NAME 'Listening for thrift clients';
USER_EXISTS=$(check_if_user_exists aaiadmin);
@@ -103,12 +101,16 @@ else
export GROUP_ID=$(id -g aaiadmin);
fi;
+$DOCKER_COMPOSE_CMD run --rm aai-resources.api.simpledemo.onap.org createDBSchema.sh
+
RESOURCES_CONTAINER_NAME=$(${DOCKER_COMPOSE_CMD} up -d aai-resources.api.simpledemo.onap.org 2>&1 | grep 'Creating' | grep -v 'volume' | grep -v 'network' | awk '{ print $2; }' | head -1);
wait_for_container $RESOURCES_CONTAINER_NAME 'Resources Microservice Started';
${DOCKER_COMPOSE_CMD} up -d aai-traversal.api.simpledemo.onap.org aai.api.simpledemo.onap.org
TRAVERSAL_CONTAINER_NAME=$(echo $RESOURCES_CONTAINER_NAME | sed 's/aai-resources/aai-traversal/g');
+$DOCKER_COMPOSE_CMD run --rm aai-traversal.api.simpledemo.onap.org install/updateQueryData.sh
+
echo "A&AI Microservices, resources and traversal, are up and running along with HAProxy";
wait_for_container $TRAVERSAL_CONTAINER_NAME 'Traversal Microservice Started';
diff --git a/test/csit/plans/appc/healthcheck/bundle_query.sh b/test/csit/plans/appc/healthcheck/bundle_query.sh
index a85bf31c4..3801d0a12 100755
--- a/test/csit/plans/appc/healthcheck/bundle_query.sh
+++ b/test/csit/plans/appc/healthcheck/bundle_query.sh
@@ -18,10 +18,10 @@ SCRIPTS="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
echo $SCRIPTS
-num_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | tail -1 | cut -d\| -f1)
-#num_failed_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | grep Failure | wc -l)
-num_failed_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | grep Failure | wc -l)
-failed_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | grep Failure)
+num_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client bundle:list | tail -1 | cut -d\| -f1)
+#num_failed_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client bundle:list | grep Failure | wc -l)
+num_failed_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client bundle:list | grep Failure | wc -l)
+failed_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client bundle:list | grep Failure)
echo "There are $num_failed_bundles failed bundles out of $num_bundles installed bundles."
diff --git a/test/csit/plans/appc/healthcheck/health_check.sh b/test/csit/plans/appc/healthcheck/health_check.sh
index 63e0b17aa..e4cfae8f5 100755
--- a/test/csit/plans/appc/healthcheck/health_check.sh
+++ b/test/csit/plans/appc/healthcheck/health_check.sh
@@ -17,7 +17,7 @@
SCRIPTS="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
echo $SCRIPTS
-response=$(curl --write-out '%{http_code}' --silent --output /dev/null -H "Authorization: Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ==" -X POST -H "X-FromAppId: csit-appc" -H "X-TransactionId: csit-appc" -H "Accept: application/json" -H "Content-Type: application/json" http://localhost:8282/restconf/operations/SLI-API:healthcheck )
+response=$(curl --write-out '%{http_code}' --silent --output /dev/null -H "Authorization: Basic YWRtaW46YWRtaW4=" -X POST -H "X-FromAppId: csit-appc" -H "X-TransactionId: csit-appc" -H "Accept: application/json" -H "Content-Type: application/json" http://localhost:8282/restconf/operations/SLI-API:healthcheck )
if [ "$response" == "200" ]; then
echo "APPC health check passed."
diff --git a/test/csit/plans/appc/healthcheck/setup.sh b/test/csit/plans/appc/healthcheck/setup.sh
index 406743303..f47685334 100755
--- a/test/csit/plans/appc/healthcheck/setup.sh
+++ b/test/csit/plans/appc/healthcheck/setup.sh
@@ -19,19 +19,11 @@
# Place the scripts in run order:
SCRIPTS="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source ${WORKSPACE}/test/csit/scripts/appc/script1.sh
-amsterdam="$(echo ${WORKSPACE} | grep amsterdam | wc -l)"
-if [ "$amsterdam" != "1" ]; then
- export APPC_DOCKER_IMAGE_VERSION=1.3.0-SNAPSHOT-latest
- export CCSDK_DOCKER_IMAGE_VERSION=0.1-STAGING-latest
- export BRANCH=master
- export SOLUTION_NAME=onap
-else
- export APPC_DOCKER_IMAGE_VERSION=v1.2.0
- export CCSDK_DOCKER_IMAGE_VERSION=v0.1.0
- export BRANCH=amsterdam
- export SOLUTION_NAME=openecomp
-fi
+export APPC_DOCKER_IMAGE_VERSION=1.3.0-SNAPSHOT-latest
+export CCSDK_DOCKER_IMAGE_VERSION=0.2.1-SNAPSHOT
+export BRANCH=master
+export SOLUTION_NAME=onap
export NEXUS_USERNAME=docker
export NEXUS_PASSWD=docker
@@ -67,8 +59,8 @@ TIME=0
while [ "$TIME" -lt "$TIME_OUT" ]; do
startODL_status=$(docker exec appc_controller_container ps -e | grep startODL | wc -l)
-waiting_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf bundle:list | grep Waiting | wc -l)
-run_level=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client -u karaf system:start-level)
+waiting_bundles=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client bundle:list | grep Waiting | wc -l)
+run_level=$(docker exec appc_controller_container /opt/opendaylight/current/bin/client system:start-level)
if [ "$run_level" == "Level 100" ] && [ "$startODL_status" -lt "1" ] && [ "$waiting_bundles" -lt "1" ] ; then
echo APPC started in $TIME seconds
diff --git a/test/csit/plans/dmaap/mrpubsub/setup.sh b/test/csit/plans/dmaap/mrpubsub/setup.sh
index 3e8950f2b..0a1f9d359 100755
--- a/test/csit/plans/dmaap/mrpubsub/setup.sh
+++ b/test/csit/plans/dmaap/mrpubsub/setup.sh
@@ -35,6 +35,7 @@ cp $WORKSPACE/archives/dmaapmr/messageservice/bundleconfig-local/etc/appprops/Ms
# start DMaaP MR containers with docker compose and configuration from docker-compose.yml
+docker login -u docker -p docker nexus3.onap.org:10001
docker-compose up -d
# Wait for initialization of Docker container for DMaaP MR, Kafka and Zookeeper
@@ -68,6 +69,7 @@ sed -i -e 's/<zookeeper_host>/'$ZOOKEEPER_IP'/' /var/tmp/MsgRtrApi.properties
sed -i -e 's/<kafka_host>:<kafka_port>/'$KAFKA_IP':9092/' /var/tmp/MsgRtrApi.properties
docker-compose build
+docker login -u docker -p docker nexus3.onap.org:10001
docker-compose up -d
# Wait for initialization of Docker containers
diff --git a/test/csit/plans/holmes-rule-management/sanity-check/setup.sh b/test/csit/plans/holmes-rule-management/sanity-check/setup.sh
index ae78ec49a..89688eeaf 100644
--- a/test/csit/plans/holmes-rule-management/sanity-check/setup.sh
+++ b/test/csit/plans/holmes-rule-management/sanity-check/setup.sh
@@ -60,6 +60,12 @@ for i in {1..10}; do
echo sleep $i
sleep $i
done
+
+echo sleep 30s for service registration
+sleep 30
+
+docker logs i-rulemgt
+docker logs i-engine-d
#Pass any variables required by Robot test suites in ROBOT_VARIABLES
ROBOT_VARIABLES="-v MSB_IP:${MSB_IP} -v RULEMGT_IP:${RULEMGT_IP} -v ENGINE_D_IP:${ENGINE_D_IP}"
diff --git a/test/csit/plans/multicloud-vmware/functionality1/testplan.txt b/test/csit/plans/multicloud-vmware/functionality1/testplan.txt
index 2f5ad1b6f..0a2ad45af 100644
--- a/test/csit/plans/multicloud-vmware/functionality1/testplan.txt
+++ b/test/csit/plans/multicloud-vmware/functionality1/testplan.txt
@@ -7,3 +7,7 @@ multicloud-vmware/provision/sanity_test_neutron.robot
multicloud-vmware/nova/sanity-flavor.robot
multicloud-vmware/nova/sanity-host.robot
multicloud-vmware/nova/sanity-server.robot
+multicloud-vmware/samples/sanity-sample.robot
+multicloud-vmware/hosts/sanity-host.robot
+multicloud-vmware/networks/sanity-network.robot
+multicloud-vmware/images/sanity-image.robot
diff --git a/test/csit/plans/multicloud/functionality1/setup.sh b/test/csit/plans/multicloud/functionality1/setup.sh
index 993a39f68..50118a54a 100755
--- a/test/csit/plans/multicloud/functionality1/setup.sh
+++ b/test/csit/plans/multicloud/functionality1/setup.sh
@@ -20,7 +20,9 @@
source ${SCRIPTS}/common_functions.sh
# start multivim-broker
-docker run -d --name multivim-broker nexus3.onap.org:10001/onap/multicloud/framework
+docker run -d --name multivim-vio nexus3.onap.org:10001/onap/multicloud/vio
+docker run -d --name multivim-broker --link multivim-vio -e MSB_ADDR=multivim-vio -e MSB_PORT=9004 nexus3.onap.org:10001/onap/multicloud/framework
+
BROKER_IP=`get-instance-ip.sh multivim-broker`
for i in {1..50}; do
curl -sS ${BROKER_IP}:9001 && break
diff --git a/test/csit/plans/multicloud/functionality1/teardown.sh b/test/csit/plans/multicloud/functionality1/teardown.sh
index 1732649af..a2ef0e35e 100755
--- a/test/csit/plans/multicloud/functionality1/teardown.sh
+++ b/test/csit/plans/multicloud/functionality1/teardown.sh
@@ -16,4 +16,5 @@
#
# This script is sourced by run-csit.sh after Robot test completion.
+kill-instance.sh multivim-vio
kill-instance.sh multivim-broker
diff --git a/test/csit/plans/music/music-distributed-kv-store-test-plan/setup.sh b/test/csit/plans/music/music-distributed-kv-store-test-plan/setup.sh
new file mode 100644
index 000000000..88becf69e
--- /dev/null
+++ b/test/csit/plans/music/music-distributed-kv-store-test-plan/setup.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+#
+# Copyright 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Source the common CSIT helper functions required by the test harness.
+source ${SCRIPTS}/common_functions.sh
+
+# Initial Configuration.
+DATASTORE="consul"
+DATASTORE_IP="localhost"
+
+MOUNTPATH="/dkv_mount_path/configs/"
+DEFAULT_CONFIGS=$(pwd)/mountpath/default
+
+mkdir -p mountpath/default
+
+pushd mountpath/default
+cat << EOF > sampleConfig1.properties
+foo1=bar1
+hello1=world1
+key1=value1
+EOF
+cat << EOF > sampleConfig2.properties
+foo2=bar2
+hello2=world2
+key2=value2
+EOF
+popd
+
+docker login -u docker -p docker nexus3.onap.org:10001
+docker pull nexus3.onap.org:10001/onap/music/distributed-kv-store
+docker run -e DATASTORE=$DATASTORE -e DATASTORE_IP=$DATASTORE_IP -e MOUNTPATH=$MOUNTPATH -d \
+ --name dkv \
+ -v $DEFAULT_CONFIGS:/dkv_mount_path/configs/default \
+ -p 8200:8200 -p 8080:8080 nexus3.onap.org:10001/onap/music/distributed-kv-store
+
+
+echo "###### WAITING FOR DISTRIBUTED KV STORE CONTAINER TO COME UP"
+sleep 10
+
+#
+# add here all ROBOT_VARIABLES settings
+#
+echo "# music robot variables settings";
+ROBOT_VARIABLES="-v DKV_HOSTNAME:http://localhost -v DKV_PORT:8080"
+
+echo ${ROBOT_VARIABLES} \ No newline at end of file
diff --git a/test/csit/plans/music/music-distributed-kv-store-test-plan/teardown.sh b/test/csit/plans/music/music-distributed-kv-store-test-plan/teardown.sh
new file mode 100644
index 000000000..0abf3a62d
--- /dev/null
+++ b/test/csit/plans/music/music-distributed-kv-store-test-plan/teardown.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+# Copyright 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+rm -rf mountpath
+docker stop dkv
+docker rm dkv
+docker rmi nexus3.onap.org:10001/onap/music/distributed-kv-store
diff --git a/test/csit/plans/music/music-distributed-kv-store-test-plan/testplan.txt b/test/csit/plans/music/music-distributed-kv-store-test-plan/testplan.txt
new file mode 100644
index 000000000..ac37bc5a6
--- /dev/null
+++ b/test/csit/plans/music/music-distributed-kv-store-test-plan/testplan.txt
@@ -0,0 +1,3 @@
+# Test suites are relative paths under [integration.git]/test/csit/tests/.
+# Place the suites in run order.
+music/music-distributed-kv-store-suite \ No newline at end of file
diff --git a/test/csit/plans/music/music-test-plan/setup.sh b/test/csit/plans/music/music-test-plan/setup.sh
index 63d2ef93f..ddfdfc023 100755
--- a/test/csit/plans/music/music-test-plan/setup.sh
+++ b/test/csit/plans/music/music-test-plan/setup.sh
@@ -50,14 +50,21 @@ docker network create music-net;
# Start Cassandra
docker run -d --name music-db --network music-net -p "7000:7000" -p "7001:7001" -p "7199:7199" -p "9042:9042" -p "9160:9160" -e CASSUSER=${CASS_USERNAME} -e CASSPASS=${CASS_PASSWORD} ${CASS_IMG};
+CASSA_IP=`docker inspect -f '{{ $network := index .NetworkSettings.Networks "music-net" }}{{ $network.IPAddress}}' music-db`
+echo "CASSANDRA_IP=${CASSA_IP}"
+${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${CASSA_IP} 9042
+
# Start Music war
docker run -d --name music-war -v music-vol:/app ${MUSIC_IMG};
# Start Zookeeper
docker run -d --name music-zk --network music-net -p "2181:2181" -p "2888:2888" -p "3888:3888" ${ZK_IMG};
-# Delay for Cassandra
-sleep 20;
+ZOO_IP=`docker inspect -f '{{ $network := index .NetworkSettings.Networks "music-net" }}{{ $network.IPAddress}}' music-zk`
+echo "ZOOKEEPER_IP=${ZOO_IP}"
+
+# Delay between Cassandra/Zookeeper and Tomcat
+sleep 60;
# Start Up tomcat - Needs to have properties,logs dir and war file volume mapped.
docker run -d --name music-tomcat --network music-net -p "8080:8080" -v music-vol:/usr/local/tomcat/webapps -v ${WORK_DIR}/properties:/opt/app/music/etc:ro -v ${WORK_DIR}/logs:/opt/app/music/logs ${TOMCAT_IMG};
@@ -65,20 +72,6 @@ docker run -d --name music-tomcat --network music-net -p "8080:8080" -v music-vo
# Connect tomcat to host bridge network so that its port can be seen.
docker network connect bridge music-tomcat;
-##################################
-
-#
-# add here below the start of all docker containers needed for music CSIT testing
-#
-
-CASSA_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' music-db`
-echo "CASSANDRA_IP=${CASSA_IP}"
-
-ZOO_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' music-zk`
-echo "ZOOKEEPER_IP=${ZOO_IP}"
-
-${WORKSPACE}/test/csit/scripts/music/music-scripts/wait_for_port.sh ${CASSA_IP} 9042
-
TOMCAT_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' music-tomcat`
echo "TOMCAT_IP=${TOMCAT_IP}"
@@ -93,7 +86,10 @@ docker inspect music-war
docker volume inspect music-vol
docker network inspect music-net
-
+echo "dump music content just after music is started"
+docker exec music-db /usr/bin/nodetool status
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM system_schema.keyspaces'
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM admin.keyspace_master'
#
diff --git a/test/csit/plans/portal-sdk/testsuite/.env b/test/csit/plans/portal-sdk/testsuite/.env
index 0b7de9560..27e9aa3fb 100644
--- a/test/csit/plans/portal-sdk/testsuite/.env
+++ b/test/csit/plans/portal-sdk/testsuite/.env
@@ -2,25 +2,34 @@
# used by docker-compose AND by other shell scripts
# Host directory with config files
-LOGS_DIR=./logs
-PROPS_DIR=./properties
-
-
-# Directory within containers
-WEBAPPS_DIR=/opt/apache-tomcat-8.0.37/webapps
-
# Following are ALSO used in demo/boot/portal_vm_init.sh
-EP_IMG_NAME=onap/portal-apps
+EP_IMG_NAME=onap/portal-app
+SDK_IMG_NAME=onap/portal-sdk
DB_IMG_NAME=onap/portal-db
+CDR_IMG_NAME=onap/music/cassandra_music
+ZK_IMG_NAME=zookeeper
WMS_IMG_NAME=onap/portal-wms
+# Deployed with portal; built elsewhere
CLI_IMG_NAME=onap/cli
# Tag all images with this
-PORTAL_TAG=1.3.0
-DOCKER_IMAGE_VERSION=1.3-STAGING-latest
+DOCKER_IMAGE_VERSION=2.1-STAGING-latest
CLI_DOCKER_VERSION=1.1-STAGING-latest
+CDR_IMAGE_VERSION=latest
+ZK_IMAGE_VERSION=3.4
NEXUS_DOCKER_REPO=nexus3.onap.org:10003
+# This is used during builds and in docker-compose;
+# it is never published to the ONAP registry.
+PORTAL_TAG=beijing
+
+# Name of directory in apps container (NOT host)
+WEBAPPS_DIR=/opt/apache-tomcat-8.0.37/webapps
+
+# Required settings with default values.
+# Export shell environment variables on ALL hosts.
+LOGS_DIR=./logs
+PROPS_DIR=./properties
# Optional settings with no defaults.
EXTRA_HOST_IP=""
diff --git a/test/csit/plans/portal-sdk/testsuite/docker-compose.yml b/test/csit/plans/portal-sdk/testsuite/docker-compose.yml
index 7a9fb8caa..dda74c91a 100644
--- a/test/csit/plans/portal-sdk/testsuite/docker-compose.yml
+++ b/test/csit/plans/portal-sdk/testsuite/docker-compose.yml
@@ -1,11 +1,12 @@
# docker-compose for ONAP portal containers: database, microservice, portal apps.
-# Relies on .env file in current directory.
+# Relies on .env file, which CANNOT be specified via command-line option
# Works in multiple environments; does not pull from a Nexus registry.
# Exposes the portal apps docker (but not DB nor WMS dockers) on the host network.
# Images must be pulled from ONAP Nexus registry after logging in like this:
# docker login -u USER -p PASS nexus3.onap.org:10001
+# Uses healthcheck feature added in docker-compose v2.1
-version: '2.0'
+version: '2.1'
services:
@@ -29,11 +30,51 @@ services:
volumes:
# Just specify a path and let the Engine create a volume
- /var/lib/mysql
+ # Inject the onboarding script at start time
+ - ./Apps_Users_OnBoarding_Script.sql:/docker-entrypoint-initdb.d/zzz_apps_users_onboarding.sql
logging:
driver: json-file
+ healthcheck:
+ test: [ "CMD", "mysqladmin", "ping", "-h", "localhost" ]
+ timeout: 10s
+ retries: 30
+
+ # Config files may use hostname "portal-cassandra"
+ portal-cassandra:
+ image: ${CDR_IMG_NAME}:${PORTAL_TAG}
+ environment:
+ - CASSUSER=root
+ - CASSPASS=Aa123456
+ expose:
+ - 7000
+ - 7001
+ - 7199
+ - 9042
+ - 9160
+ ports:
+ - 7000:7000
+ - 7001:7001
+ - 7199:7199
+ - 9042:9042
+ - 9160:9160
+ volumes:
+ - ./portal.cql:/docker-entrypoint-initdb.d/zzz_portal.cql
+ - ./portalsdk.cql:/docker-entrypoint-initdb.d/zzz_portalsdk.cql
+ links:
+ - portal-db
+ depends_on:
+ portal-db:
+ condition: service_healthy
+
+ # Config files may use hostname "portal-zk"
+ portal-zk:
+ image: ${ZK_IMG_NAME}:${PORTAL_TAG}
+ expose:
+ - 2181
+ ports:
+ - 2181:2181
- # An environment variable here CAN override the database URL;
- # instead the value in the config file uses hostname from above
+ # The app config file uses the docker name above
portal-wms:
image: ${WMS_IMG_NAME}:${PORTAL_TAG}
expose:
@@ -41,25 +82,18 @@ services:
links:
- portal-db
depends_on:
- - portal-db
+ portal-db:
+ condition: service_healthy
volumes:
- - ${PROPS_DIR}/ECOMPWIDGETMS/application.properties:/application.properties
+ - ${PROPS_DIR}/ONAPWIDGETMS/application.properties:/application.properties
+ - ${PROPS_DIR}/ONAPWIDGETMS/application.yml:/application.yml
command:
- - /wait-for.sh
- - -t
- - "420"
- - portal-db:3306
- - --
- - /start-wms-cmd.sh
+ - /start-wms.sh
logging:
driver: json-file
- # Environment variables here CANNOT override the database URL because
- # two apps use identical configuration keys with different values
- portal-apps:
+ portal-app:
image: ${EP_IMG_NAME}:${PORTAL_TAG}
- expose:
- - 8989
ports:
- 8989:8080
- 8010:8009
@@ -67,32 +101,51 @@ services:
links:
- portal-db
- portal-wms
+ - portal-zk
+ - portal-cassandra
depends_on:
- - portal-db
- - portal-wms
+ portal-db:
+ condition: service_healthy
+ portal-wms:
+ condition: service_started
volumes:
- - ${PROPS_DIR}/ECOMPPORTALAPP/system.properties:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/conf/system.properties
- - ${PROPS_DIR}/ECOMPPORTALAPP/fusion.properties:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/fusion/conf/fusion.properties
- - ${PROPS_DIR}/ECOMPPORTALAPP/portal.properties:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/classes/portal.properties
- - ${PROPS_DIR}/ECOMPPORTALAPP/openid-connect.properties:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/classes/openid-connect.properties
- - ${PROPS_DIR}/ECOMPPORTALAPP/logback.xml:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/classes/logback.xml
- - ${PROPS_DIR}/ECOMPSDKAPP/fusion.properties:${WEBAPPS_DIR}/ECOMPSDKAPP/WEB-INF/fusion/conf/fusion.properties
- - ${PROPS_DIR}/ECOMPSDKAPP/system.properties:${WEBAPPS_DIR}/ECOMPSDKAPP/WEB-INF/conf/system.properties
- - ${PROPS_DIR}/ECOMPSDKAPP/portal.properties:${WEBAPPS_DIR}/ECOMPSDKAPP/WEB-INF/classes/portal.properties
- - ${PROPS_DIR}/ECOMPDBCAPP/system.properties:${WEBAPPS_DIR}/ECOMPDBCAPP/WEB-INF/conf/system.properties
- - ${PROPS_DIR}/ECOMPDBCAPP/portal.properties:${WEBAPPS_DIR}/ECOMPDBCAPP/WEB-INF/classes/portal.properties
- - ${PROPS_DIR}/ECOMPDBCAPP/dbcapp.properties:${WEBAPPS_DIR}/ECOMPDBCAPP/WEB-INF/dbcapp/dbcapp.properties
- - ${PROPS_DIR}/ECOMPDBCAPP/fusion.properties:${WEBAPPS_DIR}/ECOMPDBCAPP/WEB-INF/fusion/conf/fusion.properties
+ - ${PROPS_DIR}/ONAPPORTAL/system.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/conf/system.properties
+ - ${PROPS_DIR}/ONAPPORTAL/fusion.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/fusion/conf/fusion.properties
+ - ${PROPS_DIR}/ONAPPORTAL/portal.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/classes/portal.properties
+ - ${PROPS_DIR}/ONAPPORTAL/music.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/classes/music.properties
+ - ${PROPS_DIR}/ONAPPORTAL/openid-connect.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/classes/openid-connect.properties
+ - ${PROPS_DIR}/ONAPPORTAL/logback.xml:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/classes/logback.xml
- ${LOGS_DIR}:/opt/apache-tomcat-8.0.37/logs
command:
- - /wait-for.sh
- - -t
- - "420"
- - portal-db:3306
- - --
- - /start-apps-cmd.sh
+ - /start-apache-tomcat.sh
# see comments in .env file
+ - -i
- $EXTRA_HOST_IP
+ - -n
- $EXTRA_HOST_NAME
logging:
driver: json-file
+
+ portal-sdk:
+ image: ${SDK_IMG_NAME}:${PORTAL_TAG}
+ ports:
+ - 8990:8080
+ links:
+ - portal-db
+ - portal-wms
+ - portal-zk
+ - portal-cassandra
+ depends_on:
+ portal-db:
+ condition: service_healthy
+ volumes:
+ - ${PROPS_DIR}/ONAPPORTALSDK/fusion.properties:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/fusion/conf/fusion.properties
+ - ${PROPS_DIR}/ONAPPORTALSDK/system.properties:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/conf/system.properties
+ - ${PROPS_DIR}/ONAPPORTALSDK/portal.properties:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/classes/portal.properties
+ - ${PROPS_DIR}/ONAPPORTALSDK/music.properties:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/classes/music.properties
+ - ${PROPS_DIR}/ONAPPORTALSDK/logback.xml:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/classes/logback.xml
+ - ${LOGS_DIR}:/opt/apache-tomcat-8.0.37/logs
+ command:
+ - /start-apache-tomcat.sh
+ logging:
+ driver: json-file
diff --git a/test/csit/plans/portal-sdk/testsuite/setup.sh b/test/csit/plans/portal-sdk/testsuite/setup.sh
index 0c90dc66b..6510311cb 100644
--- a/test/csit/plans/portal-sdk/testsuite/setup.sh
+++ b/test/csit/plans/portal-sdk/testsuite/setup.sh
@@ -48,7 +48,7 @@ NEXUS_DOCKER_REPO=nexus3.onap.org:10003
CURR="$(pwd)"
-git clone http://gerrit.onap.org/r/portal -b "release-1.3.0"
+git clone http://gerrit.onap.org/r/portal -b "master"
# Refresh configuration and scripts
cd portal
@@ -80,15 +80,21 @@ mkdir -p $LOGS_DIR
# Refresh images
docker login -u $NEXUS_USERNAME -p $NEXUS_PASSWD $NEXUS_DOCKER_REPO
-docker pull $NEXUS_DOCKER_REPO/${DB_IMG_NAME}:$DOCKER_IMAGE_VERSION
-docker pull $NEXUS_DOCKER_REPO/${EP_IMG_NAME}:$DOCKER_IMAGE_VERSION
-docker pull $NEXUS_DOCKER_REPO/${WMS_IMG_NAME}:$DOCKER_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$DB_IMG_NAME:$DOCKER_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$EP_IMG_NAME:$DOCKER_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$SDK_IMG_NAME:$DOCKER_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$CDR_IMG_NAME:$CDR_IMAGE_VERSION
+docker pull $ZK_IMG_NAME:$ZK_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$WMS_IMG_NAME:$DOCKER_IMAGE_VERSION
docker pull $NEXUS_DOCKER_REPO/$CLI_IMG_NAME:$CLI_DOCKER_VERSION
# Tag them as expected by docker-compose file
-docker tag $NEXUS_DOCKER_REPO/${DB_IMG_NAME}:$DOCKER_IMAGE_VERSION $DB_IMG_NAME:$PORTAL_TAG
-docker tag $NEXUS_DOCKER_REPO/${EP_IMG_NAME}:$DOCKER_IMAGE_VERSION $EP_IMG_NAME:$PORTAL_TAG
-docker tag $NEXUS_DOCKER_REPO/${WMS_IMG_NAME}:$DOCKER_IMAGE_VERSION $WMS_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$DB_IMG_NAME:$DOCKER_IMAGE_VERSION $DB_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$EP_IMG_NAME:$DOCKER_IMAGE_VERSION $EP_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$SDK_IMG_NAME:$DOCKER_IMAGE_VERSION $SDK_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$CDR_IMG_NAME:$CDR_IMAGE_VERSION $CDR_IMG_NAME:$PORTAL_TAG
+docker tag $ZK_IMG_NAME:$ZK_IMAGE_VERSION $ZK_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$WMS_IMG_NAME:$DOCKER_IMAGE_VERSION $WMS_IMG_NAME:$PORTAL_TAG
docker tag $NEXUS_DOCKER_REPO/$CLI_IMG_NAME:$CLI_DOCKER_VERSION $CLI_IMG_NAME:$PORTAL_TAG
@@ -130,7 +136,7 @@ fi
-sleep 3m
+sleep 6m
# WAIT 5 minutes maximum and test every 5 seconds if Portal up using HealthCheck API
TIME_OUT=500
@@ -170,7 +176,7 @@ HOST_IP=$(ip route get 8.8.8.8 | awk '/8.8.8.8/ {print $NF}')
export HOST_IP=${HOST_IP}
#docker logs deliveries_portal-db_1
-docker logs deliveries_portal-apps_1
+docker logs deliveries_portal-app_1
docker logs deliveries_portal-wms_1
diff --git a/test/csit/plans/portal/testsuite/.env b/test/csit/plans/portal/testsuite/.env
index 0b7de9560..27e9aa3fb 100644
--- a/test/csit/plans/portal/testsuite/.env
+++ b/test/csit/plans/portal/testsuite/.env
@@ -2,25 +2,34 @@
# used by docker-compose AND by other shell scripts
# Host directory with config files
-LOGS_DIR=./logs
-PROPS_DIR=./properties
-
-
-# Directory within containers
-WEBAPPS_DIR=/opt/apache-tomcat-8.0.37/webapps
-
# Following are ALSO used in demo/boot/portal_vm_init.sh
-EP_IMG_NAME=onap/portal-apps
+EP_IMG_NAME=onap/portal-app
+SDK_IMG_NAME=onap/portal-sdk
DB_IMG_NAME=onap/portal-db
+CDR_IMG_NAME=onap/music/cassandra_music
+ZK_IMG_NAME=zookeeper
WMS_IMG_NAME=onap/portal-wms
+# Deployed with portal; built elsewhere
CLI_IMG_NAME=onap/cli
# Tag all images with this
-PORTAL_TAG=1.3.0
-DOCKER_IMAGE_VERSION=1.3-STAGING-latest
+DOCKER_IMAGE_VERSION=2.1-STAGING-latest
CLI_DOCKER_VERSION=1.1-STAGING-latest
+CDR_IMAGE_VERSION=latest
+ZK_IMAGE_VERSION=3.4
NEXUS_DOCKER_REPO=nexus3.onap.org:10003
+# This is used during builds and in docker-compose;
+# it is never published to the ONAP registry.
+PORTAL_TAG=beijing
+
+# Name of directory in apps container (NOT host)
+WEBAPPS_DIR=/opt/apache-tomcat-8.0.37/webapps
+
+# Required settings with default values.
+# Export shell environment variables on ALL hosts.
+LOGS_DIR=./logs
+PROPS_DIR=./properties
# Optional settings with no defaults.
EXTRA_HOST_IP=""
diff --git a/test/csit/plans/portal/testsuite/docker-compose.yml b/test/csit/plans/portal/testsuite/docker-compose.yml
index 7a9fb8caa..dda74c91a 100644
--- a/test/csit/plans/portal/testsuite/docker-compose.yml
+++ b/test/csit/plans/portal/testsuite/docker-compose.yml
@@ -1,11 +1,12 @@
# docker-compose for ONAP portal containers: database, microservice, portal apps.
-# Relies on .env file in current directory.
+# Relies on .env file, which CANNOT be specified via command-line option
# Works in multiple environments; does not pull from a Nexus registry.
# Exposes the portal apps docker (but not DB nor WMS dockers) on the host network.
# Images must be pulled from ONAP Nexus registry after logging in like this:
# docker login -u USER -p PASS nexus3.onap.org:10001
+# Uses healthcheck feature added in docker-compose v2.1
-version: '2.0'
+version: '2.1'
services:
@@ -29,11 +30,51 @@ services:
volumes:
# Just specify a path and let the Engine create a volume
- /var/lib/mysql
+ # Inject the onboarding script at start time
+ - ./Apps_Users_OnBoarding_Script.sql:/docker-entrypoint-initdb.d/zzz_apps_users_onboarding.sql
logging:
driver: json-file
+ healthcheck:
+ test: [ "CMD", "mysqladmin", "ping", "-h", "localhost" ]
+ timeout: 10s
+ retries: 30
+
+ # Config files may use hostname "portal-cassandra"
+ portal-cassandra:
+ image: ${CDR_IMG_NAME}:${PORTAL_TAG}
+ environment:
+ - CASSUSER=root
+ - CASSPASS=Aa123456
+ expose:
+ - 7000
+ - 7001
+ - 7199
+ - 9042
+ - 9160
+ ports:
+ - 7000:7000
+ - 7001:7001
+ - 7199:7199
+ - 9042:9042
+ - 9160:9160
+ volumes:
+ - ./portal.cql:/docker-entrypoint-initdb.d/zzz_portal.cql
+ - ./portalsdk.cql:/docker-entrypoint-initdb.d/zzz_portalsdk.cql
+ links:
+ - portal-db
+ depends_on:
+ portal-db:
+ condition: service_healthy
+
+ # Config files may use hostname "portal-zk"
+ portal-zk:
+ image: ${ZK_IMG_NAME}:${PORTAL_TAG}
+ expose:
+ - 2181
+ ports:
+ - 2181:2181
- # An environment variable here CAN override the database URL;
- # instead the value in the config file uses hostname from above
+ # The app config file uses the docker name above
portal-wms:
image: ${WMS_IMG_NAME}:${PORTAL_TAG}
expose:
@@ -41,25 +82,18 @@ services:
links:
- portal-db
depends_on:
- - portal-db
+ portal-db:
+ condition: service_healthy
volumes:
- - ${PROPS_DIR}/ECOMPWIDGETMS/application.properties:/application.properties
+ - ${PROPS_DIR}/ONAPWIDGETMS/application.properties:/application.properties
+ - ${PROPS_DIR}/ONAPWIDGETMS/application.yml:/application.yml
command:
- - /wait-for.sh
- - -t
- - "420"
- - portal-db:3306
- - --
- - /start-wms-cmd.sh
+ - /start-wms.sh
logging:
driver: json-file
- # Environment variables here CANNOT override the database URL because
- # two apps use identical configuration keys with different values
- portal-apps:
+ portal-app:
image: ${EP_IMG_NAME}:${PORTAL_TAG}
- expose:
- - 8989
ports:
- 8989:8080
- 8010:8009
@@ -67,32 +101,51 @@ services:
links:
- portal-db
- portal-wms
+ - portal-zk
+ - portal-cassandra
depends_on:
- - portal-db
- - portal-wms
+ portal-db:
+ condition: service_healthy
+ portal-wms:
+ condition: service_started
volumes:
- - ${PROPS_DIR}/ECOMPPORTALAPP/system.properties:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/conf/system.properties
- - ${PROPS_DIR}/ECOMPPORTALAPP/fusion.properties:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/fusion/conf/fusion.properties
- - ${PROPS_DIR}/ECOMPPORTALAPP/portal.properties:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/classes/portal.properties
- - ${PROPS_DIR}/ECOMPPORTALAPP/openid-connect.properties:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/classes/openid-connect.properties
- - ${PROPS_DIR}/ECOMPPORTALAPP/logback.xml:${WEBAPPS_DIR}/ECOMPPORTAL/WEB-INF/classes/logback.xml
- - ${PROPS_DIR}/ECOMPSDKAPP/fusion.properties:${WEBAPPS_DIR}/ECOMPSDKAPP/WEB-INF/fusion/conf/fusion.properties
- - ${PROPS_DIR}/ECOMPSDKAPP/system.properties:${WEBAPPS_DIR}/ECOMPSDKAPP/WEB-INF/conf/system.properties
- - ${PROPS_DIR}/ECOMPSDKAPP/portal.properties:${WEBAPPS_DIR}/ECOMPSDKAPP/WEB-INF/classes/portal.properties
- - ${PROPS_DIR}/ECOMPDBCAPP/system.properties:${WEBAPPS_DIR}/ECOMPDBCAPP/WEB-INF/conf/system.properties
- - ${PROPS_DIR}/ECOMPDBCAPP/portal.properties:${WEBAPPS_DIR}/ECOMPDBCAPP/WEB-INF/classes/portal.properties
- - ${PROPS_DIR}/ECOMPDBCAPP/dbcapp.properties:${WEBAPPS_DIR}/ECOMPDBCAPP/WEB-INF/dbcapp/dbcapp.properties
- - ${PROPS_DIR}/ECOMPDBCAPP/fusion.properties:${WEBAPPS_DIR}/ECOMPDBCAPP/WEB-INF/fusion/conf/fusion.properties
+ - ${PROPS_DIR}/ONAPPORTAL/system.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/conf/system.properties
+ - ${PROPS_DIR}/ONAPPORTAL/fusion.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/fusion/conf/fusion.properties
+ - ${PROPS_DIR}/ONAPPORTAL/portal.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/classes/portal.properties
+ - ${PROPS_DIR}/ONAPPORTAL/music.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/classes/music.properties
+ - ${PROPS_DIR}/ONAPPORTAL/openid-connect.properties:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/classes/openid-connect.properties
+ - ${PROPS_DIR}/ONAPPORTAL/logback.xml:${WEBAPPS_DIR}/ONAPPORTAL/WEB-INF/classes/logback.xml
- ${LOGS_DIR}:/opt/apache-tomcat-8.0.37/logs
command:
- - /wait-for.sh
- - -t
- - "420"
- - portal-db:3306
- - --
- - /start-apps-cmd.sh
+ - /start-apache-tomcat.sh
# see comments in .env file
+ - -i
- $EXTRA_HOST_IP
+ - -n
- $EXTRA_HOST_NAME
logging:
driver: json-file
+
+ portal-sdk:
+ image: ${SDK_IMG_NAME}:${PORTAL_TAG}
+ ports:
+ - 8990:8080
+ links:
+ - portal-db
+ - portal-wms
+ - portal-zk
+ - portal-cassandra
+ depends_on:
+ portal-db:
+ condition: service_healthy
+ volumes:
+ - ${PROPS_DIR}/ONAPPORTALSDK/fusion.properties:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/fusion/conf/fusion.properties
+ - ${PROPS_DIR}/ONAPPORTALSDK/system.properties:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/conf/system.properties
+ - ${PROPS_DIR}/ONAPPORTALSDK/portal.properties:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/classes/portal.properties
+ - ${PROPS_DIR}/ONAPPORTALSDK/music.properties:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/classes/music.properties
+ - ${PROPS_DIR}/ONAPPORTALSDK/logback.xml:${WEBAPPS_DIR}/ONAPPORTALSDK/WEB-INF/classes/logback.xml
+ - ${LOGS_DIR}:/opt/apache-tomcat-8.0.37/logs
+ command:
+ - /start-apache-tomcat.sh
+ logging:
+ driver: json-file
diff --git a/test/csit/plans/portal/testsuite/setup.sh b/test/csit/plans/portal/testsuite/setup.sh
index 0c90dc66b..76cf5f373 100644
--- a/test/csit/plans/portal/testsuite/setup.sh
+++ b/test/csit/plans/portal/testsuite/setup.sh
@@ -48,7 +48,7 @@ NEXUS_DOCKER_REPO=nexus3.onap.org:10003
CURR="$(pwd)"
-git clone http://gerrit.onap.org/r/portal -b "release-1.3.0"
+git clone http://gerrit.onap.org/r/portal -b "master"
# Refresh configuration and scripts
cd portal
@@ -80,15 +80,21 @@ mkdir -p $LOGS_DIR
# Refresh images
docker login -u $NEXUS_USERNAME -p $NEXUS_PASSWD $NEXUS_DOCKER_REPO
-docker pull $NEXUS_DOCKER_REPO/${DB_IMG_NAME}:$DOCKER_IMAGE_VERSION
-docker pull $NEXUS_DOCKER_REPO/${EP_IMG_NAME}:$DOCKER_IMAGE_VERSION
-docker pull $NEXUS_DOCKER_REPO/${WMS_IMG_NAME}:$DOCKER_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$DB_IMG_NAME:$DOCKER_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$EP_IMG_NAME:$DOCKER_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$SDK_IMG_NAME:$DOCKER_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$CDR_IMG_NAME:$CDR_IMAGE_VERSION
+docker pull $ZK_IMG_NAME:$ZK_IMAGE_VERSION
+docker pull $NEXUS_DOCKER_REPO/$WMS_IMG_NAME:$DOCKER_IMAGE_VERSION
docker pull $NEXUS_DOCKER_REPO/$CLI_IMG_NAME:$CLI_DOCKER_VERSION
# Tag them as expected by docker-compose file
-docker tag $NEXUS_DOCKER_REPO/${DB_IMG_NAME}:$DOCKER_IMAGE_VERSION $DB_IMG_NAME:$PORTAL_TAG
-docker tag $NEXUS_DOCKER_REPO/${EP_IMG_NAME}:$DOCKER_IMAGE_VERSION $EP_IMG_NAME:$PORTAL_TAG
-docker tag $NEXUS_DOCKER_REPO/${WMS_IMG_NAME}:$DOCKER_IMAGE_VERSION $WMS_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$DB_IMG_NAME:$DOCKER_IMAGE_VERSION $DB_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$EP_IMG_NAME:$DOCKER_IMAGE_VERSION $EP_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$SDK_IMG_NAME:$DOCKER_IMAGE_VERSION $SDK_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$CDR_IMG_NAME:$CDR_IMAGE_VERSION $CDR_IMG_NAME:$PORTAL_TAG
+docker tag $ZK_IMG_NAME:$ZK_IMAGE_VERSION $ZK_IMG_NAME:$PORTAL_TAG
+docker tag $NEXUS_DOCKER_REPO/$WMS_IMG_NAME:$DOCKER_IMAGE_VERSION $WMS_IMG_NAME:$PORTAL_TAG
docker tag $NEXUS_DOCKER_REPO/$CLI_IMG_NAME:$CLI_DOCKER_VERSION $CLI_IMG_NAME:$PORTAL_TAG
@@ -130,7 +136,7 @@ fi
-sleep 3m
+sleep 6m
# WAIT 5 minutes maximum and test every 5 seconds if Portal up using HealthCheck API
TIME_OUT=500
@@ -169,8 +175,8 @@ fi
HOST_IP=$(ip route get 8.8.8.8 | awk '/8.8.8.8/ {print $NF}')
export HOST_IP=${HOST_IP}
-#docker logs deliveries_portal-db_1
-docker logs deliveries_portal-apps_1
+docker logs deliveries_portal-db_1
+docker logs deliveries_portal-app_1
docker logs deliveries_portal-wms_1
diff --git a/test/csit/plans/sdc/uiSanity/setup.sh b/test/csit/plans/sdc/uiSanity/setup.sh
index fece98ff1..8f2bafda9 100644
--- a/test/csit/plans/sdc/uiSanity/setup.sh
+++ b/test/csit/plans/sdc/uiSanity/setup.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-source ${WORKSPACE}/test/csit/scripts/sdc/clone_and_setup_sdc_data.sh
+source ${WORKSPACE}/test/csit/scripts/sdc/setup_sdc_for_ui_sanity.sh
BE_IP=`get-instance-ip.sh sdc-BE`
echo BE_IP=${BE_IP}
diff --git a/test/csit/plans/so/sanity-check/setup.sh b/test/csit/plans/so/integration-testing/setup.sh
index cd0cbdc33..cd0cbdc33 100644
--- a/test/csit/plans/so/sanity-check/setup.sh
+++ b/test/csit/plans/so/integration-testing/setup.sh
diff --git a/test/csit/plans/so/sanity-check/teardown.sh b/test/csit/plans/so/integration-testing/teardown.sh
index 1696c745c..1696c745c 100644
--- a/test/csit/plans/so/sanity-check/teardown.sh
+++ b/test/csit/plans/so/integration-testing/teardown.sh
diff --git a/test/csit/plans/so/sanity-check/testplan.txt b/test/csit/plans/so/integration-testing/testplan.txt
index 761c75c03..761c75c03 100644
--- a/test/csit/plans/so/sanity-check/testplan.txt
+++ b/test/csit/plans/so/integration-testing/testplan.txt
diff --git a/test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/setup.sh
new file mode 100644
index 000000000..4d97f33eb
--- /dev/null
+++ b/test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/setup.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+#
+# Copyright 2018 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Place the scripts in run order:
+# Start all process required for executing test case
+
+#login to the onap nexus docker repo
+docker login -u docker -p docker nexus3.onap.org:10001
+
+# Start MSB
+docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
+CONSUL_IP=`get-instance-ip.sh msb_consul`
+echo CONSUL_IP=${CONSUL_IP}
+docker run -d -p 10081:10081 -e CONSUL_IP=$CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
+DISCOVERY_IP=`get-instance-ip.sh msb_discovery`
+echo DISCOVERY_IP=${DISCOVERY_IP}
+docker run -d -p 80:80 -e CONSUL_IP=$CONSUL_IP -e SDCLIENT_IP=$DISCOVERY_IP --name msb_internal_apigateway nexus3.onap.org:10001/onap/msb/msb_apigateway
+MSB_IP=`get-instance-ip.sh msb_internal_apigateway`
+echo MSB_IP=${MSB_IP}
+
+# Start resmgr
+docker run -d --name vfc-multivimproxy -e MSB_ADDR=${MSB_IP}:80 nexus3.onap.org:10001/onap/vfc/multivimproxy
+RESMGR_IP=`get-instance-ip.sh vfc-multivimproxy`
+for i in {1..20}; do
+ curl -sS ${RESMGR_IP}:8486 && break
+ echo sleep $i
+ sleep $i
+done
+
+# Pass any variables required by Robot test suites in ROBOT_VARIABLES
+ROBOT_VARIABLES="-v MSB_IP:${MSB_IP} -v RESMGR_IP:${RESMGR_IP}"
diff --git a/test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/teardown.sh b/test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/teardown.sh
new file mode 100644
index 000000000..c158d1997
--- /dev/null
+++ b/test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/teardown.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+#
+# Copyright 2018 Huawei Technologies Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This script is sourced by run-csit.sh after Robot test completion.
+kill-instance.sh msb_internal_apigateway
+kill-instance.sh msb_discovery
+kill-instance.sh msb_consul
+kill-instance.sh vfc-multivimproxy
diff --git a/test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/testplan.txt b/test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/testplan.txt
new file mode 100644
index 000000000..f81be8ecf
--- /dev/null
+++ b/test/csit/plans/vfc-nfvo-multivimproxy/sanity-check/testplan.txt
@@ -0,0 +1,3 @@
+# Test suites are relative paths under [integration.git]/test/csit/tests/.
+# Place the suites in run order.
+vfc/nfvo-multivimproxy/test.robot \ No newline at end of file
diff --git a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt
index 5f6910bdd..ff9f4d5d6 100644
--- a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt
+++ b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/testplan.txt
@@ -1,4 +1,4 @@
-# Test suites are relative paths under [integration.git]/test/csit/tests/.
-# Place the suites in run order.
-
+# Test suites are relative paths under [integration.git]/test/csit/tests/.
+# Place the suites in run order.
+
vfc/nfvo-wfengine/workflow.robot \ No newline at end of file
diff --git a/test/csit/plans/vnfsdk-ice/sanity-check/setup.sh b/test/csit/plans/vnfsdk-ice/sanity-check/setup.sh
index 7e777a133..22d9acddc 100644
--- a/test/csit/plans/vnfsdk-ice/sanity-check/setup.sh
+++ b/test/csit/plans/vnfsdk-ice/sanity-check/setup.sh
@@ -18,7 +18,7 @@
#Start ice server
-docker run --rm --name vnfsdk-ice -d -p 5000:5000 onap/vnfsdk/ice
+docker run --name vnfsdk-ice -d -p 5000:5000 nexus3.onap.org:10001/onap/vnfsdk/ice:latest
# Wait for server initialization
echo Wait for vnfsdk-ice initialization
diff --git a/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh b/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh
index e6e12f2d5..92e2f02ff 100755
--- a/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh
+++ b/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh
@@ -34,7 +34,7 @@ cd clamp/extra/docker/clamp/
sed -i '/image: onap\/clamp/c\ image: nexus3.onap.org:10001\/onap\/clamp' docker-compose.yml
# Change config to take third_party_proxy:8085 for SDC, Policy and DCAE simulator
-sed -i 's/\"classpath:\/clds\/clds-policy-config.properties\"/\"file:.\/config\/clds-policy-config-third_party_proxy.properties\",\"clamp.config.sdc.catalog.url\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/\",\"clamp.config.sdc.hostUrl\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.sdc.serviceUrl\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/services\",\"clamp.config.dcae.inventory.url\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.dcae.dispatcher.url\":\"http:\/\/third_party_proxy:8085\"/g' clamp.env
+sed -i 's/}/,\"clamp.config.policy.pdpUrl1\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.pdpUrl2\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.papUrl\":\"http:\/\/third_party_proxy:8085\/pap\/ , testpap, alpha123\",\"clamp.config.policy.clientId\":\"python\",\"clamp.config.policy.clientKey\":\"dGVzdA==\",\"clamp.config.sdc.catalog.url\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/\",\"clamp.config.sdc.hostUrl\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.sdc.serviceUrl\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/services\",\"clamp.config.dcae.inventory.url\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.dcae.dispatcher.url\":\"http:\/\/third_party_proxy:8085\"}/g' clamp.env
# Add the sql to create template so it is played by docker-compose later
cp ../../../src/test/resources/sql/four_templates_only.sql ../../sql/bulkload/
diff --git a/test/csit/scripts/clamp/start_clamp_containers.sh b/test/csit/scripts/clamp/start_clamp_containers.sh
index e49a591dd..11ddda1e4 100755
--- a/test/csit/scripts/clamp/start_clamp_containers.sh
+++ b/test/csit/scripts/clamp/start_clamp_containers.sh
@@ -51,7 +51,7 @@ fi
# To avoid some problem because templates not yet read
TIME=0
while [ "$TIME" -lt "$TIME_OUT" ]; do
- response=$(curl --write-out '%{http_code}' --silent --output /dev/null -u admin:5f4dcc3b5aa765d61d8327deb882cf99 http://localhost:8080/restservices/clds/v1/cldsTempate/template-names); echo $response
+ response=$(curl --write-out '%{http_code}' --silent --output /dev/null -u admin:password http://localhost:8080/restservices/clds/v1/cldsTempate/template-names); echo $response
if [ "$response" == "200" ]; then
echo Templates well available
diff --git a/test/csit/scripts/optf-has/has/has-properties/conductor.conf.onap b/test/csit/scripts/optf-has/has/has-properties/conductor.conf.onap
index d3e391cde..0f9e7494d 100644
--- a/test/csit/scripts/optf-has/has/has-properties/conductor.conf.onap
+++ b/test/csit/scripts/optf-has/has/has-properties/conductor.conf.onap
@@ -247,9 +247,15 @@ timeout = 3600
#
# From conductor
#
+music_new_version = True
# Base URL for Music REST API without a trailing slash. (string value)
server_url = http://localhost:8080/MUSIC/rest/v2
+version = v2
+music_version = "2.4.22"
+aafuser = conductor
+aafpass = c0nduct0r
+aafns = conductor
# DEPRECATED: List of hostnames (round-robin access) (list value)
# This option is deprecated for removal.
diff --git a/test/csit/scripts/optf-has/has/has_script.sh b/test/csit/scripts/optf-has/has/has_script.sh
index 242f5f63e..ac907eea1 100755
--- a/test/csit/scripts/optf-has/has/has_script.sh
+++ b/test/csit/scripts/optf-has/has/has_script.sh
@@ -53,21 +53,32 @@ sed -i -e "s%localhost:8080/MUSIC%${MUSIC_IP}:8080/MUSIC%g" /tmp/conductor/prop
AAISIM_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' aaisim`
echo "AAISIM_IP=${AAISIM_IP}"
-# change MUSIC reference to the local instance
+# change AAI reference to the local instance
sed -i -e "s%localhost:8081/%${AAISIM_IP}:8081/%g" /tmp/conductor/properties/conductor.conf
+MULTICLOUDSIM_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' multicloudsim`
+echo "MULTICLOUDSIM_IP=${MULTICLOUDSIM_IP}"
+
+# change MULTICLOUD reference to the local instance
+sed -i -e "s%localhost:8082/%${MULTICLOUDSIM_IP}:8082/%g" /tmp/conductor/properties/conductor.conf
+
+#onboard conductor into music
+curl -vvvvv --noproxy "*" --request POST http://${MUSIC_IP}:8080/MUSIC/rest/v2/admin/onboardAppWithMusic -H "Content-Type: application/json" --data @${WORKSPACE}/test/csit/tests/optf-has/has/data/onboard.json
+
docker run -d --name cond-cont -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:latest python /usr/local/bin/conductor-controller --config-file=/usr/local/bin/conductor.conf
+sleep 2
docker run -d --name cond-api -p "8091:8091" -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:latest python /usr/local/bin/conductor-api --port=8091 -- --config-file=/usr/local/bin/conductor.conf
+sleep 2
docker run -d --name cond-solv -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:latest python /usr/local/bin/conductor-solver --config-file=/usr/local/bin/conductor.conf
+sleep 2
docker run -d --name cond-resv -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:latest python /usr/local/bin/conductor-reservation --config-file=/usr/local/bin/conductor.conf
+sleep 2
docker run -d --name cond-data -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf -v ${CERT}:/usr/local/bin/cert.cer -v ${KEY}:/usr/local/bin/cert.key -v ${BUNDLE}:/usr/local/bin/cert.pem ${IMAGE_NAME}:latest python /usr/local/bin/conductor-data --config-file=/usr/local/bin/conductor.conf
+sleep 2
COND_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' cond-api`
${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${COND_IP} 8091
-# wait a while before continuing
-sleep 5
-
echo "inspect docker things for tracing purpose"
docker inspect cond-data
docker inspect cond-cont
@@ -75,6 +86,7 @@ docker inspect cond-api
docker inspect cond-solv
docker inspect cond-resv
-docker exec -it music-db /usr/bin/nodetool status
-docker exec -it music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM system_schema.keyspaces'
-docker exec -it music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM admin.keyspace_master'
+echo "dump music content just after conductor is started"
+docker exec music-db /usr/bin/nodetool status
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM system_schema.keyspaces'
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM admin.keyspace_master'
diff --git a/test/csit/scripts/optf-has/has/has_teardown_script.sh b/test/csit/scripts/optf-has/has/has_teardown_script.sh
index 40a536a87..bef28b435 100755
--- a/test/csit/scripts/optf-has/has/has_teardown_script.sh
+++ b/test/csit/scripts/optf-has/has/has_teardown_script.sh
@@ -14,6 +14,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+echo "print meaningful data before scratching everything"
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM system_schema.keyspaces'
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM admin.keyspace_master'
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM conductor.plans'
+
echo "optf/has scripts docker containers killing";
docker stop cond-api
docker stop cond-solv
diff --git a/test/csit/scripts/optf-has/has/music_script.sh b/test/csit/scripts/optf-has/has/music_script.sh
index be4eb8c63..fdd9a3e26 100755
--- a/test/csit/scripts/optf-has/has/music_script.sh
+++ b/test/csit/scripts/optf-has/has/music_script.sh
@@ -47,15 +47,22 @@ docker network create music-net;
# Start Cassandra
docker run -d --name music-db --network music-net -p "7000:7000" -p "7001:7001" -p "7199:7199" -p "9042:9042" -p "9160:9160" -e CASSUSER=${CASS_USERNAME} -e CASSPASS=${CASS_PASSWORD} ${CASS_IMG};
+#CASSA_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' music-db`
+CASSA_IP=`docker inspect -f '{{ $network := index .NetworkSettings.Networks "music-net" }}{{ $network.IPAddress}}' music-db`
+echo "CASSANDRA_IP=${CASSA_IP}"
+${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${CASSA_IP} 9042
# Start Music war
docker run -d --name music-war -v music-vol:/app ${MUSIC_IMG};
# Start Zookeeper
docker run -d --name music-zk --network music-net -p "2181:2181" -p "2888:2888" -p "3888:3888" ${ZK_IMG};
+#ZOO_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' music-zk`
+ZOO_IP=`docker inspect -f '{{ $network := index .NetworkSettings.Networks "music-net" }}{{ $network.IPAddress}}' music-zk`
+echo "ZOOKEEPER_IP=${ZOO_IP}"
-# Delay for Cassandra
-sleep 20;
+# Delay between Cassandra/Zookeeper and Tomcat
+sleep 60;
# Start Up tomcat - Needs to have properties,logs dir and war file volume mapped.
docker run -d --name music-tomcat --network music-net -p "8080:8080" -v music-vol:/usr/local/tomcat/webapps -v ${WORK_DIR}/properties:/opt/app/music/etc:ro -v ${WORK_DIR}/logs:/opt/app/music/logs ${TOMCAT_IMG};
@@ -67,19 +74,13 @@ docker network connect bridge music-tomcat;
# add here below the start of all docker containers needed for music CSIT testing
#
-CASSA_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' music-db`
-echo "CASSANDRA_IP=${CASSA_IP}"
-
-ZOO_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' music-zk`
-echo "ZOOKEEPER_IP=${ZOO_IP}"
-
-${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${CASSA_IP} 9042
-
TOMCAT_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' music-tomcat`
echo "TOMCAT_IP=${TOMCAT_IP}"
${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${TOMCAT_IP} 8080
+# wait a while to make sure music is totally up and configured
+sleep 10
echo "inspect docker things for tracing purpose"
docker inspect music-db
@@ -89,8 +90,10 @@ docker inspect music-war
docker volume inspect music-vol
docker network inspect music-net
-
-
+echo "dump music content just after music is started"
+docker exec music-db /usr/bin/nodetool status
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM system_schema.keyspaces'
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM admin.keyspace_master'
diff --git a/test/csit/scripts/optf-has/has/music_teardown_script.sh b/test/csit/scripts/optf-has/has/music_teardown_script.sh
index fa91f6e97..605ebd4e8 100755
--- a/test/csit/scripts/optf-has/has/music_teardown_script.sh
+++ b/test/csit/scripts/optf-has/has/music_teardown_script.sh
@@ -34,6 +34,6 @@ echo "dump music.log files"
ls -alF /tmp/music
ls -alF /tmp/music/properties
cat /tmp/music/properties/music.properties
-cat /tmp/music/logs/MUSIC/music.log
+#cat /tmp/music/logs/MUSIC/music.log
cat /tmp/music/logs/MUSIC/error.log
diff --git a/test/csit/scripts/optf-has/has/simulator_script.sh b/test/csit/scripts/optf-has/has/simulator_script.sh
index 524728270..2e8a0e6f4 100755
--- a/test/csit/scripts/optf-has/has/simulator_script.sh
+++ b/test/csit/scripts/optf-has/has/simulator_script.sh
@@ -30,11 +30,10 @@ cd ${DIR}
# omit the -p parameter to create a temporal directory in the default location
WORK_DIR=`mktemp -d -p "$DIR"`
echo ${WORK_DIR}
-
cd ${WORK_DIR}
+# clone optf-has project
git clone https://gerrit.onap.org/r/optf/has
-cd has/conductor/conductor/tests/functional/simulators/aaisim/
#echo "i am ${USER} : only non jenkins users may need proxy settings"
if [ ${USER} != 'jenkins' ]; then
@@ -44,6 +43,9 @@ if [ ${USER} != 'jenkins' ]; then
fi
+# prepare aaisim
+cd ${WORK_DIR}/has/conductor/conductor/tests/functional/simulators/aaisim/
+
# check Dockerfile content
cat ./Dockerfile
@@ -58,8 +60,26 @@ echo "AAISIM_IP=${AAISIM_IP}"
${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${AAISIM_IP} 8081
+# prepare multicloudsim
+cd ${WORK_DIR}/has/conductor/conductor/tests/functional/simulators/multicloudsim/
+
+# check Dockerfile content
+cat ./Dockerfile
+
+# build multicloudsim
+docker build -t multicloudsim .
+
+# run multicloudsim
+docker run -d --name multicloudsim -p 8082:8082 multicloudsim
+
+MULTICLOUDSIM_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' multicloudsim`
+echo "MULTICLOUDSIM_IP=${MULTICLOUDSIM_IP}"
+
+${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${MULTICLOUDSIM_IP} 8082
+
# wait a while before continuing
-sleep 5
+sleep 2
echo "inspect docker things for tracing purpose"
docker inspect aaisim
+docker inspect multicloudsim
diff --git a/test/csit/scripts/optf-has/has/simulator_teardown_script.sh b/test/csit/scripts/optf-has/has/simulator_teardown_script.sh
index e300a985a..a2edd3328 100755
--- a/test/csit/scripts/optf-has/has/simulator_teardown_script.sh
+++ b/test/csit/scripts/optf-has/has/simulator_teardown_script.sh
@@ -16,6 +16,7 @@
#
echo "optf/has scripts docker containers killing";
docker stop aaisim
+docker stop multicloudsim
docker rm aaisim
-
+docker rm multicloudsim
diff --git a/test/csit/scripts/policy/script1.sh b/test/csit/scripts/policy/script1.sh
index cfa29ec6f..0db34f080 100755
--- a/test/csit/scripts/policy/script1.sh
+++ b/test/csit/scripts/policy/script1.sh
@@ -105,8 +105,6 @@ fi
docker ps
-#sleep 4m
-
POLICY_IP=`docker inspect --format '{{ .NetworkSettings.Networks.docker_default.IPAddress}}' drools`
echo ${POLICY_IP}
@@ -125,11 +123,26 @@ echo ${NEXUS_IP}
MARIADB_IP=`docker inspect --format '{{ .NetworkSettings.Networks.docker_default.IPAddress}}' mariadb`
echo ${MARIADB_IP}
+sleep 5m
+
+netstat -tnl
+
+docker logs mariadb
${DIR}/wait_for_port.sh ${MARIADB_IP} 3306
+
+docker logs pap
${DIR}/wait_for_port.sh ${PAP_IP} 9091
+
+docker logs pdp
${DIR}/wait_for_port.sh ${PDP_IP} 8081
+
+docker logs brmsgw
${DIR}/wait_for_port.sh ${BRMS_IP} 9989
+
+docker logs nexus
${DIR}/wait_for_port.sh ${NEXUS_IP} 8081
+
+docker logs drools
${DIR}/wait_for_port.sh ${POLICY_IP} 6969
TIME_OUT=600
diff --git a/test/csit/tests/aaf/aaf-sms-suite/__init__.robot b/test/csit/tests/aaf/aaf-sms-suite/__init__.robot
new file mode 100644
index 000000000..d1da7f385
--- /dev/null
+++ b/test/csit/tests/aaf/aaf-sms-suite/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation Integration - Suite 1 \ No newline at end of file
diff --git a/test/csit/tests/aaf/aaf-sms-suite/aaf-sms-test.robot b/test/csit/tests/aaf/aaf-sms-suite/aaf-sms-test.robot
new file mode 100644
index 000000000..1302abc79
--- /dev/null
+++ b/test/csit/tests/aaf/aaf-sms-suite/aaf-sms-test.robot
@@ -0,0 +1,94 @@
+*** Settings ***
+Library OperatingSystem
+Library RequestsLibrary
+Library json
+
+*** Variables ***
+${MESSAGE} {"ping": "ok"}
+
+#global variables
+${generatedAID}
+
+*** Test Cases ***
+SMS Check SMS API Docker Container
+ [Documentation] Checks if SMS docker container is running
+ ${rc} ${output}= Run and Return RC and Output docker ps
+ Log To Console *********************
+    Log To Console              return_code = ${rc}
+ Log To Console output = ${output}
+ Should Be Equal As Integers ${rc} 0
+ Should Contain ${output} nexus3.onap.org:10001/onap/aaf/sms
+
+SMS GetStatus
+ [Documentation] Gets Backend Status
+ Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request SMS /v1/sms/status headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+
+SMS CreateDomain
+ [Documentation] Creates a Secret Domain to hold Secrets
+ Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT}
+ ${data} Get Binary File ${CURDIR}${/}data${/}create_domain.json
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Post Request SMS /v1/sms/domain data=${data} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 201
+
+SMS CreateSecret
+ [Documentation] Create A Secret within the Domain
+ Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT}
+ ${data} Get Binary File ${CURDIR}${/}data${/}create_secret.json
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Post Request SMS /v1/sms/domain/curltestdomain/secret data=${data} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 201
+
+SMS ListSecret
+ [Documentation] Lists all Secret Names within Domain
+ Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request SMS /v1/sms/domain/curltestdomain/secret headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+
+SMS GetSecret
+ [Documentation] Gets a single Secret with Values from Domain
+ Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request SMS /v1/sms/domain/curltestdomain/secret/curltestsecret1 headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+
+SMS DeleteSecret
+ [Documentation] Deletes a Secret referenced by Name from Domain
+ Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Delete Request SMS /v1/sms/domain/curltestdomain/secret/curltestsecret1 headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 204
+
+SMS DeleteDomain
+ [Documentation] Deletes a Domain referenced by Name
+ Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Delete Request SMS /v1/sms/domain/curltestdomain headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 204
+
+*** Keywords ***
diff --git a/test/csit/tests/aaf/aaf-sms-suite/data/create_domain.json b/test/csit/tests/aaf/aaf-sms-suite/data/create_domain.json
new file mode 100644
index 000000000..176f44431
--- /dev/null
+++ b/test/csit/tests/aaf/aaf-sms-suite/data/create_domain.json
@@ -0,0 +1,3 @@
+{
+ "name": "curltestdomain"
+}
diff --git a/test/csit/tests/aaf/aaf-sms-suite/data/create_secret.json b/test/csit/tests/aaf/aaf-sms-suite/data/create_secret.json
new file mode 100644
index 000000000..d99f4e2e0
--- /dev/null
+++ b/test/csit/tests/aaf/aaf-sms-suite/data/create_secret.json
@@ -0,0 +1,12 @@
+{
+ "name": "curltestsecret1",
+ "values": {
+ "name":"rah",
+ "age":35,
+ "map":{
+ "mapkey1": "mapvalue1",
+ "mapkey2": "mapvalue2"
+ },
+ "array":["golang","c++","java","python"]
+ }
+}
diff --git a/test/csit/tests/clamp/APIs/01__Create_CL_Holmes.robot b/test/csit/tests/clamp/APIs/01__Create_CL_Holmes.robot
index 86e792313..f820ef632 100644
--- a/test/csit/tests/clamp/APIs/01__Create_CL_Holmes.robot
+++ b/test/csit/tests/clamp/APIs/01__Create_CL_Holmes.robot
@@ -4,6 +4,9 @@ Library RequestsLibrary
Library OperatingSystem
Library json
+*** Variables ***
+${login} admin
+${passw} password
*** Test Cases ***
Get Requests health check ok
@@ -12,7 +15,7 @@ Get Requests health check ok
Should Be Equal As Strings ${resp.status_code} 200
Get Requests verify test template found
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/cldsTempate/template-names
Should Be Equal As Strings ${resp.status_code} 200
@@ -21,7 +24,7 @@ Get Requests verify test template found
Should Not Contain Match ${resp} *templateHolmes99*
Put Requests to add Close Loop ClHolmes1
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${data}= Get Binary File ${CURDIR}${/}data${/}createClHolmes1.json
&{headers}= Create Dictionary Content-Type=application/json
@@ -29,7 +32,7 @@ Put Requests to add Close Loop ClHolmes1
Should Be Equal As Strings ${resp.status_code} 200
Put Requests to add Close Loop ClHolmes2
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${data}= Get Binary File ${CURDIR}${/}data${/}createClHolmes2.json
&{headers}= Create Dictionary Content-Type=application/json
@@ -37,7 +40,7 @@ Put Requests to add Close Loop ClHolmes2
Should Be Equal As Strings ${resp.status_code} 200
Get Requests verify CL1 found
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model-names
Should Be Equal As Strings ${resp.status_code} 200
diff --git a/test/csit/tests/clamp/APIs/02__Create_CL_TCA.robot b/test/csit/tests/clamp/APIs/02__Create_CL_TCA.robot
index 4805ced60..5dfa87904 100644
--- a/test/csit/tests/clamp/APIs/02__Create_CL_TCA.robot
+++ b/test/csit/tests/clamp/APIs/02__Create_CL_TCA.robot
@@ -4,6 +4,9 @@ Library RequestsLibrary
Library OperatingSystem
Library json
+*** Variables ***
+${login} admin
+${passw} password
*** Test Cases ***
Get Requests health check ok
@@ -12,7 +15,7 @@ Get Requests health check ok
Should Be Equal As Strings ${resp.status_code} 200
Get Requests verify test template found
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/cldsTempate/template-names
Should Be Equal As Strings ${resp.status_code} 200
@@ -21,7 +24,7 @@ Get Requests verify test template found
Should Not Contain Match ${resp} *templateTCA99*
Put Requests to add Close Loop ClHolmes1
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${data}= Get Binary File ${CURDIR}${/}data${/}createClTCA1.json
&{headers}= Create Dictionary Content-Type=application/json
@@ -29,7 +32,7 @@ Put Requests to add Close Loop ClHolmes1
Should Be Equal As Strings ${resp.status_code} 200
Put Requests to add Close Loop ClHolmes2
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${data}= Get Binary File ${CURDIR}${/}data${/}createClTCA2.json
&{headers}= Create Dictionary Content-Type=application/json
@@ -37,7 +40,7 @@ Put Requests to add Close Loop ClHolmes2
Should Be Equal As Strings ${resp.status_code} 200
Get Requests verify CL1 found
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model-names
Should Be Equal As Strings ${resp.status_code} 200
diff --git a/test/csit/tests/clamp/APIs/03__VariousApis.robot b/test/csit/tests/clamp/APIs/03__VariousApis.robot
index f875e4c62..fb671ca41 100644
--- a/test/csit/tests/clamp/APIs/03__VariousApis.robot
+++ b/test/csit/tests/clamp/APIs/03__VariousApis.robot
@@ -4,24 +4,27 @@ Library RequestsLibrary
Library OperatingSystem
Library json
+*** Variables ***
+${login} admin
+${passw} password
*** Test Cases ***
Get Clamp properties
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/properties
Dictionary Should Contain Key ${resp.json()} global
Dictionary Should Contain Key ${resp.json()['global']} location
Get Clamp Info
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/cldsInfo
Dictionary Should Contain Key ${resp.json()} userName
Dictionary Should Contain Key ${resp.json()} cldsVersion
Get model bpmn by name
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model/bpmn/ClHolmes1
Should Contain Match ${resp} *StartEvent_*
@@ -31,7 +34,7 @@ Get model bpmn by name
Should Contain Match ${resp} *EndEvent_*
Get model names
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model-names
Should Contain Match ${resp} *ClHolmes1*
diff --git a/test/csit/tests/clamp/APIs/04__Verify_API_Models.robot b/test/csit/tests/clamp/APIs/04__Verify_API_Models.robot
index e7b2fb66d..3ae6aeeb9 100644
--- a/test/csit/tests/clamp/APIs/04__Verify_API_Models.robot
+++ b/test/csit/tests/clamp/APIs/04__Verify_API_Models.robot
@@ -4,9 +4,13 @@ Library RequestsLibrary
Library OperatingSystem
Library json
+*** Variables ***
+${login} admin
+${passw} password
+
*** Test Cases ***
Verify HolmesModel1
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model/ClHolmes1
Should Contain Match ${resp} *templateHolmes1*
@@ -19,7 +23,7 @@ Verify HolmesModel1
Should Contain Match ${resp} *Config Policy name1*
Verify HolmesModel2
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model/ClHolmes2
Should Contain Match ${resp} *templateHolmes2*
@@ -33,7 +37,7 @@ Verify HolmesModel2
Should Contain Match ${resp} *Config Policy Name2*
Verify TCAModel1
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model/ClTCA1
Should Contain Match ${resp} *templateTCA1*
@@ -46,7 +50,7 @@ Verify TCAModel1
Should Contain Match ${resp} *ONSET*
Verify TCAModel2
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model/ClTCA2
Should Contain Match ${resp} *templateTCA2*
@@ -61,7 +65,7 @@ Verify TCAModel2
Should Contain Match ${resp} *VM*
Get model names
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model-names
Should Contain Match ${resp} *ClHolmes1*
diff --git a/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot b/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot
index a94255099..3f87179cb 100644
--- a/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot
+++ b/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot
@@ -7,6 +7,8 @@ Library Selenium2Library
Library XvfbRobot
*** Variables ***
+${login} admin
+${passw} password
${SELENIUM_SPEED_FAST} .2 seconds
${SELENIUM_SPEED_SLOW} .5 seconds
@@ -26,8 +28,8 @@ Open Browser
Should Be Equal CLDS ${title}
Good Login to Clamp UI and Verify logged in
- Input Text locator=username text=admin
- Input Text locator=password text=password
+ Input Text locator=username text=${login}
+ Input Text locator=password text=${passw}
Press Key locator=password key=\\13
Wait Until Element Is Visible xpath=//*[@class="navbar-brand logo_name ng-binding"] timeout=60
Element Text Should Be xpath=//*[@class="navbar-brand logo_name ng-binding"] expected=Hello:admin
@@ -84,7 +86,7 @@ Close Browser
Close Browser
Verify Holmes CL well created
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model-names
Should Contain Match ${resp} *HolmesModel1*
diff --git a/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot b/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot
index caacec36a..99d93c312 100644
--- a/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot
+++ b/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot
@@ -7,6 +7,8 @@ Library Selenium2Library
Library XvfbRobot
*** Variables ***
+${login} admin
+${passw} password
${SELENIUM_SPEED_FAST} .2 seconds
${SELENIUM_SPEED_SLOW} .5 seconds
@@ -26,8 +28,8 @@ Open Browser
Should Be Equal CLDS ${title}
Good Login to Clamp UI and Verify logged in
- Input Text locator=username text=admin
- Input Text locator=password text=password
+ Input Text locator=username text=${login}
+ Input Text locator=password text=${passw}
Press Key locator=password key=\\13
Wait Until Element Is Visible xpath=//*[@class="navbar-brand logo_name ng-binding"] timeout=60
Element Text Should Be xpath=//*[@class="navbar-brand logo_name ng-binding"] expected=Hello:admin
@@ -63,7 +65,19 @@ Set Policy Box properties for TCAModel1
Input Text locator=timeout text=400
Click Button locator=Close
-### Cannot set TCA box attributes due to element not interractable with Selenium
+Set TCA Box properties for TCAModel1
+ Wait Until Element Is Visible xpath=//*[@data-element-id="Policy_12lup3h"] timeout=60
+ Click Element xpath=//*[@data-element-id="TCA_1d13unw"]
+ Input Text xpath=(//input[@id='tname'])[2] text=TCA1
+ Select From List By Label xpath=//*[@id="tcaPol"] Policy2
+ Select From List By Label xpath=//*[@id="eventName"] vCPEvGMUXPacketLoss
+### Cannot set all TCA box attributes due to element not interactable with Selenium
+# Select From List By Label xpath=//*[@id="controlLoopSchemaType"] VNF
+# Select From List By Index xpath=//*[@id="controlLoopSchemaType"] 1
+ Click Element xpath=(//button[@id='createNewThresh'])[2]
+ Input Text xpath=(//input[@id='threshold'])[2] 6
+# Select From List By Label xpath=//*[@id="closedLoopEventStatus"] ONSET
+ Click Button id=savePropsBtn
Save Model from Menu
Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[1]/a timeout=60
@@ -79,7 +93,7 @@ Close Browser
Close Browser
Verify TCA CL well create
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model-names
Should Contain Match ${resp} *TCAModel1*
diff --git a/test/csit/tests/clamp/UIs/03__Verify_UI_Models.robot b/test/csit/tests/clamp/UIs/03__Verify_UI_Models.robot
index 8aa74f420..70cbf1262 100644
--- a/test/csit/tests/clamp/UIs/03__Verify_UI_Models.robot
+++ b/test/csit/tests/clamp/UIs/03__Verify_UI_Models.robot
@@ -4,9 +4,13 @@ Library RequestsLibrary
Library OperatingSystem
Library json
+*** Variables ***
+${login} admin
+${passw} password
+
*** Test Cases ***
Verify HolmesModel1
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model/HolmesModel1
Should Contain Match ${resp} *templateHolmes1*
@@ -21,7 +25,7 @@ Verify HolmesModel1
Should Contain Match ${resp} *config Policy Name1*
Verify TCAModel1
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model/TCAModel1
Should Contain Match ${resp} *templateTCA1*
@@ -35,7 +39,7 @@ Verify TCAModel1
Should Contain Match ${resp} *400*
Get model names
- ${auth}= Create List admin 5f4dcc3b5aa765d61d8327deb882cf99
+ ${auth}= Create List ${login} ${passw}
Create Session clamp http://localhost:8080 auth=${auth}
${resp}= Get Request clamp /restservices/clds/v1/clds/model-names
Should Contain Match ${resp} *HolmesModel1*
diff --git a/test/csit/tests/clamp/UIs/04__Submit_deploy_chain_Holmes.robot b/test/csit/tests/clamp/UIs/04__Submit_deploy_chain_Holmes.robot
new file mode 100644
index 000000000..f400156d1
--- /dev/null
+++ b/test/csit/tests/clamp/UIs/04__Submit_deploy_chain_Holmes.robot
@@ -0,0 +1,130 @@
+*** Settings ***
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+Library Selenium2Library
+Library XvfbRobot
+
+*** Variables ***
+${login} admin
+${passw} password
+${SELENIUM_SPEED_FAST} .2 seconds
+${SELENIUM_SPEED_SLOW} .5 seconds
+
+*** Test Cases ***
+Get Requests health check ok
+ CreateSession clamp http://localhost:8080
+ ${resp}= Get Request clamp /restservices/clds/v1/clds/healthcheck
+ Should Be Equal As Strings ${resp.status_code} 200
+
+Open Browser
+# Next line is to be enabled for Headless tests only (jenkins?). To see the tests disable the line.
+ Start Virtual Display 1920 1080
+ Open Browser http://localhost:8080/designer/index.html browser=firefox
+ Set Selenium Speed ${SELENIUM_SPEED_SLOW}
+ Set Window Size 1920 1080
+ ${title}= Get Title
+ Should Be Equal CLDS ${title}
+
+Good Login to Clamp UI and Verify logged in
+ Input Text locator=username text=${login}
+ Input Text locator=password text=${passw}
+ Press Key locator=password key=\\13
+ Wait Until Element Is Visible xpath=//*[@class="navbar-brand logo_name ng-binding"] timeout=60
+ Element Text Should Be xpath=//*[@class="navbar-brand logo_name ng-binding"] expected=Hello:admin
+
+Open Holmes CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[1]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[1]/a
+ Wait Until Element Is Visible locator=Open CL timeout=60
+ Click Element locator=Open CL
+ Select From List By Label id=modelName HolmesModel1
+ Click Button locator=OK
+ Element Should Contain xpath=//*[@id="modeler_name"] Closed Loop Modeler - HolmesModel1
+ Element Should Contain xpath=//*[@id="status_clds"] DESIGN
+
+Validate-Test Holmes CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Validation Test timeout=60
+ Click Element locator=Validation Test
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:TEST
+ Element Should Contain xpath=//*[@id="status_clds"] DESIGN
+
+Submit Holmes CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Submit timeout=60
+ Click Element locator=Submit
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:SUBMIT
+ Element Should Contain xpath=//*[@id="status_clds"] DISTRIBUTED
+
+Resubmit Holmes CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Resubmit timeout=60
+ Click Element locator=Resubmit
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:RESUBMIT
+ Element Should Contain xpath=//*[@id="status_clds"] DISTRIBUTED
+
+Deploy Holmes CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Deploy timeout=60
+ Click Element locator=Deploy
+ Wait Until Element Is Visible xpath=//*[@id="deployProperties"] timeout=60
+ Input Text xpath=//*[@id="deployProperties"] text={}
+ Click Button locator=Deploy
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:deploy
+ Element Should Contain xpath=//*[@id="status_clds"] ACTIVE
+
+Update Holmes CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Update timeout=60
+ Click Element locator=Update
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:UPDATE
+ Element Should Contain xpath=//*[@id="status_clds"] ACTIVE
+
+Stop Holmes CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Stop timeout=60
+ Click Element locator=Stop
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:STOP
+ Element Should Contain xpath=//*[@id="status_clds"] STOPPED
+
+Restart Holmes CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Restart timeout=60
+ Click Element locator=Restart
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:RESTART
+ Element Should Contain xpath=//*[@id="status_clds"] ACTIVE
+
+UnDeploy Holmes CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=UnDeploy timeout=60
+ Click Element locator=UnDeploy
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:undeploy
+ Element Should Contain xpath=//*[@id="status_clds"] DISTRIBUTED
+
+Close Browser
+ Close Browser
diff --git a/test/csit/tests/clamp/UIs/05__Submit_deploy_chain_TCA.robot b/test/csit/tests/clamp/UIs/05__Submit_deploy_chain_TCA.robot
new file mode 100644
index 000000000..34c1a71ef
--- /dev/null
+++ b/test/csit/tests/clamp/UIs/05__Submit_deploy_chain_TCA.robot
@@ -0,0 +1,130 @@
+*** Settings ***
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+Library Selenium2Library
+Library XvfbRobot
+
+*** Variables ***
+${login} admin
+${passw} password
+${SELENIUM_SPEED_FAST} .2 seconds
+${SELENIUM_SPEED_SLOW} .5 seconds
+
+*** Test Cases ***
+Get Requests health check ok
+ CreateSession clamp http://localhost:8080
+ ${resp}= Get Request clamp /restservices/clds/v1/clds/healthcheck
+ Should Be Equal As Strings ${resp.status_code} 200
+
+Open Browser
+# Next line is to be enabled for Headless tests only (jenkins?). To see the tests disable the line.
+ Start Virtual Display 1920 1080
+ Open Browser http://localhost:8080/designer/index.html browser=firefox
+ Set Selenium Speed ${SELENIUM_SPEED_SLOW}
+ Set Window Size 1920 1080
+ ${title}= Get Title
+ Should Be Equal CLDS ${title}
+
+Good Login to Clamp UI and Verify logged in
+ Input Text locator=username text=${login}
+ Input Text locator=password text=${passw}
+ Press Key locator=password key=\\13
+ Wait Until Element Is Visible xpath=//*[@class="navbar-brand logo_name ng-binding"] timeout=60
+ Element Text Should Be xpath=//*[@class="navbar-brand logo_name ng-binding"] expected=Hello:admin
+
+Open TCA CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[1]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[1]/a
+ Wait Until Element Is Visible locator=Open CL timeout=60
+ Click Element locator=Open CL
+ Select From List By Label id=modelName TCAModel1
+ Click Button locator=OK
+ Element Should Contain xpath=//*[@id="modeler_name"] Closed Loop Modeler - TCAModel1
+ Element Should Contain xpath=//*[@id="status_clds"] DESIGN
+
+Validate-Test TCA CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Validation Test timeout=60
+ Click Element locator=Validation Test
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:TEST
+ Element Should Contain xpath=//*[@id="status_clds"] DESIGN
+
+Submit TCA CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Submit timeout=60
+ Click Element locator=Submit
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:SUBMIT
+ Element Should Contain xpath=//*[@id="status_clds"] DISTRIBUTED
+
+Resubmit TCA CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Resubmit timeout=60
+ Click Element locator=Resubmit
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:RESUBMIT
+ Element Should Contain xpath=//*[@id="status_clds"] DISTRIBUTED
+
+Deploy TCA CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Deploy timeout=60
+ Click Element locator=Deploy
+ Wait Until Element Is Visible xpath=//*[@id="deployProperties"] timeout=60
+ Input Text xpath=//*[@id="deployProperties"] text={}
+ Click Button locator=Deploy
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:deploy
+ Element Should Contain xpath=//*[@id="status_clds"] ACTIVE
+
+Update TCA CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Update timeout=60
+ Click Element locator=Update
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:UPDATE
+ Element Should Contain xpath=//*[@id="status_clds"] ACTIVE
+
+Stop TCA CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Stop timeout=60
+ Click Element locator=Stop
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:STOP
+ Element Should Contain xpath=//*[@id="status_clds"] STOPPED
+
+Restart TCA CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=Restart timeout=60
+ Click Element locator=Restart
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:RESTART
+ Element Should Contain xpath=//*[@id="status_clds"] ACTIVE
+
+UnDeploy TCA CL
+ Wait Until Element Is Visible xpath=//*[@id="navbar"]/ul/li[2]/a timeout=60
+ Click Element xpath=//*[@id="navbar"]/ul/li[2]/a
+ Wait Until Element Is Visible locator=UnDeploy timeout=60
+ Click Element locator=UnDeploy
+ Click Button locator=Yes
+ Wait Until Element Is Visible xpath=//*[@id="alert_message_"] timeout=60
+ Element Text Should Be xpath=//*[@id="alert_message_"] expected=Action Successful:undeploy
+ Element Should Contain xpath=//*[@id="status_clds"] DISTRIBUTED
+
+Close Browser
+ Close Browser
diff --git a/test/csit/tests/dcaegen2/testcases/resources/DMaaP.py b/test/csit/tests/dcaegen2/testcases/resources/DMaaP.py
index 63e4e8c6b..db59557db 100644
--- a/test/csit/tests/dcaegen2/testcases/resources/DMaaP.py
+++ b/test/csit/tests/dcaegen2/testcases/resources/DMaaP.py
@@ -1,423 +1,423 @@
-'''
-Created on Aug 15, 2017
-
-@author: sw6830
-'''
-import os
-import posixpath
-import BaseHTTPServer
-import urllib
-import urlparse
-import cgi, sys, shutil, mimetypes
-from jsonschema import validate
-import jsonschema, json
-import DcaeVariables
-import SimpleHTTPServer
-from robot.api import logger
-
-
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO
-
-EvtSchema = None
-DMaaPHttpd = None
-
-
-def cleanUpEvent():
- sz = DcaeVariables.VESEventQ.qsize()
- for i in range(sz):
- try:
- self.evtQueue.get_nowait()
- except:
- pass
-
-def enqueEvent(evt):
- if DcaeVariables.VESEventQ != None:
- try:
- DcaeVariables.VESEventQ.put(evt)
- if DcaeVariables.IsRobotRun:
- logger.console("DMaaP Event enqued - size=" + str(len(evt)))
- else:
- print ("DMaaP Event enqueued - size=" + str(len(evt)))
- return True
- except Exception as e:
- print (str(e))
- return False
- return False
-
-def dequeEvent(waitSec=25):
- if DcaeVariables.IsRobotRun:
- logger.console("Enter DequeEvent")
- try:
- evt = DcaeVariables.VESEventQ.get(True, waitSec)
- if DcaeVariables.IsRobotRun:
- logger.console("DMaaP Event dequeued - size=" + str(len(evt)))
- else:
- print("DMaaP Event dequeued - size=" + str(len(evt)))
- return evt
- except Exception as e:
- if DcaeVariables.IsRobotRun:
- logger.console(str(e))
- logger.console("DMaaP Event dequeue timeout")
- else:
- print("DMaaP Event dequeue timeout")
- return None
-
-class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-
- def do_PUT(self):
- self.send_response(405)
- return
-
- def do_POST(self):
-
- respCode = 0
- # Parse the form data posted
- '''
- form = cgi.FieldStorage(
- fp=self.rfile,
- headers=self.headers,
- environ={'REQUEST_METHOD':'POST',
- 'CONTENT_TYPE':self.headers['Content-Type'],
- })
-
-
- form = cgi.FieldStorage(
- fp=self.rfile,
- headers=self.headers,
- environ={"REQUEST_METHOD": "POST"})
-
- for item in form.list:
- print "%s=%s" % (item.name, item.value)
-
- '''
-
- if 'POST' not in self.requestline:
- respCode = 405
-
- '''
- if respCode == 0:
- if '/eventlistener/v5' not in self.requestline and '/eventlistener/v5/eventBatch' not in self.requestline and \
- '/eventlistener/v5/clientThrottlingState' not in self.requestline:
- respCode = 404
-
-
- if respCode == 0:
- if 'Y29uc29sZTpaakprWWpsbE1qbGpNVEkyTTJJeg==' not in str(self.headers):
- respCode = 401
- '''
-
- if respCode == 0:
- content_len = int(self.headers.getheader('content-length', 0))
- post_body = self.rfile.read(content_len)
-
- if DcaeVariables.IsRobotRun:
- logger.console("\n" + "DMaaP Receive Event:\n" + post_body)
- else:
- print("\n" + "DMaaP Receive Event:")
- print (post_body)
-
- indx = post_body.index("{")
- if indx != 0:
- post_body = post_body[indx:]
-
- if enqueEvent(post_body) == False:
- print "enque event fails"
-
- global EvtSchema
- try:
- if EvtSchema == None:
- with open(DcaeVariables.CommonEventSchemaV5) as file:
- EvtSchema = json.load(file)
- decoded_body = json.loads(post_body)
- jsonschema.validate(decoded_body, EvtSchema)
- except:
- respCode = 400
-
- # Begin the response
- if DcaeVariables.IsRobotRun == False:
- print ("Response Message:")
-
- '''
- {
- "200" : {
- "description" : "Success",
- "schema" : {
- "$ref" : "#/definitions/DR_Pub"
- }
- }
-
- rspStr = "{'responses' : {'200' : {'description' : 'Success'}}}"
- rspStr1 = "{'count': 1, 'serverTimeMs': 3}"
-
- '''
-
- if respCode == 0:
- if 'clientThrottlingState' in self.requestline:
- self.send_response(204)
- else:
- self.send_response(200)
- self.send_header('Content-Type', 'application/json')
- self.end_headers()
- #self.wfile.write("{'responses' : {'200' : {'description' : 'Success'}}}")
- self.wfile.write("{'count': 1, 'serverTimeMs': 3}")
- self.wfile.close()
- else:
- self.send_response(respCode)
-
- '''
- self.end_headers()
- self.wfile.write('Client: %s\n' % str(self.client_address))
- self.wfile.write('User-agent: %s\n' % str(self.headers['user-agent']))
- self.wfile.write('Path: %s\n' % self.path)
- self.wfile.write('Form data:\n')
- self.wfile.close()
-
- # Echo back information about what was posted in the form
- for field in form.keys():
- field_item = form[field]
- if field_item.filename:
- # The field contains an uploaded file
- file_data = field_item.file.read()
- file_len = len(file_data)
- del file_data
- self.wfile.write('\tUploaded %s as "%s" (%d bytes)\n' % \
- (field, field_item.filename, file_len))
- else:
- # Regular form value
- self.wfile.write('\t%s=%s\n' % (field, form[field].value))
- '''
- return
-
-
- def do_GET(self):
- """Serve a GET request."""
- f = self.send_head()
- if f:
- try:
- self.copyfile(f, self.wfile)
- finally:
- f.close()
-
- def do_HEAD(self):
- """Serve a HEAD request."""
- f = self.send_head()
- if f:
- f.close()
-
- def send_head(self):
- """Common code for GET and HEAD commands.
-
- This sends the response code and MIME headers.
-
- Return value is either a file object (which has to be copied
- to the outputfile by the caller unless the command was HEAD,
- and must be closed by the caller under all circumstances), or
- None, in which case the caller has nothing further to do.
-
- """
- path = self.translate_path(self.path)
- f = None
- if os.path.isdir(path):
- parts = urlparse.urlsplit(self.path)
- if not parts.path.endswith('/'):
- # redirect browser - doing basically what apache does
- self.send_response(301)
- new_parts = (parts[0], parts[1], parts[2] + '/',
- parts[3], parts[4])
- new_url = urlparse.urlunsplit(new_parts)
- self.send_header("Location", new_url)
- self.end_headers()
- return None
- for index in "index.html", "index.htm":
- index = os.path.join(path, index)
- if os.path.exists(index):
- path = index
- break
- else:
- return self.list_directory(path)
- ctype = self.guess_type(path)
- try:
- # Always read in binary mode. Opening files in text mode may cause
- # newline translations, making the actual size of the content
- # transmitted *less* than the content-length!
- f = open(path, 'rb')
- except IOError:
- self.send_error(404, "File not found")
- return None
- try:
- self.send_response(200)
- self.send_header("Content-type", ctype)
- fs = os.fstat(f.fileno())
- self.send_header("Content-Length", str(fs[6]))
- self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
- self.end_headers()
- return f
- except:
- f.close()
- raise
-
- def list_directory(self, path):
- """Helper to produce a directory listing (absent index.html).
-
- Return value is either a file object, or None (indicating an
- error). In either case, the headers are sent, making the
- interface the same as for send_head().
-
- """
- try:
- list = os.listdir(path)
- except os.error:
- self.send_error(404, "No permission to list directory")
- return None
- list.sort(key=lambda a: a.lower())
- f = StringIO()
- displaypath = cgi.escape(urllib.unquote(self.path))
- f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
- f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
- f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
- f.write("<hr>\n<ul>\n")
- for name in list:
- fullname = os.path.join(path, name)
- displayname = linkname = name
- # Append / for directories or @ for symbolic links
- if os.path.isdir(fullname):
- displayname = name + "/"
- linkname = name + "/"
- if os.path.islink(fullname):
- displayname = name + "@"
- # Note: a link to a directory displays with @ and links with /
- f.write('<li><a href="%s">%s</a>\n'
- % (urllib.quote(linkname), cgi.escape(displayname)))
- f.write("</ul>\n<hr>\n</body>\n</html>\n")
- length = f.tell()
- f.seek(0)
- self.send_response(200)
- encoding = sys.getfilesystemencoding()
- self.send_header("Content-type", "text/html; charset=%s" % encoding)
- self.send_header("Content-Length", str(length))
- self.end_headers()
- return f
-
- def translate_path(self, path):
- """Translate a /-separated PATH to the local filename syntax.
-
- Components that mean special things to the local file system
- (e.g. drive or directory names) are ignored. (XXX They should
- probably be diagnosed.)
-
- """
- # abandon query parameters
- path = path.split('?',1)[0]
- path = path.split('#',1)[0]
- # Don't forget explicit trailing slash when normalizing. Issue17324
- trailing_slash = path.rstrip().endswith('/')
- path = posixpath.normpath(urllib.unquote(path))
- words = path.split('/')
- words = filter(None, words)
- path = os.getcwd()
- for word in words:
- if os.path.dirname(word) or word in (os.curdir, os.pardir):
- # Ignore components that are not a simple file/directory name
- continue
- path = os.path.join(path, word)
- if trailing_slash:
- path += '/'
- return path
-
- def copyfile(self, source, outputfile):
- """Copy all data between two file objects.
-
- The SOURCE argument is a file object open for reading
- (or anything with a read() method) and the DESTINATION
- argument is a file object open for writing (or
- anything with a write() method).
-
- The only reason for overriding this would be to change
- the block size or perhaps to replace newlines by CRLF
- -- note however that this the default server uses this
- to copy binary data as well.
-
- """
- shutil.copyfileobj(source, outputfile)
-
- def guess_type(self, path):
- """Guess the type of a file.
-
- Argument is a PATH (a filename).
-
- Return value is a string of the form type/subtype,
- usable for a MIME Content-type header.
-
- The default implementation looks the file's extension
- up in the table self.extensions_map, using application/octet-stream
- as a default; however it would be permissible (if
- slow) to look inside the data to make a better guess.
-
- """
-
- base, ext = posixpath.splitext(path)
- if ext in self.extensions_map:
- return self.extensions_map[ext]
- ext = ext.lower()
- if ext in self.extensions_map:
- return self.extensions_map[ext]
- else:
- return self.extensions_map['']
-
- if not mimetypes.inited:
- mimetypes.init() # try to read system mime.types
- extensions_map = mimetypes.types_map.copy()
- extensions_map.update({
- '': 'application/octet-stream', # Default
- '.py': 'text/plain',
- '.c': 'text/plain',
- '.h': 'text/plain',
- })
-
-def test(HandlerClass = DMaaPHandler,
- ServerClass = BaseHTTPServer.HTTPServer, protocol="HTTP/1.0", port=3904):
- print "Load event schema file: " + DcaeVariables.CommonEventSchemaV5
- with open(DcaeVariables.CommonEventSchemaV5) as file:
- global EvtSchema
- EvtSchema = json.load(file)
-
- server_address = ('', port)
-
- HandlerClass.protocol_version = protocol
- httpd = ServerClass(server_address, HandlerClass)
-
- global DMaaPHttpd
- DMaaPHttpd = httpd
- DcaeVariables.HTTPD = httpd
-
- sa = httpd.socket.getsockname()
- print "Serving HTTP on", sa[0], "port", sa[1], "..."
- #httpd.serve_forever()
-
-def _main_ (HandlerClass = DMaaPHandler,
- ServerClass = BaseHTTPServer.HTTPServer, protocol="HTTP/1.0"):
-
- if sys.argv[1:]:
- port = int(sys.argv[1])
- else:
- port = 3904
-
- print "Load event schema file: " + DcaeVariables.CommonEventSchemaV5
- with open(DcaeVariables.CommonEventSchemaV5) as file:
- global EvtSchema
- EvtSchema = json.load(file)
-
- server_address = ('', port)
-
- HandlerClass.protocol_version = protocol
- httpd = ServerClass(server_address, HandlerClass)
-
- sa = httpd.socket.getsockname()
- print "Serving HTTP on", sa[0], "port", sa[1], "..."
- httpd.serve_forever()
-
-if __name__ == '__main__':
+'''
+Created on Aug 15, 2017
+
+@author: sw6830
+'''
+import os
+import posixpath
+import BaseHTTPServer
+import urllib
+import urlparse
+import cgi, sys, shutil, mimetypes
+from jsonschema import validate
+import jsonschema, json
+import DcaeVariables
+import SimpleHTTPServer
+from robot.api import logger
+
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+EvtSchema = None
+DMaaPHttpd = None
+
+
+def cleanUpEvent():
+ sz = DcaeVariables.VESEventQ.qsize()
+ for i in range(sz):
+ try:
+ self.evtQueue.get_nowait()
+ except:
+ pass
+
+def enqueEvent(evt):
+ if DcaeVariables.VESEventQ != None:
+ try:
+ DcaeVariables.VESEventQ.put(evt)
+ if DcaeVariables.IsRobotRun:
+ logger.console("DMaaP Event enqued - size=" + str(len(evt)))
+ else:
+ print ("DMaaP Event enqueued - size=" + str(len(evt)))
+ return True
+ except Exception as e:
+ print (str(e))
+ return False
+ return False
+
+def dequeEvent(waitSec=25):
+ if DcaeVariables.IsRobotRun:
+ logger.console("Enter DequeEvent")
+ try:
+ evt = DcaeVariables.VESEventQ.get(True, waitSec)
+ if DcaeVariables.IsRobotRun:
+ logger.console("DMaaP Event dequeued - size=" + str(len(evt)))
+ else:
+ print("DMaaP Event dequeued - size=" + str(len(evt)))
+ return evt
+ except Exception as e:
+ if DcaeVariables.IsRobotRun:
+ logger.console(str(e))
+ logger.console("DMaaP Event dequeue timeout")
+ else:
+ print("DMaaP Event dequeue timeout")
+ return None
+
+class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+
+ def do_PUT(self):
+ self.send_response(405)
+ return
+
+ def do_POST(self):
+
+ respCode = 0
+ # Parse the form data posted
+ '''
+ form = cgi.FieldStorage(
+ fp=self.rfile,
+ headers=self.headers,
+ environ={'REQUEST_METHOD':'POST',
+ 'CONTENT_TYPE':self.headers['Content-Type'],
+ })
+
+
+ form = cgi.FieldStorage(
+ fp=self.rfile,
+ headers=self.headers,
+ environ={"REQUEST_METHOD": "POST"})
+
+ for item in form.list:
+ print "%s=%s" % (item.name, item.value)
+
+ '''
+
+ if 'POST' not in self.requestline:
+ respCode = 405
+
+ '''
+ if respCode == 0:
+ if '/eventlistener/v5' not in self.requestline and '/eventlistener/v5/eventBatch' not in self.requestline and \
+ '/eventlistener/v5/clientThrottlingState' not in self.requestline:
+ respCode = 404
+
+
+ if respCode == 0:
+ if 'Y29uc29sZTpaakprWWpsbE1qbGpNVEkyTTJJeg==' not in str(self.headers):
+ respCode = 401
+ '''
+
+ if respCode == 0:
+ content_len = int(self.headers.getheader('content-length', 0))
+ post_body = self.rfile.read(content_len)
+
+ if DcaeVariables.IsRobotRun:
+ logger.console("\n" + "DMaaP Receive Event:\n" + post_body)
+ else:
+ print("\n" + "DMaaP Receive Event:")
+ print (post_body)
+
+ indx = post_body.index("{")
+ if indx != 0:
+ post_body = post_body[indx:]
+
+ if enqueEvent(post_body) == False:
+ print "enque event fails"
+
+ global EvtSchema
+ try:
+ if EvtSchema == None:
+ with open(DcaeVariables.CommonEventSchemaV5) as file:
+ EvtSchema = json.load(file)
+ decoded_body = json.loads(post_body)
+ jsonschema.validate(decoded_body, EvtSchema)
+ except:
+ respCode = 400
+
+ # Begin the response
+ if DcaeVariables.IsRobotRun == False:
+ print ("Response Message:")
+
+ '''
+ {
+ "200" : {
+ "description" : "Success",
+ "schema" : {
+ "$ref" : "#/definitions/DR_Pub"
+ }
+ }
+
+ rspStr = "{'responses' : {'200' : {'description' : 'Success'}}}"
+ rspStr1 = "{'count': 1, 'serverTimeMs': 3}"
+
+ '''
+
+ if respCode == 0:
+ if 'clientThrottlingState' in self.requestline:
+ self.send_response(204)
+ else:
+ self.send_response(200)
+ self.send_header('Content-Type', 'application/json')
+ self.end_headers()
+ #self.wfile.write("{'responses' : {'200' : {'description' : 'Success'}}}")
+ self.wfile.write("{'count': 1, 'serverTimeMs': 3}")
+ self.wfile.close()
+ else:
+ self.send_response(respCode)
+
+ '''
+ self.end_headers()
+ self.wfile.write('Client: %s\n' % str(self.client_address))
+ self.wfile.write('User-agent: %s\n' % str(self.headers['user-agent']))
+ self.wfile.write('Path: %s\n' % self.path)
+ self.wfile.write('Form data:\n')
+ self.wfile.close()
+
+ # Echo back information about what was posted in the form
+ for field in form.keys():
+ field_item = form[field]
+ if field_item.filename:
+ # The field contains an uploaded file
+ file_data = field_item.file.read()
+ file_len = len(file_data)
+ del file_data
+ self.wfile.write('\tUploaded %s as "%s" (%d bytes)\n' % \
+ (field, field_item.filename, file_len))
+ else:
+ # Regular form value
+ self.wfile.write('\t%s=%s\n' % (field, form[field].value))
+ '''
+ return
+
+
+ def do_GET(self):
+ """Serve a GET request."""
+ f = self.send_head()
+ if f:
+ try:
+ self.copyfile(f, self.wfile)
+ finally:
+ f.close()
+
+ def do_HEAD(self):
+ """Serve a HEAD request."""
+ f = self.send_head()
+ if f:
+ f.close()
+
+ def send_head(self):
+ """Common code for GET and HEAD commands.
+
+ This sends the response code and MIME headers.
+
+ Return value is either a file object (which has to be copied
+ to the outputfile by the caller unless the command was HEAD,
+ and must be closed by the caller under all circumstances), or
+ None, in which case the caller has nothing further to do.
+
+ """
+ path = self.translate_path(self.path)
+ f = None
+ if os.path.isdir(path):
+ parts = urlparse.urlsplit(self.path)
+ if not parts.path.endswith('/'):
+ # redirect browser - doing basically what apache does
+ self.send_response(301)
+ new_parts = (parts[0], parts[1], parts[2] + '/',
+ parts[3], parts[4])
+ new_url = urlparse.urlunsplit(new_parts)
+ self.send_header("Location", new_url)
+ self.end_headers()
+ return None
+ for index in "index.html", "index.htm":
+ index = os.path.join(path, index)
+ if os.path.exists(index):
+ path = index
+ break
+ else:
+ return self.list_directory(path)
+ ctype = self.guess_type(path)
+ try:
+ # Always read in binary mode. Opening files in text mode may cause
+ # newline translations, making the actual size of the content
+ # transmitted *less* than the content-length!
+ f = open(path, 'rb')
+ except IOError:
+ self.send_error(404, "File not found")
+ return None
+ try:
+ self.send_response(200)
+ self.send_header("Content-type", ctype)
+ fs = os.fstat(f.fileno())
+ self.send_header("Content-Length", str(fs[6]))
+ self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
+ self.end_headers()
+ return f
+ except:
+ f.close()
+ raise
+
+ def list_directory(self, path):
+ """Helper to produce a directory listing (absent index.html).
+
+ Return value is either a file object, or None (indicating an
+ error). In either case, the headers are sent, making the
+ interface the same as for send_head().
+
+ """
+ try:
+ list = os.listdir(path)
+ except os.error:
+ self.send_error(404, "No permission to list directory")
+ return None
+ list.sort(key=lambda a: a.lower())
+ f = StringIO()
+ displaypath = cgi.escape(urllib.unquote(self.path))
+ f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
+ f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath)
+ f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath)
+ f.write("<hr>\n<ul>\n")
+ for name in list:
+ fullname = os.path.join(path, name)
+ displayname = linkname = name
+ # Append / for directories or @ for symbolic links
+ if os.path.isdir(fullname):
+ displayname = name + "/"
+ linkname = name + "/"
+ if os.path.islink(fullname):
+ displayname = name + "@"
+ # Note: a link to a directory displays with @ and links with /
+ f.write('<li><a href="%s">%s</a>\n'
+ % (urllib.quote(linkname), cgi.escape(displayname)))
+ f.write("</ul>\n<hr>\n</body>\n</html>\n")
+ length = f.tell()
+ f.seek(0)
+ self.send_response(200)
+ encoding = sys.getfilesystemencoding()
+ self.send_header("Content-type", "text/html; charset=%s" % encoding)
+ self.send_header("Content-Length", str(length))
+ self.end_headers()
+ return f
+
+ def translate_path(self, path):
+ """Translate a /-separated PATH to the local filename syntax.
+
+ Components that mean special things to the local file system
+ (e.g. drive or directory names) are ignored. (XXX They should
+ probably be diagnosed.)
+
+ """
+ # abandon query parameters
+ path = path.split('?',1)[0]
+ path = path.split('#',1)[0]
+ # Don't forget explicit trailing slash when normalizing. Issue17324
+ trailing_slash = path.rstrip().endswith('/')
+ path = posixpath.normpath(urllib.unquote(path))
+ words = path.split('/')
+ words = filter(None, words)
+ path = os.getcwd()
+ for word in words:
+ if os.path.dirname(word) or word in (os.curdir, os.pardir):
+ # Ignore components that are not a simple file/directory name
+ continue
+ path = os.path.join(path, word)
+ if trailing_slash:
+ path += '/'
+ return path
+
+ def copyfile(self, source, outputfile):
+ """Copy all data between two file objects.
+
+ The SOURCE argument is a file object open for reading
+ (or anything with a read() method) and the DESTINATION
+ argument is a file object open for writing (or
+ anything with a write() method).
+
+ The only reason for overriding this would be to change
+ the block size or perhaps to replace newlines by CRLF
+ -- note however that this the default server uses this
+ to copy binary data as well.
+
+ """
+ shutil.copyfileobj(source, outputfile)
+
+ def guess_type(self, path):
+ """Guess the type of a file.
+
+ Argument is a PATH (a filename).
+
+ Return value is a string of the form type/subtype,
+ usable for a MIME Content-type header.
+
+ The default implementation looks the file's extension
+ up in the table self.extensions_map, using application/octet-stream
+ as a default; however it would be permissible (if
+ slow) to look inside the data to make a better guess.
+
+ """
+
+ base, ext = posixpath.splitext(path)
+ if ext in self.extensions_map:
+ return self.extensions_map[ext]
+ ext = ext.lower()
+ if ext in self.extensions_map:
+ return self.extensions_map[ext]
+ else:
+ return self.extensions_map['']
+
+ if not mimetypes.inited:
+ mimetypes.init() # try to read system mime.types
+ extensions_map = mimetypes.types_map.copy()
+ extensions_map.update({
+ '': 'application/octet-stream', # Default
+ '.py': 'text/plain',
+ '.c': 'text/plain',
+ '.h': 'text/plain',
+ })
+
+def test(HandlerClass = DMaaPHandler,
+ ServerClass = BaseHTTPServer.HTTPServer, protocol="HTTP/1.0", port=3904):
+ print "Load event schema file: " + DcaeVariables.CommonEventSchemaV5
+ with open(DcaeVariables.CommonEventSchemaV5) as file:
+ global EvtSchema
+ EvtSchema = json.load(file)
+
+ server_address = ('', port)
+
+ HandlerClass.protocol_version = protocol
+ httpd = ServerClass(server_address, HandlerClass)
+
+ global DMaaPHttpd
+ DMaaPHttpd = httpd
+ DcaeVariables.HTTPD = httpd
+
+ sa = httpd.socket.getsockname()
+ print "Serving HTTP on", sa[0], "port", sa[1], "..."
+ #httpd.serve_forever()
+
+def _main_ (HandlerClass = DMaaPHandler,
+ ServerClass = BaseHTTPServer.HTTPServer, protocol="HTTP/1.0"):
+
+ if sys.argv[1:]:
+ port = int(sys.argv[1])
+ else:
+ port = 3904
+
+ print "Load event schema file: " + DcaeVariables.CommonEventSchemaV5
+ with open(DcaeVariables.CommonEventSchemaV5) as file:
+ global EvtSchema
+ EvtSchema = json.load(file)
+
+ server_address = ('', port)
+
+ HandlerClass.protocol_version = protocol
+ httpd = ServerClass(server_address, HandlerClass)
+
+ sa = httpd.socket.getsockname()
+ print "Serving HTTP on", sa[0], "port", sa[1], "..."
+ httpd.serve_forever()
+
+if __name__ == '__main__':
_main_() \ No newline at end of file
diff --git a/test/csit/tests/dcaegen2/testcases/resources/DcaeLibrary.py b/test/csit/tests/dcaegen2/testcases/resources/DcaeLibrary.py
index 0242ad7ab..e581f1b2c 100644
--- a/test/csit/tests/dcaegen2/testcases/resources/DcaeLibrary.py
+++ b/test/csit/tests/dcaegen2/testcases/resources/DcaeLibrary.py
@@ -1,159 +1,159 @@
-'''
-Created on Aug 18, 2017
-
-@author: sw6830
-'''
-from robot.api import logger
-from Queue import Queue
-import uuid, time, datetime,json, threading,os, platform, subprocess,paramiko
-import DcaeVariables
-import DMaaP
-
-class DcaeLibrary(object):
-
- def __init__(self):
- pass
-
- def setup_dmaap_server(self, portNum=3904):
- if DcaeVariables.HttpServerThread != None:
- DMaaP.cleanUpEvent()
- logger.console("Clean up event from event queue before test")
- logger.info("DMaaP Server already started")
- return "true"
-
- DcaeVariables.IsRobotRun = True
- DMaaP.test(port=portNum)
- try:
- DcaeVariables.VESEventQ = Queue()
- DcaeVariables.HttpServerThread = threading.Thread(name='DMAAP_HTTPServer', target=DMaaP.DMaaPHttpd.serve_forever)
- DcaeVariables.HttpServerThread.start()
- logger.console("DMaaP Mockup Sever started")
- time.sleep(2)
- return "true"
- except Exception as e:
- print (str(e))
- return "false"
-
- def shutdown_dmaap(self):
- if DcaeVariables.HTTPD != None:
- DcaeVariables.HTTPD.shutdown()
- logger.console("DMaaP Server shut down")
- time.sleep(3)
- return "true"
- else:
- return "false"
-
- def cleanup_ves_events(self):
- if DcaeVariables.HttpServerThread != None:
- DMaaP.cleanUpEvent()
- logger.console("DMaaP event queue is cleaned up")
- return "true"
- logger.console("DMaaP server not started yet")
- return "false"
-
- def enable_vesc_https_auth(self):
- if 'Windows' in platform.system():
- try:
- client = paramiko.SSHClient()
- client.load_system_host_keys()
- #client.set_missing_host_key_policy(paramiko.WarningPolicy)
- client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-
- client.connect(os.environ['CSIT_IP'], port=22, username=os.environ['CSIT_USER'], password=os.environ['CSIT_PD'])
- stdin, stdout, stderr = client.exec_command('%{WORKSPACE}/test/csit/tests/dcaegen2/testcases/resources/vesc_enable_https_auth.sh')
- logger.console(stdout.read())
- finally:
- client.close()
- return
- ws = os.environ['WORKSPACE']
- script2run = ws + "/test/csit/tests/dcaegen2/testcases/resources/vesc_enable_https_auth.sh"
- logger.info("Running script: " + script2run)
- logger.console("Running script: " + script2run)
- subprocess.call(script2run)
- time.sleep(5)
- return
-
- def dmaap_message_receive(self, evtobj, action='contain'):
-
- evtStr = DMaaP.dequeEvent()
- while evtStr != None:
- logger.console("DMaaP receive VES Event:\n" + evtStr)
- if action == 'contain':
- if evtobj in evtStr:
- logger.info("DMaaP Receive Expected Publish Event:\n" + evtStr)
- return 'true'
- if action == 'sizematch':
- if len(evtobj) == len(evtStr):
- return 'true'
- if action == 'dictmatch':
- evtDict = json.loads(evtStr)
- if cmp(evtobj, evtDict) == 0:
- return 'true'
- evtStr = DMaaP.dequeEvent()
- return 'false'
-
- def create_header_from_string(self, dictStr):
- logger.info("Enter create_header_from_string: dictStr")
- return dict(u.split("=") for u in dictStr.split(","))
-
- def is_json_empty(self, resp):
- logger.info("Enter is_json_empty: resp.text: " + resp.text)
- if resp.text == None or len(resp.text) < 2:
- return 'True'
- return 'False'
-
- def Generate_UUID(self):
- """generate a uuid"""
- return uuid.uuid4()
-
- def get_json_value_list(self, jsonstr, keyval):
- logger.info("Enter Get_Json_Key_Value_List")
- if jsonstr == None or len(jsonstr) < 2:
- logger.info("No Json data found")
- return []
- try:
- data = json.loads(jsonstr)
- nodelist = []
- for item in data:
- nodelist.append(item[keyval])
- return nodelist
- except Exception as e:
- logger.info("Json data parsing fails")
- print str(e)
- return []
-
- def generate_MilliTimestamp_UUID(self):
- """generate a millisecond timestamp uuid"""
- then = datetime.datetime.now()
- return int(time.mktime(then.timetuple())*1e3 + then.microsecond/1e3)
-
- def test (self):
- import json
- from pprint import pprint
-
- with open('robot/assets/dcae/ves_volte_single_fault_event.json') as data_file:
- data = json.load(data_file)
-
- data['event']['commonEventHeader']['version'] = '5.0'
- pprint(data)
-
-
-
-if __name__ == '__main__':
- '''
- dictStr = "action=getTable,Accept=application/json,Content-Type=application/json,X-FromAppId=1234908903284"
- cls = DcaeLibrary()
- #dict = cls.create_header_from_string(dictStr)
- #print str(dict)
- jsonStr = "[{'Node': 'onapfcnsl00', 'CheckID': 'serfHealth', 'Name': 'Serf Health Status', 'ServiceName': '', 'Notes': '', 'ModifyIndex': 6, 'Status': 'passing', 'ServiceID': '', 'ServiceTags': [], 'Output': 'Agent alive and reachable', 'CreateIndex': 6}]"
- lsObj = cls.get_json_value_list(jsonStr, 'Status')
- print lsObj
- '''
-
- lib = DcaeLibrary()
- lib.enable_vesc_https_auth()
-
- ret = lib.setup_dmaap_server()
- print ret
- time.sleep(100000)
-
+'''
+Created on Aug 18, 2017
+
+@author: sw6830
+'''
+from robot.api import logger
+from Queue import Queue
+import uuid, time, datetime,json, threading,os, platform, subprocess,paramiko
+import DcaeVariables
+import DMaaP
+
+class DcaeLibrary(object):
+
+ def __init__(self):
+ pass
+
+ def setup_dmaap_server(self, portNum=3904):
+ if DcaeVariables.HttpServerThread != None:
+ DMaaP.cleanUpEvent()
+ logger.console("Clean up event from event queue before test")
+ logger.info("DMaaP Server already started")
+ return "true"
+
+ DcaeVariables.IsRobotRun = True
+ DMaaP.test(port=portNum)
+ try:
+ DcaeVariables.VESEventQ = Queue()
+ DcaeVariables.HttpServerThread = threading.Thread(name='DMAAP_HTTPServer', target=DMaaP.DMaaPHttpd.serve_forever)
+ DcaeVariables.HttpServerThread.start()
+ logger.console("DMaaP Mockup Sever started")
+ time.sleep(2)
+ return "true"
+ except Exception as e:
+ print (str(e))
+ return "false"
+
+ def shutdown_dmaap(self):
+ if DcaeVariables.HTTPD != None:
+ DcaeVariables.HTTPD.shutdown()
+ logger.console("DMaaP Server shut down")
+ time.sleep(3)
+ return "true"
+ else:
+ return "false"
+
+ def cleanup_ves_events(self):
+ if DcaeVariables.HttpServerThread != None:
+ DMaaP.cleanUpEvent()
+ logger.console("DMaaP event queue is cleaned up")
+ return "true"
+ logger.console("DMaaP server not started yet")
+ return "false"
+
+ def enable_vesc_https_auth(self):
+ if 'Windows' in platform.system():
+ try:
+ client = paramiko.SSHClient()
+ client.load_system_host_keys()
+ #client.set_missing_host_key_policy(paramiko.WarningPolicy)
+ client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+
+ client.connect(os.environ['CSIT_IP'], port=22, username=os.environ['CSIT_USER'], password=os.environ['CSIT_PD'])
+ stdin, stdout, stderr = client.exec_command('%{WORKSPACE}/test/csit/tests/dcaegen2/testcases/resources/vesc_enable_https_auth.sh')
+ logger.console(stdout.read())
+ finally:
+ client.close()
+ return
+ ws = os.environ['WORKSPACE']
+ script2run = ws + "/test/csit/tests/dcaegen2/testcases/resources/vesc_enable_https_auth.sh"
+ logger.info("Running script: " + script2run)
+ logger.console("Running script: " + script2run)
+ subprocess.call(script2run)
+ time.sleep(5)
+ return
+
+ def dmaap_message_receive(self, evtobj, action='contain'):
+
+ evtStr = DMaaP.dequeEvent()
+ while evtStr != None:
+ logger.console("DMaaP receive VES Event:\n" + evtStr)
+ if action == 'contain':
+ if evtobj in evtStr:
+ logger.info("DMaaP Receive Expected Publish Event:\n" + evtStr)
+ return 'true'
+ if action == 'sizematch':
+ if len(evtobj) == len(evtStr):
+ return 'true'
+ if action == 'dictmatch':
+ evtDict = json.loads(evtStr)
+ if cmp(evtobj, evtDict) == 0:
+ return 'true'
+ evtStr = DMaaP.dequeEvent()
+ return 'false'
+
+ def create_header_from_string(self, dictStr):
+ logger.info("Enter create_header_from_string: dictStr")
+ return dict(u.split("=") for u in dictStr.split(","))
+
+ def is_json_empty(self, resp):
+ logger.info("Enter is_json_empty: resp.text: " + resp.text)
+ if resp.text == None or len(resp.text) < 2:
+ return 'True'
+ return 'False'
+
+ def Generate_UUID(self):
+ """generate a uuid"""
+ return uuid.uuid4()
+
+ def get_json_value_list(self, jsonstr, keyval):
+ logger.info("Enter Get_Json_Key_Value_List")
+ if jsonstr == None or len(jsonstr) < 2:
+ logger.info("No Json data found")
+ return []
+ try:
+ data = json.loads(jsonstr)
+ nodelist = []
+ for item in data:
+ nodelist.append(item[keyval])
+ return nodelist
+ except Exception as e:
+ logger.info("Json data parsing fails")
+ print str(e)
+ return []
+
+ def generate_MilliTimestamp_UUID(self):
+ """generate a millisecond timestamp uuid"""
+ then = datetime.datetime.now()
+ return int(time.mktime(then.timetuple())*1e3 + then.microsecond/1e3)
+
+ def test (self):
+ import json
+ from pprint import pprint
+
+ with open('robot/assets/dcae/ves_volte_single_fault_event.json') as data_file:
+ data = json.load(data_file)
+
+ data['event']['commonEventHeader']['version'] = '5.0'
+ pprint(data)
+
+
+
+if __name__ == '__main__':
+ '''
+ dictStr = "action=getTable,Accept=application/json,Content-Type=application/json,X-FromAppId=1234908903284"
+ cls = DcaeLibrary()
+ #dict = cls.create_header_from_string(dictStr)
+ #print str(dict)
+ jsonStr = "[{'Node': 'onapfcnsl00', 'CheckID': 'serfHealth', 'Name': 'Serf Health Status', 'ServiceName': '', 'Notes': '', 'ModifyIndex': 6, 'Status': 'passing', 'ServiceID': '', 'ServiceTags': [], 'Output': 'Agent alive and reachable', 'CreateIndex': 6}]"
+ lsObj = cls.get_json_value_list(jsonStr, 'Status')
+ print lsObj
+ '''
+
+ lib = DcaeLibrary()
+ lib.enable_vesc_https_auth()
+
+ ret = lib.setup_dmaap_server()
+ print ret
+ time.sleep(100000)
+
diff --git a/test/csit/tests/dcaegen2/testcases/resources/dcae_keywords.robot b/test/csit/tests/dcaegen2/testcases/resources/dcae_keywords.robot
index 59d44e158..98b341529 100644
--- a/test/csit/tests/dcaegen2/testcases/resources/dcae_keywords.robot
+++ b/test/csit/tests/dcaegen2/testcases/resources/dcae_keywords.robot
@@ -1,133 +1,133 @@
- *** Settings ***
-Documentation The main interface for interacting with DCAE. It handles low level stuff like managing the http request library and DCAE required fields
-Library RequestsLibrary
-Library DcaeLibrary
-Library OperatingSystem
-Library Collections
-Variables ../resources/DcaeVariables.py
-Resource ../resources/dcae_properties.robot
-*** Variables ***
-${DCAE_HEALTH_CHECK_BODY} %{WORKSPACE}/test/csit/tests/dcae/testcases/assets/json_events/dcae_healthcheck.json
-*** Keywords ***
-Get DCAE Nodes
- [Documentation] Get DCAE Nodes from Consul Catalog
- #Log Creating session ${GLOBAL_DCAE_CONSUL_URL}
- ${session}= Create Session dcae ${GLOBAL_DCAE_CONSUL_URL}
- ${uuid}= Generate UUID
- ${headers}= Create Dictionary Accept=application/json Content-Type=application/json X-Consul-Token=abcd1234 X-TransactionId=${GLOBAL_APPLICATION_ID}-${uuid} X-FromAppId=${GLOBAL_APPLICATION_ID}
- ${resp}= Get Request dcae /v1/catalog/nodes headers=${headers}
- Log Received response from dcae consul: ${resp.json()}
- Should Be Equal As Strings ${resp.status_code} 200
- ${NodeList}= Get Json Value List ${resp.text} Node
- ${NodeListLength}= Get Length ${NodeList}
- ${len}= Get Length ${NodeList}
- Should Not Be Equal As Integers ${len} 0
- [return] ${NodeList}
-DCAE Node Health Check
- [Documentation] Perform DCAE Node Health Check
- [Arguments] ${NodeName}
- ${session}= Create Session dcae-${NodeName} ${GLOBAL_DCAE_CONSUL_URL}
- ${uuid}= Generate UUID
- ${headers}= Create Dictionary Accept=application/json Content-Type=application/json X-Consul-Token=abcd1234 X-TransactionId=${GLOBAL_APPLICATION_ID}-${uuid} X-FromAppId=${GLOBAL_APPLICATION_ID}
- ${hcpath}= Catenate SEPARATOR= /v1/health/node/ ${NodeName}
- ${resp}= Get Request dcae-${NodeName} ${hcpath} headers=${headers}
- Log Received response from dcae consul: ${resp.json()}
- Should Be Equal As Strings ${resp.status_code} 200
- ${StatusList}= Get Json Value List ${resp.text} Status
- ${len}= Get Length ${StatusList}
- Should Not Be Equal As Integers ${len} 0
- DCAE Check Health Status ${NodeName} ${StatusList[0]} Serf Health Status
- #Run Keyword if ${len} > 1 DCAE Check Health Status ${NodeName} ${StatusList[1]} Serf Health Status
-DCAE Check Health Status
- [Arguments] ${NodeName} ${ItemStatus} ${CheckType}
- Should Be Equal As Strings ${ItemStatus} passing
- Log Node: ${NodeName} ${CheckType} check pass ok
-VES Collector Suite Setup DMaaP
- [Documentation] Start DMaaP Mockup Server
- ${ret}= Setup DMaaP Server
- Should Be Equal As Strings ${ret} true
-VES Collector Suite Shutdown DMaaP
- [Documentation] Shutdown DMaaP Mockup Server
- ${ret}= Shutdown DMaap
- Should Be Equal As Strings ${ret} true
-Check DCAE Results
- [Documentation] Parse DCAE JSON response and make sure all rows have healthTestStatus=GREEN
- [Arguments] ${json}
- @{rows}= Get From Dictionary ${json['returns']} rows
- @{headers}= Get From Dictionary ${json['returns']} columns
- # Retrieve column names from headers
- ${columns}= Create List
- :for ${header} in @{headers}
- \ ${colName}= Get From Dictionary ${header} colName
- \ Append To List ${columns} ${colName}
- # Process each row making sure status=GREEN
- :for ${row} in @{rows}
- \ ${cells}= Get From Dictionary ${row} cells
- \ ${dict}= Make A Dictionary ${cells} ${columns}
- \ Dictionary Should Contain Item ${dict} healthTestStatus GREEN
-Make A Dictionary
- [Documentation] Given a list of column names and a list of dictionaries, map columname=value
- [Arguments] ${columns} ${names} ${valuename}=value
- ${dict}= Create Dictionary
- ${collength}= Get Length ${columns}
- ${namelength}= Get Length ${names}
- :for ${index} in range 0 ${collength}
- \ ${name}= Evaluate ${names}[${index}]
- \ ${valued}= Evaluate ${columns}[${index}]
- \ ${value}= Get From Dictionary ${valued} ${valueName}
- \ Set To Dictionary ${dict} ${name} ${value}
- [Return] ${dict}
-Get Event Data From File
- [Arguments] ${jsonfile}
- ${data}= OperatingSystem.Get File ${jsonfile}
- #Should Not Be_Equal ${data} None
- [return] ${data}
-Json String To Dictionary
- [Arguments] ${json_string}
- ${json_dict}= evaluate json.loads('''${json_string}''') json
- [return] ${json_dict}
-Dictionary To Json String
- [Arguments] ${json_dict}
- ${json_string}= evaluate json.dumps(${json_dict}) json
- [return] ${json_string}
-Get DCAE Service Component Status
- [Documentation] Get the status of a DCAE Service Component
- [Arguments] ${url} ${urlpath} ${usr} ${passwd}
- ${auth}= Create List ${usr} ${passwd}
- ${session}= Create Session dcae-service-component ${url} auth=${auth}
- ${resp}= Get Request dcae-service-component ${urlpath}
- [return] ${resp}
-Publish Event To VES Collector No Auth
- [Documentation] Send an event to VES Collector
- [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata}
- Log Creating session ${url}
- ${session}= Create Session dcaegen2-d1 ${url}
- ${resp}= Post Request dcaegen2-d1 ${evtpath} data=${evtdata} headers=${httpheaders}
- #Log Received response from dcae ${resp.json()}
- [return] ${resp}
-Publish Event To VES Collector
- [Documentation] Send an event to VES Collector
- [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata} ${user} ${pd}
- ${auth}= Create List ${user} ${pd}
- Log Creating session ${url}
- ${session}= Create Session dcaegen2-d1 ${url} auth=${auth} disable_warnings=1
- ${resp}= Post Request dcaegen2-d1 ${evtpath} data=${evtdata} headers=${httpheaders}
- #Log Received response from dcae ${resp.json()}
- [return] ${resp}
-Publish Event To VES Collector With Put Method
- [Documentation] Send an event to VES Collector
- [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata} ${user} ${pd}
- ${auth}= Create List ${user} ${pd}
- Log Creating session ${url}
- ${session}= Create Session dcae-d1 ${url} auth=${auth}
- ${resp}= Put Request dcae-d1 ${evtpath} data=${evtdata} headers=${httpheaders}
- #Log Received response from dcae ${resp.json()}
- [return] ${resp}
-Publish Event To VES Collector With Put Method No Auth
- [Documentation] Send an event to VES Collector
- [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata}
- Log Creating session ${url}
- ${session}= Create Session dcae-d1 ${url}
- ${resp}= Put Request dcae-d1 ${evtpath} data=${evtdata} headers=${httpheaders}
- #Log Received response from dcae ${resp.json()}
- [return] ${resp}
+ *** Settings ***
+Documentation The main interface for interacting with DCAE. It handles low level stuff like managing the http request library and DCAE required fields
+Library RequestsLibrary
+Library DcaeLibrary
+Library OperatingSystem
+Library Collections
+Variables ../resources/DcaeVariables.py
+Resource ../resources/dcae_properties.robot
+*** Variables ***
+${DCAE_HEALTH_CHECK_BODY} %{WORKSPACE}/test/csit/tests/dcae/testcases/assets/json_events/dcae_healthcheck.json
+*** Keywords ***
+Get DCAE Nodes
+ [Documentation] Get DCAE Nodes from Consul Catalog
+ #Log Creating session ${GLOBAL_DCAE_CONSUL_URL}
+ ${session}= Create Session dcae ${GLOBAL_DCAE_CONSUL_URL}
+ ${uuid}= Generate UUID
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json X-Consul-Token=abcd1234 X-TransactionId=${GLOBAL_APPLICATION_ID}-${uuid} X-FromAppId=${GLOBAL_APPLICATION_ID}
+ ${resp}= Get Request dcae /v1/catalog/nodes headers=${headers}
+ Log Received response from dcae consul: ${resp.json()}
+ Should Be Equal As Strings ${resp.status_code} 200
+ ${NodeList}= Get Json Value List ${resp.text} Node
+ ${NodeListLength}= Get Length ${NodeList}
+ ${len}= Get Length ${NodeList}
+ Should Not Be Equal As Integers ${len} 0
+ [return] ${NodeList}
+DCAE Node Health Check
+ [Documentation] Perform DCAE Node Health Check
+ [Arguments] ${NodeName}
+ ${session}= Create Session dcae-${NodeName} ${GLOBAL_DCAE_CONSUL_URL}
+ ${uuid}= Generate UUID
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json X-Consul-Token=abcd1234 X-TransactionId=${GLOBAL_APPLICATION_ID}-${uuid} X-FromAppId=${GLOBAL_APPLICATION_ID}
+ ${hcpath}= Catenate SEPARATOR= /v1/health/node/ ${NodeName}
+ ${resp}= Get Request dcae-${NodeName} ${hcpath} headers=${headers}
+ Log Received response from dcae consul: ${resp.json()}
+ Should Be Equal As Strings ${resp.status_code} 200
+ ${StatusList}= Get Json Value List ${resp.text} Status
+ ${len}= Get Length ${StatusList}
+ Should Not Be Equal As Integers ${len} 0
+ DCAE Check Health Status ${NodeName} ${StatusList[0]} Serf Health Status
+ #Run Keyword if ${len} > 1 DCAE Check Health Status ${NodeName} ${StatusList[1]} Serf Health Status
+DCAE Check Health Status
+ [Arguments] ${NodeName} ${ItemStatus} ${CheckType}
+ Should Be Equal As Strings ${ItemStatus} passing
+ Log Node: ${NodeName} ${CheckType} check pass ok
+VES Collector Suite Setup DMaaP
+ [Documentation] Start DMaaP Mockup Server
+ ${ret}= Setup DMaaP Server
+ Should Be Equal As Strings ${ret} true
+VES Collector Suite Shutdown DMaaP
+ [Documentation] Shutdown DMaaP Mockup Server
+ ${ret}= Shutdown DMaap
+ Should Be Equal As Strings ${ret} true
+Check DCAE Results
+ [Documentation] Parse DCAE JSON response and make sure all rows have healthTestStatus=GREEN
+ [Arguments] ${json}
+ @{rows}= Get From Dictionary ${json['returns']} rows
+ @{headers}= Get From Dictionary ${json['returns']} columns
+ # Retrieve column names from headers
+ ${columns}= Create List
+ :for ${header} in @{headers}
+ \ ${colName}= Get From Dictionary ${header} colName
+ \ Append To List ${columns} ${colName}
+ # Process each row making sure status=GREEN
+ :for ${row} in @{rows}
+ \ ${cells}= Get From Dictionary ${row} cells
+ \ ${dict}= Make A Dictionary ${cells} ${columns}
+ \ Dictionary Should Contain Item ${dict} healthTestStatus GREEN
+Make A Dictionary
+ [Documentation] Given a list of column names and a list of dictionaries, map columname=value
+ [Arguments] ${columns} ${names} ${valuename}=value
+ ${dict}= Create Dictionary
+ ${collength}= Get Length ${columns}
+ ${namelength}= Get Length ${names}
+ :for ${index} in range 0 ${collength}
+ \ ${name}= Evaluate ${names}[${index}]
+ \ ${valued}= Evaluate ${columns}[${index}]
+ \ ${value}= Get From Dictionary ${valued} ${valueName}
+ \ Set To Dictionary ${dict} ${name} ${value}
+ [Return] ${dict}
+Get Event Data From File
+ [Arguments] ${jsonfile}
+ ${data}= OperatingSystem.Get File ${jsonfile}
+ #Should Not Be_Equal ${data} None
+ [return] ${data}
+Json String To Dictionary
+ [Arguments] ${json_string}
+ ${json_dict}= evaluate json.loads('''${json_string}''') json
+ [return] ${json_dict}
+Dictionary To Json String
+ [Arguments] ${json_dict}
+ ${json_string}= evaluate json.dumps(${json_dict}) json
+ [return] ${json_string}
+Get DCAE Service Component Status
+ [Documentation] Get the status of a DCAE Service Component
+ [Arguments] ${url} ${urlpath} ${usr} ${passwd}
+ ${auth}= Create List ${usr} ${passwd}
+ ${session}= Create Session dcae-service-component ${url} auth=${auth}
+ ${resp}= Get Request dcae-service-component ${urlpath}
+ [return] ${resp}
+Publish Event To VES Collector No Auth
+ [Documentation] Send an event to VES Collector
+ [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata}
+ Log Creating session ${url}
+ ${session}= Create Session dcaegen2-d1 ${url}
+ ${resp}= Post Request dcaegen2-d1 ${evtpath} data=${evtdata} headers=${httpheaders}
+ #Log Received response from dcae ${resp.json()}
+ [return] ${resp}
+Publish Event To VES Collector
+ [Documentation] Send an event to VES Collector
+ [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata} ${user} ${pd}
+ ${auth}= Create List ${user} ${pd}
+ Log Creating session ${url}
+ ${session}= Create Session dcaegen2-d1 ${url} auth=${auth} disable_warnings=1
+ ${resp}= Post Request dcaegen2-d1 ${evtpath} data=${evtdata} headers=${httpheaders}
+ #Log Received response from dcae ${resp.json()}
+ [return] ${resp}
+Publish Event To VES Collector With Put Method
+ [Documentation] Send an event to VES Collector
+ [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata} ${user} ${pd}
+ ${auth}= Create List ${user} ${pd}
+ Log Creating session ${url}
+ ${session}= Create Session dcae-d1 ${url} auth=${auth}
+ ${resp}= Put Request dcae-d1 ${evtpath} data=${evtdata} headers=${httpheaders}
+ #Log Received response from dcae ${resp.json()}
+ [return] ${resp}
+Publish Event To VES Collector With Put Method No Auth
+ [Documentation] Send an event to VES Collector
+ [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata}
+ Log Creating session ${url}
+ ${session}= Create Session dcae-d1 ${url}
+ ${resp}= Put Request dcae-d1 ${evtpath} data=${evtdata} headers=${httpheaders}
+ #Log Received response from dcae ${resp.json()}
+ [return] ${resp}
diff --git a/test/csit/tests/dcaegen2/testcases/resources/dcae_properties.robot b/test/csit/tests/dcaegen2/testcases/resources/dcae_properties.robot
index be072d73c..692488814 100644
--- a/test/csit/tests/dcaegen2/testcases/resources/dcae_properties.robot
+++ b/test/csit/tests/dcaegen2/testcases/resources/dcae_properties.robot
@@ -1,15 +1,15 @@
-Documentation store all properties that can change or are used in multiple places here
-... format is all caps with underscores between words and prepended with GLOBAL
-... make sure you prepend them with GLOBAL so that other files can easily see it is from this file.
-
-
-
-*** Variables ***
-${GLOBAL_APPLICATION_ID} robot-dcaegen2
-${GLOBAL_DCAE_CONSUL_URL} http://135.205.228.129:8500
-${GLOBAL_DCAE_CONSUL_URL1} http://135.205.228.170:8500
-${GLOBAL_DCAE_VES_URL} http://localhost:8443/eventlistener/v5
-${GLOBAL_DCAE_USERNAME} console
-${GLOBAL_DCAE_PASSWORD} ZjJkYjllMjljMTI2M2Iz
-${VESC_HTTPS_USER} sample1
-${VESC_HTTPS_PD} sample1
+Documentation store all properties that can change or are used in multiple places here
+... format is all caps with underscores between words and prepended with GLOBAL
+... make sure you prepend them with GLOBAL so that other files can easily see it is from this file.
+
+
+
+*** Variables ***
+${GLOBAL_APPLICATION_ID} robot-dcaegen2
+${GLOBAL_DCAE_CONSUL_URL} http://135.205.228.129:8500
+${GLOBAL_DCAE_CONSUL_URL1} http://135.205.228.170:8500
+${GLOBAL_DCAE_VES_URL} http://localhost:8443/eventlistener/v5
+${GLOBAL_DCAE_USERNAME} console
+${GLOBAL_DCAE_PASSWORD} ZjJkYjllMjljMTI2M2Iz
+${VESC_HTTPS_USER} sample1
+${VESC_HTTPS_PD} sample1
diff --git a/test/csit/tests/multicloud-vmware/hosts/sanity-host.robot b/test/csit/tests/multicloud-vmware/hosts/sanity-host.robot
new file mode 100644
index 000000000..e74a79973
--- /dev/null
+++ b/test/csit/tests/multicloud-vmware/hosts/sanity-host.robot
@@ -0,0 +1,24 @@
+*** settings ***
+Resource ../../common.robot
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+Library HttpLibrary.HTTP
+
+
+*** Variables ***
+@{return_ok_list}= 200 201 202
+
+
+*** Test Cases ***
+
+TestGetHost
+ [Documentation] Sanity Test - Get Host
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${VIO_IP}:9004 headers=${headers}
+ ${resp}= Get Request web_session api/multicloud-vio/v0/vmware_fake/1234/hosts/1
+ ${response_code}= Convert To String ${resp.status_code}
+ List Should Contain Value ${return_ok_list} ${response_code}
+ ${response_json} json.loads ${resp.content}
+ #Log To Console ${response_json}
diff --git a/test/csit/tests/multicloud-vmware/images/sanity-image.robot b/test/csit/tests/multicloud-vmware/images/sanity-image.robot
new file mode 100644
index 000000000..390433d5c
--- /dev/null
+++ b/test/csit/tests/multicloud-vmware/images/sanity-image.robot
@@ -0,0 +1,24 @@
+*** settings ***
+Resource ../../common.robot
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+Library HttpLibrary.HTTP
+
+
+*** Variables ***
+@{return_ok_list}= 200 201 202
+
+
+*** Test Cases ***
+
+TestGetHost
+ [Documentation] Sanity Test - Get Image
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${VIO_IP}:9004 headers=${headers}
+ ${resp}= Get Request web_session api/multicloud-vio/v0/vmware_fake/1234/images/1
+ ${response_code}= Convert To String ${resp.status_code}
+ List Should Contain Value ${return_ok_list} ${response_code}
+ ${response_json} json.loads ${resp.content}
+ #Log To Console ${response_json}
diff --git a/test/csit/tests/multicloud-vmware/networks/sanity-network.robot b/test/csit/tests/multicloud-vmware/networks/sanity-network.robot
new file mode 100644
index 000000000..5433f18cb
--- /dev/null
+++ b/test/csit/tests/multicloud-vmware/networks/sanity-network.robot
@@ -0,0 +1,24 @@
+*** settings ***
+Resource ../../common.robot
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+Library HttpLibrary.HTTP
+
+
+*** Variables ***
+@{return_ok_list}= 200 201 202
+
+
+*** Test Cases ***
+
+TestGetHost
+ [Documentation] Sanity Test - Get Network
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${VIO_IP}:9004 headers=${headers}
+ ${resp}= Get Request web_session api/multicloud-vio/v0/vmware_fake/1234/networks/1
+ ${response_code}= Convert To String ${resp.status_code}
+ List Should Contain Value ${return_ok_list} ${response_code}
+ ${response_json} json.loads ${resp.content}
+ #Log To Console ${response_json}
diff --git a/test/csit/tests/multicloud-vmware/provision/jsoninput/image_file.json b/test/csit/tests/multicloud-vmware/provision/jsoninput/image_file.json
new file mode 100644
index 000000000..1e3cac6f5
--- /dev/null
+++ b/test/csit/tests/multicloud-vmware/provision/jsoninput/image_file.json
@@ -0,0 +1,7 @@
+{
+ "name": "cirros-0.3.2-x86_64-disk",
+ "container_format": "bare",
+ "disk_format": "qcow2",
+ "visibility": "public",
+ "schema": "/v2/schemas/image"
+} \ No newline at end of file
diff --git a/test/csit/tests/multicloud-vmware/provision/sanity_test_image.robot b/test/csit/tests/multicloud-vmware/provision/sanity_test_image.robot
index 0a6f2f5e8..e8e36dc14 100644
--- a/test/csit/tests/multicloud-vmware/provision/sanity_test_image.robot
+++ b/test/csit/tests/multicloud-vmware/provision/sanity_test_image.robot
@@ -13,10 +13,13 @@ Library HttpLibrary.HTTP
${get_token_url} /api/multicloud-vio/v0/vmware_fake/identity/v3/auth/tokens
${get_image_url} /api/multicloud-vio/v0/vmware_fake/glance/v2/images
${get_image_schema_url} /api/multicloud-vio/v0/vmware_fake/glance/v2/schemas/image
+${image_service} /api/multicloud-vio/v0/vmware_fake/glance/v2/image/file
+
#json files
${auth_info_json} ${SCRIPTS}/../tests/multicloud-vmware/provision/jsoninput/auth_info.json
+${image_file} ${SCRIPTS}/../tests/multicloud-vmware/provision/jsoninput/image_file.json
#global vars
${TOKEN}
@@ -42,7 +45,7 @@ GetAuthToken
-TestCaseShoeImageSchema
+TestCaseShowImageSchema
[Documentation] Sanity test - Show Image Schema
${headers} Create Dictionary Content-Type=application/json Accept=application/json X-Auth-Token=${TOKEN}
Create Session web_session http://${VIO_IP}:9004 headers=${headers}
@@ -76,3 +79,32 @@ TestCaseShowImage
List Should Contain Value ${return_ok_list} ${responese_code}
${response_json} json.loads ${resp.content}
Should Be Equal ${response_json['status']} active
+
+
+
+
+TestCaseUploadImage
+ [Documentation] Sanity test - Upload Image
+ ${json_value}= json_from_file ${image_file}
+ ${json_string}= string_from_json ${json_value}
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json X-Auth-Token=${TOKEN}
+ Create Session web_session http://${VIO_IP}:9004 headers=${headers}
+ ${resp}= POST Request web_session ${image_service} ${json_string}
+ ${responese_code}= Convert To String ${resp.status_code}
+ List Should Contain Value ${return_ok_list} ${responese_code}
+ ${response_json} json.loads ${resp.content}
+ ${IMAGEID}= Convert To String ${response_json['id']}
+ Set Global Variable ${IMAGEID}
+
+
+
+
+TestCaseDownloadImage
+ [Documentation] Sanity test - Download Image
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json X-Auth-Token=${TOKEN}
+ Create Session web_session http://${VIO_IP}:9004 headers=${headers}
+ ${resp}= Get Request web_session ${image_service}/${IMAGEID}
+ ${responese_code}= Convert To String ${resp.status_code}
+ List Should Contain Value ${return_ok_list} ${responese_code}
+ ${response_json} json.loads ${resp.content}
+ Should Be Equal ${response_json['status']} active \ No newline at end of file
diff --git a/test/csit/tests/multicloud-vmware/samples/sanity-sample.robot b/test/csit/tests/multicloud-vmware/samples/sanity-sample.robot
new file mode 100644
index 000000000..fcb784b27
--- /dev/null
+++ b/test/csit/tests/multicloud-vmware/samples/sanity-sample.robot
@@ -0,0 +1,25 @@
+*** settings ***
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+
+*** Variables ***
+@{return_ok_list}= 200 201 202
+${querysample_vio_url} /samples
+
+*** Test Cases ***
+VioSwaggerTest
+ [Documentation] query swagger info rest test
+ ${headers} Create Dictionary Content-Type=application/json X-TRANSACTIONID=123456 Accept=application/json
+ Create Session web_session http://${VIO_IP}:9004 headers=${headers}
+ ${resp}= Get Request web_session ${querysample_vio_url}
+ ${responese_code}= Convert To String ${resp.status_code}
+ List Should Contain Value ${return_ok_list} ${responese_code}
+ # verify logging output
+ ${response_json} json.loads ${resp.content}
+ ${logs}= Convert To String ${response_json['logs']}
+ Log To Console ${logs}
+ Should Contain ${logs} 123456
+ Should Contain ${logs} multicloud-vio
+ Should Contain ${logs} vio.samples.views \ No newline at end of file
diff --git a/test/csit/tests/multicloud/provision/data/capacity.json b/test/csit/tests/multicloud/provision/data/capacity.json
new file mode 100644
index 000000000..9b1130d08
--- /dev/null
+++ b/test/csit/tests/multicloud/provision/data/capacity.json
@@ -0,0 +1,6 @@
+{
+ "vCPU": 1,
+ "Memory": 1,
+ "Storage": 1,
+ "VIMs": ["vmware_fake"]
+} \ No newline at end of file
diff --git a/test/csit/tests/multicloud/provision/sanity_test_multivim.robot b/test/csit/tests/multicloud/provision/sanity_test_multivim.robot
index 2c1ec3f9f..4848b7459 100644
--- a/test/csit/tests/multicloud/provision/sanity_test_multivim.robot
+++ b/test/csit/tests/multicloud/provision/sanity_test_multivim.robot
@@ -1,4 +1,5 @@
*** settings ***
+Resource ../../common.robot
Library Collections
Library RequestsLibrary
Library OperatingSystem
@@ -7,6 +8,7 @@ Library json
*** Variables ***
@{return_ok_list}= 200 201 202
${queryswagger_broker_url} /api/multicloud/v0/swagger.json
+${check_capacity_broker_url} /api/multicloud/v0/check_vim_capacity
*** Test Cases ***
@@ -20,3 +22,12 @@ BrokerSwaggerTest
${response_json} json.loads ${resp.content}
${swagger_version}= Convert To String ${response_json['swagger']}
Should Be Equal ${swagger_version} 2.0
+
+BrokerCapacityTest
+ [Documentation] Check VIMs capacity
+ ${data}= Get Binary File ${CURDIR}${/}data${/}capacity.json
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${BROKER_IP}:9001 headers=${headers}
+ ${resp}= Post Request web_session ${check_capacity_broker_url} ${data}
+ ${responese_code}= Convert To String ${resp.status_code}
+ List Should Contain Value ${return_ok_list} ${responese_code}
diff --git a/test/csit/tests/music/music-distributed-kv-store-suite/__init__.robot b/test/csit/tests/music/music-distributed-kv-store-suite/__init__.robot
new file mode 100644
index 000000000..d1da7f385
--- /dev/null
+++ b/test/csit/tests/music/music-distributed-kv-store-suite/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation Integration - Suite 1 \ No newline at end of file
diff --git a/test/csit/tests/music/music-distributed-kv-store-suite/data/register_domain.json b/test/csit/tests/music/music-distributed-kv-store-suite/data/register_domain.json
new file mode 100644
index 000000000..96811ee97
--- /dev/null
+++ b/test/csit/tests/music/music-distributed-kv-store-suite/data/register_domain.json
@@ -0,0 +1,3 @@
+{
+ "domain":"test_domain"
+} \ No newline at end of file
diff --git a/test/csit/tests/music/music-distributed-kv-store-suite/music-distributed-kv-store-test.robot b/test/csit/tests/music/music-distributed-kv-store-suite/music-distributed-kv-store-test.robot
new file mode 100644
index 000000000..de26e5f6e
--- /dev/null
+++ b/test/csit/tests/music/music-distributed-kv-store-suite/music-distributed-kv-store-test.robot
@@ -0,0 +1,53 @@
+*** Settings ***
+Library OperatingSystem
+Library RequestsLibrary
+Library json
+
+*** Variables ***
+${MESSAGE} {"ping": "ok"}
+
+#global variables
+${generatedAID}
+
+*** Test Cases ***
+DKV Check Distributed KV Store API Docker Container
+ [Documentation] Checks if DKV docker container is running
+ ${rc} ${output}= Run and Return RC and Output docker ps
+ Log To Console *********************
+ Log To Console retrurn_code = ${rc}
+ Log To Console output = ${output}
+ Should Be Equal As Integers ${rc} 0
+ Should Contain ${output} nexus3.onap.org:10001/onap/music/distributed-kv-store
+
+DKV LoadDefaultProperties
+ [Documentation] Loads default configuration files into Consul
+ Create Session dkv ${DKV_HOSTNAME}:${DKV_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request dkv /v1/config/load-default headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+
+DKV FetchDefaultProperties
+ [Documentation] Fetches all default keys from Consul
+ Create Session dkv ${DKV_HOSTNAME}:${DKV_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request dkv /v1/getconfigs headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+
+#DKV RegisterDomain
+# [Documentation] Send a POST request to create a domain
+# Create Session dkv ${DKV_HOSTNAME}:${DKV_PORT}
+# ${data}= Get Binary File ${CURDIR}${/}data${/}register_domain.json
+# &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+# ${resp}= Post Request dkv v1/register data=${data} headers=${headers}
+# Log To Console *********************
+# Log To Console response = ${resp}
+# Log To Console body = ${resp.text}
+# Should Be Equal As Integers ${resp.status_code} 200
+
+*** Keywords ***
diff --git a/test/csit/tests/optf-has/has/data/healthcheck.json b/test/csit/tests/optf-has/has/data/healthcheck.json
new file mode 100644
index 000000000..926bb2898
--- /dev/null
+++ b/test/csit/tests/optf-has/has/data/healthcheck.json
@@ -0,0 +1,19 @@
+{
+ "consistencyInfo": {
+ "type": "eventual"
+ },
+ "values": {
+ "created": 1479482603641,
+ "message": "",
+ "name": "foo",
+ "recommend_max": 1,
+ "solution": "{\"healthcheck\": \" healthcheck\"}",
+ "status": "solved",
+ "template": "{\"healthcheck\": \"healthcheck\"}",
+ "timeout": 3600,
+ "translation": "{\"healthcheck\": \" healthcheck\"}",
+ "updated": 1484324150629
+ }
+}
+
+
diff --git a/test/csit/tests/optf-has/has/data/onboard.json b/test/csit/tests/optf-has/has/data/onboard.json
new file mode 100644
index 000000000..a4939c459
--- /dev/null
+++ b/test/csit/tests/optf-has/has/data/onboard.json
@@ -0,0 +1,6 @@
+{
+ "appname": "conductor",
+ "userId": "conductor",
+ "isAAF": "false",
+ "password": "c0nduct0r"
+}
diff --git a/test/csit/tests/optf-has/has/data/plan_with_lati_and_longi.json b/test/csit/tests/optf-has/has/data/plan_with_lati_and_longi.json
new file mode 100644
index 000000000..5e35d6abf
--- /dev/null
+++ b/test/csit/tests/optf-has/has/data/plan_with_lati_and_longi.json
@@ -0,0 +1,41 @@
+{
+ "name":"onap template with lati and longi without constraints and without optimizations",
+ "template":{
+ "homing_template_version":"2017-10-10",
+ "parameters":{
+ "service_name":"Residential vCPE",
+ "service_id":"vcpe_service_id",
+ "customer_lat":45.395968,
+ "customer_long":-71.135344,
+ "physical_location":"DLLSTX233",
+ "REQUIRED_MEM":4,
+ "REQUIRED_DISK":100,
+ "pnf_id":"some_pnf_id"
+ },
+ "locations":{
+ "customer_loc":{
+ "latitude":{
+ "get_param":"customer_lat"
+ },
+ "longitude":{
+ "get_param":"customer_long"
+ }
+ }
+ },
+ "demands":{
+ "vG":[
+ {
+ "inventory_provider":"aai",
+ "inventory_type":"cloud"
+ }
+ ]
+ },
+ "constraints":{
+
+ },
+ "optimization":{
+
+ }
+ }
+}
+
diff --git a/test/csit/tests/optf-has/has/data/plan_with_short_distance_constraint.json b/test/csit/tests/optf-has/has/data/plan_with_short_distance_constraint.json
new file mode 100644
index 000000000..68a7e119b
--- /dev/null
+++ b/test/csit/tests/optf-has/has/data/plan_with_short_distance_constraint.json
@@ -0,0 +1,64 @@
+{
+ "name":"onap template with short distance constraint",
+ "template":{
+ "homing_template_version":"2017-10-10",
+ "parameters":{
+ "service_name":"Residential vCPE",
+ "service_id":"vcpe_service_id",
+ "customer_lat":25.395968,
+ "customer_long":-51.135344,
+ "physical_location":"DLLSTX233",
+ "REQUIRED_MEM":4,
+ "REQUIRED_DISK":100,
+ "pnf_id":"some_pnf_id"
+ },
+ "locations":{
+ "customer_loc":{
+ "latitude":{
+ "get_param":"customer_lat"
+ },
+ "longitude":{
+ "get_param":"customer_long"
+ }
+ }
+ },
+ "demands":{
+ "vG":[
+ {
+ "inventory_provider":"aai",
+ "inventory_type":"cloud"
+ }
+ ]
+ },
+ "constraints":{
+ "distance-vg":{
+ "type":"distance_to_location",
+ "demands":[
+ "vG"
+ ],
+ "properties":{
+ "distance":"< 1 km",
+ "location":"customer_loc"
+ }
+ }
+ },
+ "optimization":{
+ "minimize": {
+ "sum": [
+ {
+ "distance_between": [
+ "customer_loc",
+ "vG"
+ ]
+ },
+ {
+ "distance_between": [
+ "customer_loc",
+ "vG"
+ ]
+ }
+ ]
+ }
+ }
+ }
+}
diff --git a/test/csit/tests/optf-has/has/data/plan_with_wrong_distance_constraint.json b/test/csit/tests/optf-has/has/data/plan_with_wrong_distance_constraint.json
new file mode 100644
index 000000000..9f25c2dff
--- /dev/null
+++ b/test/csit/tests/optf-has/has/data/plan_with_wrong_distance_constraint.json
@@ -0,0 +1,63 @@
+{
+ "name":"onap template with wrong distance constraint",
+ "template":{
+ "homing_template_version":"2017-10-10",
+ "parameters":{
+ "service_name":"Residential vCPE",
+ "service_id":"vcpe_service_id",
+ "customer_lat":45.395968,
+ "customer_long":-71.135344,
+ "physical_location":"DLLSTX233",
+ "REQUIRED_MEM":4,
+ "REQUIRED_DISK":100,
+ "pnf_id":"some_pnf_id"
+ },
+ "locations":{
+ "customer_loc":{
+ "latitude":{
+ "get_param":"customer_lat"
+ },
+ "longitude":{
+ "get_param":"customer_long"
+ }
+ }
+ },
+ "demands":{
+ "vG":[
+ {
+ "inventory_provider":"aai",
+ "inventory_type":"cloud"
+ }
+ ]
+ },
+ "constraints":{
+ "distance-vg":{
+ "demands":[
+ "vG"
+ ],
+ "properties":{
+ "distance":"< 1 km",
+ "location":"customer_loc"
+ }
+ }
+ },
+ "optimization":{
+ "minimize": {
+ "sum": [
+ {
+ "distance_between": [
+ "customer_loc",
+ "vG"
+ ]
+ },
+ {
+ "distance_between": [
+ "customer_loc",
+ "vG"
+ ]
+ }
+ ]
+ }
+ }
+ }
+}
diff --git a/test/csit/tests/optf-has/has/data/plan_with_wrong_version.json b/test/csit/tests/optf-has/has/data/plan_with_wrong_version.json
index 9471fbf82..c0618bfbf 100644
--- a/test/csit/tests/optf-has/has/data/plan_with_wrong_version.json
+++ b/test/csit/tests/optf-has/has/data/plan_with_wrong_version.json
@@ -1,175 +1,41 @@
{
- "name": "onap optf has plan with wrong version",
- "template": {
- "conductor_template_version": "yyyy-mm-dd",
- "parameters": {
- "UCPEHOST": "chcil129snd",
- "CUSTOMER":"21014aa2-526b-11e6-beb8-9e71128cae77"
- },
- "locations": {
- "customer_loc": {
- "host_name": {
- "get_param": "UCPEHOST"
- }
- }
- },
- "demands": {
- "vHNPortalaaS_PRIMARY_1": [
- {
- "inventory_provider": "aai",
- "inventory_type": "service",
- "service_type": "HNPORTAL",
- "customer_id": {"get_param": "CUSTOMER"}
+ "name":"onap template with wrong version",
+ "template":{
+ "homing_template_version":"xxxx-yy-zz",
+ "parameters":{
+ "service_name":"Residential vCPE",
+ "service_id":"vcpe_service_id",
+ "customer_lat":45.395968,
+ "customer_long":-71.135344,
+ "physical_location":"DLLSTX233",
+ "REQUIRED_MEM":4,
+ "REQUIRED_DISK":100,
+ "pnf_id":"some_pnf_id"
},
- {
- "inventory_provider": "aai",
- "inventory_type": "cloud"
- }
- ],
- "vHNPortalaaS_SECONDARY_1": [
- {
- "inventory_provider": "aai",
- "inventory_type": "service",
- "service_type": "HNPORTAL",
- "customer_id": {"get_param": "CUSTOMER"}
+ "locations":{
+ "customer_loc":{
+ "latitude":{
+ "get_param":"customer_lat"
+ },
+ "longitude":{
+ "get_param":"customer_long"
+ }
+ }
},
- {
- "inventory_provider": "aai",
- "inventory_type": "cloud"
- }
- ],
- "vHNGWaaS_PRIMARY_1": [
- {
- "inventory_provider": "aai",
- "inventory_type": "service",
- "service_type": "HNGATEWAY",
- "customer_id": {"get_param": "CUSTOMER"}
+ "demands":{
+ "vG":[
+ {
+ "inventory_provider":"aai",
+ "inventory_type":"cloud"
+ }
+ ]
},
- {
- "inventory_provider": "aai",
- "inventory_type": "cloud"
- }
- ],
- "vHNGWaaS_SECONDARY_1": [
- {
- "inventory_provider": "aai",
- "inventory_type": "service",
- "service_type": "HNGATEWAY",
- "customer_id": {"get_param": "CUSTOMER"}
+ "constraints":{
+
},
- {
- "inventory_provider": "aai",
- "inventory_type": "cloud"
- }
- ],
- "vVIGaaS_PRIMARY_1": [
- {
- "inventory_provider": "aai",
- "inventory_type": "service",
- "service_type": "VVIG",
- "customer_id": {"get_param": "CUSTOMER"}
- }
- ],
- "vVIGaaS_SECONDARY_1": [
- {
- "inventory_provider": "aai",
- "inventory_type": "service",
- "service_type": "VVIG",
- "customer_id": {"get_param": "CUSTOMER"}
- }
- ],
- "vVIGaaS_PRIMARY_2": [
- {
- "inventory_provider": "aai",
- "inventory_type": "service",
- "service_type": "VVIG",
- "customer_id": {"get_param": "CUSTOMER"}
+ "optimization":{
+
}
- ],
- "vVIGaaS_SECONDARY_2": [
- {
- "inventory_provider": "aai",
- "inventory_type": "service",
- "service_type": "VVIG",
- "customer_id": {"get_param": "CUSTOMER"}
- }
- ]
- },
- "constraints": {
- "distance-vvig": {
- "type": "distance_to_location",
- "demands": [
- "vVIGaaS_SECONDARY_1",
- "vVIGaaS_PRIMARY_1"
- ],
- "properties": {
- "distance": "< 5000 km",
- "location": "customer_loc"
- }
- },
- "distance-vgw": {
- "type": "distance_to_location",
- "demands": [
- "vHNGWaaS_SECONDARY_1",
- "vHNGWaaS_PRIMARY_1"
- ],
- "properties": {
- "distance": "< 5000 km",
- "location": "customer_loc"
- }
- },
- "zone-vhngw": {
- "type": "zone",
- "demands": [
- "vHNGWaaS_SECONDARY_1",
- "vHNGWaaS_PRIMARY_1"
- ],
- "properties": {
- "qualifier": "different",
- "category": "complex"
- }
- },
- "zone-vhnportal": {
- "type": "zone",
- "demands": [
- "vHNPortalaaS_SECONDARY_1",
- "vHNPortalaaS_PRIMARY_1"
- ],
- "properties": {
- "qualifier": "different",
- "category": "complex"
- }
- }
- },
- "optimization": {
- "minimize": {
- "sum": [
- {
- "product": [
- 1,
- {
- "distance_between": [
- "customer_loc",
- "vVIGaaS_PRIMARY_1"
- ]
- }
- ]
- },
- {
- "product": [
- 1,
- {
- "distance_between": [
- "customer_loc",
- "vHNGWaaS_PRIMARY_1"
- ]
- }
- ]
- }
- ]
- }
- }
-},
- "timeout": 5,
- "limit": 3
+ }
}
+
diff --git a/test/csit/tests/optf-has/has/data/plan_without_demand_section.json b/test/csit/tests/optf-has/has/data/plan_without_demand_section.json
index 87a459d87..fe5d2fa65 100644
--- a/test/csit/tests/optf-has/has/data/plan_without_demand_section.json
+++ b/test/csit/tests/optf-has/has/data/plan_without_demand_section.json
@@ -1,93 +1,33 @@
{
- "name": "onap optf has plan with wrong version",
- "template": {
- "conductor_template_version": "2016-11-01",
- "parameters": {
- "UCPEHOST": "chcil129snd",
- "CUSTOMER":"21014aa2-526b-11e6-beb8-9e71128cae77"
- },
- "locations": {
- "customer_loc": {
- "host_name": {
- "get_param": "UCPEHOST"
- }
- }
- },
- "constraints": {
- "distance-vvig": {
- "type": "distance_to_location",
- "demands": [
- "vVIGaaS_SECONDARY_1",
- "vVIGaaS_PRIMARY_1"
- ],
- "properties": {
- "distance": "< 5000 km",
- "location": "customer_loc"
- }
- },
- "distance-vgw": {
- "type": "distance_to_location",
- "demands": [
- "vHNGWaaS_SECONDARY_1",
- "vHNGWaaS_PRIMARY_1"
- ],
- "properties": {
- "distance": "< 5000 km",
- "location": "customer_loc"
- }
- },
- "zone-vhngw": {
- "type": "zone",
- "demands": [
- "vHNGWaaS_SECONDARY_1",
- "vHNGWaaS_PRIMARY_1"
- ],
- "properties": {
- "qualifier": "different",
- "category": "complex"
- }
- },
- "zone-vhnportal": {
- "type": "zone",
- "demands": [
- "vHNPortalaaS_SECONDARY_1",
- "vHNPortalaaS_PRIMARY_1"
- ],
- "properties": {
- "qualifier": "different",
- "category": "complex"
- }
- }
- },
- "optimization": {
- "minimize": {
- "sum": [
- {
- "product": [
- 1,
- {
- "distance_between": [
- "customer_loc",
- "vVIGaaS_PRIMARY_1"
- ]
- }
- ]
- },
- {
- "product": [
- 1,
- {
- "distance_between": [
- "customer_loc",
- "vHNGWaaS_PRIMARY_1"
- ]
+ "name":"onap template without demand section",
+ "template":{
+ "homing_template_version":"2017-10-10",
+ "parameters":{
+ "service_name":"Residential vCPE",
+ "service_id":"vcpe_service_id",
+ "customer_lat":45.395968,
+ "customer_long":-71.135344,
+ "physical_location":"DLLSTX233",
+ "REQUIRED_MEM":4,
+ "REQUIRED_DISK":100,
+ "pnf_id":"some_pnf_id"
+ },
+ "locations":{
+ "customer_loc":{
+ "latitude":{
+ "get_param":"customer_lat"
+ },
+ "longitude":{
+ "get_param":"customer_long"
}
- ]
- }
- ]
- }
- }
-},
- "timeout": 5,
- "limit": 3
+ }
+ },
+ "constraints":{
+
+ },
+ "optimization":{
+
+ }
+ }
}
+
diff --git a/test/csit/tests/optf-has/has/optf_has_test.robot b/test/csit/tests/optf-has/has/optf_has_test.robot
index 4882c229f..62db10774 100644
--- a/test/csit/tests/optf-has/has/optf_has_test.robot
+++ b/test/csit/tests/optf-has/has/optf_has_test.robot
@@ -11,6 +11,8 @@ ${RESP_MESSAGE_WITHOUT_DEMANDS} Undefined Demand
#global variables
${generatedPlanId}
+${generatedAID}
+${resultStatus}
*** Test Cases ***
Check Cassandra Docker Container
@@ -114,6 +116,186 @@ Get Root Url
Log To Console body = ${resp.text}
Should Be Equal As Integers ${resp.status_code} 200
+Conductor AddHealthcheck Row Into Music
+ [Documentation] It sends a REST PUT request to Music to inject healthcheck plan
+ Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
+ ${data}= Get Binary File ${CURDIR}${/}data${/}healthcheck.json
+ &{headers}= Create Dictionary ns=conductor userId=conductor password=c0nduct0r Content-Type=application/json Accept=application/json
+ ${resp}= Put Request musicaas /MUSIC/rest/v2/keyspaces/conductor/tables/plans/rows?id=healthcheck data=${data} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ ${response_json} json.loads ${resp.content}
+ Should Be Equal As Integers ${resp.status_code} 200
+ Sleep 5s Wait Injection effectiveness
+
+Healthcheck
+ [Documentation] It sends a REST GET request to healthcheck url
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request optf-cond /v1/plans/healthcheck headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+
+SendPlanWithWrongVersion
+ [Documentation] It sends a POST request to conductor
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ ${data}= Get Binary File ${CURDIR}${/}data${/}plan_with_wrong_version.json
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Post Request optf-cond /v1/plans data=${data} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ ${response_json} json.loads ${resp.content}
+ ${generatedPlanId}= Convert To String ${response_json['id']}
+ Set Global Variable ${generatedPlanId}
+ Log To Console generatedPlanId = ${generatedPlanId}
+ Should Be Equal As Integers ${resp.status_code} 201
+ Sleep 10s Wait Plan Resolution
+
+GetPlanWithWrongVersion
+ [Documentation] It sends a REST GET request to capture error
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request optf-cond /v1/plans/${generatedPlanId} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ ${response_json} json.loads ${resp.content}
+ ${resultStatus}= Convert To String ${response_json['plans'][0]['status']}
+ Set Global Variable ${resultStatus}
+ Log To Console resultStatus = ${resultStatus}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+ Should Be Equal error ${resultStatus}
+
+SendPlanWithoutDemandSection
+ [Documentation] It sends a POST request to conductor
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ ${data}= Get Binary File ${CURDIR}${/}data${/}plan_without_demand_section.json
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Post Request optf-cond /v1/plans data=${data} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ ${response_json} json.loads ${resp.content}
+ ${generatedPlanId}= Convert To String ${response_json['id']}
+ Set Global Variable ${generatedPlanId}
+ Log To Console generatedPlanId = ${generatedPlanId}
+ Should Be Equal As Integers ${resp.status_code} 201
+ Sleep 10s Wait Plan Resolution
+
+GetPlanWithoutDemandSection
+ [Documentation] It sends a REST GET request to capture error
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request optf-cond /v1/plans/${generatedPlanId} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ ${response_json} json.loads ${resp.content}
+ ${resultStatus}= Convert To String ${response_json['plans'][0]['status']}
+ Set Global Variable ${resultStatus}
+ Log To Console resultStatus = ${resultStatus}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+ Should Be Equal error ${resultStatus}
+
+SendPlanWithWrongConstraint
+ [Documentation] It sends a POST request to conductor
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ ${data}= Get Binary File ${CURDIR}${/}data${/}plan_with_wrong_distance_constraint.json
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Post Request optf-cond /v1/plans data=${data} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ ${response_json} json.loads ${resp.content}
+ ${generatedPlanId}= Convert To String ${response_json['id']}
+ Set Global Variable ${generatedPlanId}
+ Log To Console generatedPlanId = ${generatedPlanId}
+ Should Be Equal As Integers ${resp.status_code} 201
+ Sleep 10s Wait Plan Resolution
+
+GetPlanWithWrongConstraint
+ [Documentation] It sends a REST GET request to capture error
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request optf-cond /v1/plans/${generatedPlanId} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ ${response_json} json.loads ${resp.content}
+ ${resultStatus}= Convert To String ${response_json['plans'][0]['status']}
+ Set Global Variable ${resultStatus}
+ Log To Console resultStatus = ${resultStatus}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+ Should Be Equal error ${resultStatus}
+
+
+SendPlanWithLatiAndLongi
+ [Documentation] It sends a POST request to conductor
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ ${data}= Get Binary File ${CURDIR}${/}data${/}plan_with_lati_and_longi.json
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Post Request optf-cond /v1/plans data=${data} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ ${response_json} json.loads ${resp.content}
+ ${generatedPlanId}= Convert To String ${response_json['id']}
+ Set Global Variable ${generatedPlanId}
+ Log To Console generatedPlanId = ${generatedPlanId}
+ Should Be Equal As Integers ${resp.status_code} 201
+ Sleep 60s Wait Plan Resolution
+
+GetPlanWithLatiAndLongi
+ [Documentation] It sends a REST GET request to capture recommendations
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request optf-cond /v1/plans/${generatedPlanId} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ ${response_json} json.loads ${resp.content}
+ ${resultStatus}= Convert To String ${response_json['plans'][0]['status']}
+ Set Global Variable ${resultStatus}
+ Log To Console resultStatus = ${resultStatus}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+ Should Be Equal done ${resultStatus}
+
+SendPlanWithShortDistanceConstraint
+ [Documentation] It sends a POST request to conductor
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ ${data}= Get Binary File ${CURDIR}${/}data${/}plan_with_short_distance_constraint.json
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Post Request optf-cond /v1/plans data=${data} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ Log To Console body = ${resp.text}
+ ${response_json} json.loads ${resp.content}
+ ${generatedPlanId}= Convert To String ${response_json['id']}
+ Set Global Variable ${generatedPlanId}
+ Log To Console generatedPlanId = ${generatedPlanId}
+ Should Be Equal As Integers ${resp.status_code} 201
+ Sleep 60s Wait Plan Resolution
+
+GetPlanWithShortDistanceConstraint
+ [Documentation] It sends a REST GET request to capture recommendations
+ Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT}
+ &{headers}= Create Dictionary Content-Type=application/json Accept=application/json
+ ${resp}= Get Request optf-cond /v1/plans/${generatedPlanId} headers=${headers}
+ Log To Console *********************
+ Log To Console response = ${resp}
+ ${response_json} json.loads ${resp.content}
+ ${resultStatus}= Convert To String ${response_json['plans'][0]['status']}
+ Set Global Variable ${resultStatus}
+ Log To Console resultStatus = ${resultStatus}
+ Log To Console body = ${resp.text}
+ Should Be Equal As Integers ${resp.status_code} 200
+ Should Be Equal not found ${resultStatus}
+
+
*** Keywords ***
diff --git a/test/csit/tests/policy/suite1/Policy-CSIT.robot b/test/csit/tests/policy/suite1/Policy-CSIT.robot
index fd5e23048..5909abd29 100644
--- a/test/csit/tests/policy/suite1/Policy-CSIT.robot
+++ b/test/csit/tests/policy/suite1/Policy-CSIT.robot
@@ -16,10 +16,12 @@ ${CREATE_CONFIG_VFW_TEMPLATE} ${CURDIR}/configpolicy_vFW_R1.template
${CREATE_CONFIG_VDNS_TEMPLATE} ${CURDIR}/configpolicy_vDNS_R1.template
${CREATE_CONFIG_VCPE_TEMPLATE} ${CURDIR}/configpolicy_vCPE_R1.template
${CREATE_OPS_VFW_TEMPLATE} ${CURDIR}/opspolicy_VFW_R1.template
+${CREATE_OOF_HPA_TEMPLATE} ${CURDIR}/oofpolicy_HPA_R1.template
${PUSH_POLICY_TEMPLATE} ${CURDIR}/pushpolicy.template
${CREATE_OPS_VDNS_TEMPLATE} ${CURDIR}/opspolicy_VDNS_R1.template
${DEL_POLICY_TEMPLATE} ${CURDIR}/deletepolicy.template
${GETCONFIG_TEMPLATE} ${CURDIR}/getconfigpolicy.template
+${GETOOF_TEMPLATE} ${CURDIR}/getoofpolicy.template
${CONFIG_POLICY_VFW_NAME} vFirewall
${CONFIG_POLICY_VFW_TYPE} MicroService
${CONFIG_POLICY_VDNS_NAME} vLoadBalancer
@@ -34,6 +36,8 @@ ${OPS_POLICY_VCPE_NAME} vCPE
${OPS_POLICY_VCPE_TYPE} BRMS_PARAM
${OPS_POLICY_VOLTE_NAME} VoLTE
${OPS_POLICY_VOLTE_TYPE} BRMS_PARAM
+${OOF_POLICY_HPA_NAME} HPA
+${OOF_POLICY_HPA_TYPE} Optimization
${file_path} ../testsuite/robot/assets/templates/ControlLoopDemo__closedLoopControlName.drl
${RESOURCE_PATH_UPLOAD} /pdp/api/policyEngineImport?importParametersJson=%7B%22serviceName%22%3A%22Manyu456%22%2C%20%22serviceType%22%3A%22BRMSPARAM%22%7D
${CREATE_OPS_VCPE_TEMPLATE} ${CURDIR}/opspolicy_vCPE_R1.template
@@ -75,6 +79,12 @@ VOLTE Ops Policy
${OPS_POLICY_VOLTE_NAME}= Create Ops VOLTE Policy
Push Ops Policy ${OPS_POLICY_VOLTE_NAME} ${OPS_POLICY_VOLTE_TYPE}
#VOLTE Policy Tests
+
+HPA OOF Policy
+ ${OOF_POLICY_HPA_NAME}= Create OOF HPA Policy
+ Push Config Policy ${OOF_POLICY_HPA_NAME} ${OOF_POLICY_HPA_TYPE}
+ #HPA Policy Tests
+
VFW Get Configs Policy
Sleep 5s
Get Configs VFW Policy
@@ -87,6 +97,10 @@ VCPE Get Configs Policy
Sleep 5s
Get Configs VCPE Policy
+HPA Get OOF Policy
+ Sleep 5s
+ Get OOF HPA Policy
+
*** Keywords ***
VFW Policy Tests
@@ -114,6 +128,11 @@ VOLTE Policy Tests
${OPS_POLICY_VOLTE_NAME}= Create Ops VOLTE Policy
Push Ops Policy ${OPS_POLICY_VOLTE_NAME} ${OPS_POLICY_VOLTE_TYPE}
+HPA Policy Tests
+ ${OOF_POLICY_HPA_NAME}= Create OOF HPA Policy
+ Push Config Policy ${OOF_POLICY_HPA_NAME} ${OOF_POLICY_HPA_TYPE}
+ Get OOF HPA Policy
+
Get Configs VFW Policy
[Documentation] Get Config Policy for VFW
${getconfigpolicy}= Catenate .*${CONFIG_POLICY_VFW_NAME}*
@@ -122,6 +141,26 @@ Get Configs VFW Policy
${get_resp} = Run Policy Get Configs Request ${RESOURCE_PATH_GET_CONFIG} ${output}
Should Be Equal As Strings ${get_resp.status_code} 200
+Create OOF HPA Policy
+ [Documentation] Create OOF Policy
+ ${randompolicyname} = Create Policy Name
+ ${policyname1}= Catenate com.${randompolicyname}_HPA
+ ${OOF_POLICY_HPA_NAME}= Set Test Variable ${policyname1}
+ ${hpapolicy}= Create Dictionary policy_name=${policyname1}
+ ${output} = Fill JSON Template File ${CREATE_OOF_HPA_TEMPLATE} ${hpapolicy}
+ ${put_resp} = Run Policy Put Request ${RESOURCE_PATH_CREATE} ${output}
+ Log ${put_resp}
+ Should Be Equal As Strings ${put_resp.status_code} 200
+ [Return] ${policyname1}
+
+Get OOF HPA Policy
+ [Documentation] Get OOF Policy for HPA
+ ${gethpapolicy}= Catenate .*${OOF_POLICY_HPA_NAME}*
+ ${hpapolicy_name}= Create Dictionary oof_policy_name=${gethpapolicy}
+ ${output} = Fill JSON Template File ${GETOOF_TEMPLATE} ${hpapolicy_name}
+ ${get_resp} = Run Policy Get Configs Request ${RESOURCE_PATH_GET_CONFIG} ${output}
+ Should Be Equal As Strings ${get_resp.status_code} 200
+
Create Config VFW Policy
[Documentation] Create Config Policy
${randompolicyname} = Create Policy Name
@@ -186,6 +225,15 @@ Delete Config Policy
${put_resp} = Run Policy Delete Request ${RESOURCE_PATH_CREATE_DELETE} ${output}
Should Be Equal As Strings ${put_resp.status_code} 200
+Delete OOF Policy
+ [Documentation] Delete OOF Policy
+ [Arguments] ${policy_name}
+ ${policyname3}= Catenate com.Config_OOF_${policy_name}.1.xml
+ ${dict}= Create Dictionary policy_name=${policyname3}
+ ${output} = Fill JSON Template ${DEL_POLICY_TEMPLATE} ${dict}
+ ${put_resp} = Run Policy Delete Request ${RESOURCE_PATH_CREATE_DELETE} ${output}
+ Should Be Equal As Strings ${put_resp.status_code} 200
+
Get Configs VDNS Policy
[Documentation] Get Config Policy for VDNS
${getconfigpolicy}= Catenate .*${CONFIG_POLICY_VDNS_NAME}*
@@ -267,4 +315,4 @@ Upload DRL file
# ${files2} = {'file': open('../testsuite/robot/assets/templates/ControlLoopDemo__closedLoopControlName.drl', 'rb')}
# ${files}= Create Dictionary file ${file_data}
${put_resp} = Run Policy Post form Request ${RESOURCE_PATH_UPLOAD} ${files}
- Should Be Equal As Strings ${put_resp.status_code} 200 \ No newline at end of file
+ Should Be Equal As Strings ${put_resp.status_code} 200
diff --git a/test/csit/tests/policy/suite1/getoofpolicy.template b/test/csit/tests/policy/suite1/getoofpolicy.template
new file mode 100644
index 000000000..37fe0471b
--- /dev/null
+++ b/test/csit/tests/policy/suite1/getoofpolicy.template
@@ -0,0 +1,6 @@
+{
+ "configAttributes" : {
+ },
+ "policyName" : "${oof_policy_name}",
+ "unique" : false
+}
diff --git a/test/csit/tests/policy/suite1/oofpolicy_HPA_R1.template b/test/csit/tests/policy/suite1/oofpolicy_HPA_R1.template
new file mode 100644
index 000000000..3a170b207
--- /dev/null
+++ b/test/csit/tests/policy/suite1/oofpolicy_HPA_R1.template
@@ -0,0 +1,6 @@
+{
+ "configBody": "{\"service\":\"hpaPolicy\",\"policyName\":\"testWorkingTOSCA\",\"description\":\"testing\",\"templateVersion\":\"OpenSource.version.1\",\"version\":\"CSIT\",\"priority\":\"5\",\"riskType\":\"SampleRiskType\",\"riskLevel\":\"3\",\"guard\":\"False\",\"content\":{\"identity\":\"testIdentity\",\"policyScope\":[\"test1\",\"test2\",\"test3\"],\"policyType\":\"hpaPolicy\",\"resources\":\"testResources1\",\"flavorFeatures\":[{\"flavorLabel\":\"testFlavor\",\"flavorProperties\":[{\"score\":\"testScore\",\"featureAttributes\":[{\"unit\":\"testUnit\",\"attribute\":\"teatFeatureAttribute\",\"value\":\"testValue\",\"operator\":\"any\"}],\"mandatory\":\"testMandatory\",\"hpaFeature\":\"testHAPFeature\",\"architecture\":\"testArch\"}]}]}}",
+ "policyConfigType": "Optimization",
+ "policyName": "${policy_name}",
+ "onapName": "OOF"
+}
diff --git a/test/csit/tests/portal-sdk/testsuites/test1.robot b/test/csit/tests/portal-sdk/testsuites/test1.robot
index 84579d017..00714024f 100644
--- a/test/csit/tests/portal-sdk/testsuites/test1.robot
+++ b/test/csit/tests/portal-sdk/testsuites/test1.robot
@@ -6,15 +6,15 @@ Library XvfbRobot
*** Variables ***
-${PORTAL_URL} http://portal.api.simpledemo.onap.org:8989
-${PORTAL_ENV} /ONAPPORTAL
+${PORTAL_URL} http://portal.api.simpledemo.onap.org:8990
+${PORTAL_ENV} /ONAPPORTALSDK
${PORTAL_LOGIN_URL} ${PORTAL_URL}${PORTAL_ENV}/login.htm
-${PORTAL_HOME_PAGE} ${PORTAL_URL}${PORTAL_ENV}/applicationsHome
+${PORTAL_HOME_PAGE} ${PORTAL_URL}${PORTAL_ENV}/welcome
${PORTAL_MICRO_ENDPOINT} ${PORTAL_URL}${PORTAL_ENV}/commonWidgets
${PORTAL_HOME_URL} ${PORTAL_URL}${PORTAL_ENV}/applicationsHome
${GLOBAL_APPLICATION_ID} robot-functional
${GLOBAL_PORTAL_ADMIN_USER} demo
-${GLOBAL_PORTAL_ADMIN_PWD} demo123456!
+${GLOBAL_PORTAL_ADMIN_PWD} demo
${GLOBAL_SELENIUM_BROWSER} chrome
${GLOBAL_SELENIUM_BROWSER_CAPABILITIES} Create Dictionary
${GLOBAL_SELENIUM_DELAY} 0
@@ -36,115 +36,105 @@ Portal admin Login To Portal GUI
Set Selenium Speed ${GLOBAL_SELENIUM_DELAY}
Set Browser Implicit Wait ${GLOBAL_SELENIUM_BROWSER_IMPLICIT_WAIT}
Log Logging in to ${PORTAL_URL}${PORTAL_ENV}
- # Handle Proxy Warning
+ # Handle Proxy Warning
Title Should Be Login
- Input Text xpath=//input[@ng-model='loginId'] ${GLOBAL_PORTAL_ADMIN_USER}
- Input Password xpath=//input[@ng-model='password'] ${GLOBAL_PORTAL_ADMIN_PWD}
- Click Link xpath=//a[@id='loginBtn']
- Wait Until Page Contains Element xpath=//img[@alt='Onap Logo'] ${GLOBAL_SELENIUM_BROWSER_WAIT_TIMEOUT}
+ Input Text xpath=//input[@id='loginId'] ${GLOBAL_PORTAL_ADMIN_USER}
+ Input Password xpath=//input[@id='password'] ${GLOBAL_PORTAL_ADMIN_PWD}
+ Click Element //*[@id="loginBtn"]
+ Wait Until Page Contains Element xpath=//img[@src='app/fusionapp/icons/logo_onap_transbg.png'] ${GLOBAL_SELENIUM_BROWSER_WAIT_TIMEOUT}
Log Logged in to ${PORTAL_URL}${PORTAL_ENV}
-
-Portal Admin Navigation Application Link Tab
- [Documentation] Logs into Portal GUI as Portal admin
- Click Element xpath=.//h3[contains(text(),'xDemo App')]/following::div[1]
- Go To ${PORTAL_HOME_PAGE}
- Dismiss Alert accept=false
- #Scroll Element Into View xpath=//span[@id='tab-Home']
- #Click Element xpath=//span[@id='tab-Home']
+SDKPortalAdmin Navigation Application Link Tab
+ [Documentation] Logs into Portal GUI as Portal admin
+ Comment Click Element xpath=.//h3[contains(text(),'xDemo App')]/following::div[1]
+ Comment Go To ${PORTAL_HOME_PAGE}
+ Comment Dismiss Alert accept=false
+ #Scroll Element Into View xpath=//span[@id='tab-Home']
+ #Click Element xpath=//span[@id='tab-Home']
#Click Element xpath=(//span[@id='tab-xDemo-App']/following::i[@class='ion-close-round'])[1]
- Click Element xpath=.//h3[contains(text(),'xDemo App')]/following::div[1]
-
-
-
-Validate SDK Sub Menu
- [Documentation] Logs into SDK GUI as Portal admin
- Page Should Contain Home
- Page Should Contain Sample Pages
- Page Should Contain Reports
- Page Should Contain Profile
- Page Should Contain Admin
-
-Click Sample Pages and validate sub Menu
- [Documentation] Click Sample Pages
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Sample-Pages']
- Element Text Should Be xpath=//a[@title='Collaboration'] Collaboration
- Element Text Should Be xpath=//a[@title='Notebook'] Notebook
- Click Link xpath=//a[contains(@title,'Collaboration')]
- Page Should Contain User List
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Sample-Pages']
- Click Link xpath=//a[contains(@title,'Notebook')]
- Element Text Should Be xpath=//h1[contains(.,'Notebook')] Notebook
-
-Click Reports and validate sub Menu
- [Documentation] Click Reports Tab
+ Comment Click Element xpath=.//h3[contains(text(),'xDemo App')]/following::div[1]
+
+Validate SDK Sub Menu
+ [Documentation] Logs into SDK GUI as Portal admin
+ Page Should Contain Home
+ Page Should Contain Sample Pages
+ Page Should Contain Reports
+ Page Should Contain Profile
+ Page Should Contain Admin
+
+#Click Sample Pages and validate sub Menu
+ #[Documentation] Click Sample Pages
+ #Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ #Click Link xpath=//a[@id='parent-item-Sample-Pages']
+ #Element Text Should Be xpath=//a[@title='Collaboration'] Collaboration
+ #Element Text Should Be xpath=//a[@title='Notebook'] Notebook
+ #Click Link xpath=//a[contains(@title,'Collaboration')]
+ #Page Should Contain User List
+ #Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ #Click Link xpath=//a[@id='parent-item-Sample-Pages']
+ #Click Link xpath=//a[contains(@title,'Notebook')]
+ #Element Text Should Be xpath=//h1[contains(.,'Notebook')] Notebook
+
+Click Reports and validate sub Menu
+ [Documentation] Click Reports Tab
#Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Reports']
- Element Text Should Be xpath=//a[@title='All Reports'] All Reports
- Element Text Should Be xpath=//a[@title='Create Reports'] Create Reports
- Click Link xpath=//a[contains(@title,'All Reports')]
- Page Should Contain Report search
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Reports']
- Click Link xpath=//a[contains(@title,'Create Reports')]
- Page Should Contain Report Wizard
-
-Click Profile and validate sub Menu
- [Documentation] Click Profile Tab
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Profile']
- Element Text Should Be xpath=//a[@title='Search'] Search
- Element Text Should Be xpath=//a[@title='Self'] Self
- Click Link xpath=//a[contains(@title,'Search')]
- Page Should Contain Profile Search
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Profile']
- Click Link xpath=//a[contains(@title,'Self')]
- Page Should Contain Self Profile Detail
-
-
-Click Admin and validate sub Menu
- [Documentation] Click Admin Tab
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Admin']
- Element Text Should Be xpath=//a[@title='Roles'] Roles
- Element Text Should Be xpath=//a[@title='Role Functions'] Role Functions
- Element Text Should Be xpath=//a[@title='Cache Admin'] Cache Admin
- Element Text Should Be xpath=//a[@title='Menus'] Menus
- Element Text Should Be xpath=//a[@title='Usage'] Usage
- Click Link xpath=//a[contains(@title,'Roles')]
- Page Should Contain Roles
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Admin']
- Click Link xpath=//a[contains(@title,'Role Function')]
- Page Should Contain Role Function
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=.//a[@id='parent-item-Admin']
- #Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Admin']
- Click Link xpath=//a[contains(@title,'Cache Admin')]
- Page Should Contain Cache Regions
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=.//a[@id='parent-item-Admin']
- Click Link xpath=//a[@id='parent-item-Admin']
- Click Link xpath=//a[contains(@title,'Menus')]
- Page Should Contain Admin Menu Items
- Select frame xpath=.//*[@id='tabframe-xDemo-App']
- Click Link xpath=//a[@id='parent-item-Admin']
- Click Link xpath=//a[@id='parent-item-Admin']
- Click Link xpath=//a[contains(@title,'Usage')]
- Page Should Contain Current Usage
-
-
-Teardown
- [Documentation] Close All Open browsers
- Close All Browsers
-
+ Click Link xpath=//a[@id='parent-item-Reports']
+ Element Text Should Be xpath=//a[@title='All Reports'] All Reports
+ Element Text Should Be xpath=//a[@title='Create Reports'] Create Reports
+ Click Link xpath=//a[contains(@title,'All Reports')]
+ Page Should Contain Report search
+ Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ Click Link xpath=//a[@id='parent-item-Reports']
+ Click Link xpath=//a[contains(@title,'Create Reports')]
+ Page Should Contain Report Wizard
+
+Click Profile and validate sub Menu
+ [Documentation] Click Profile Tab
+ Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ Click Link xpath=//a[@id='parent-item-Profile']
+ Element Text Should Be xpath=//a[@title='Search'] Search
+ Element Text Should Be xpath=//a[@title='Self'] Self
+ Click Link xpath=//a[contains(@title,'Search')]
+ Page Should Contain Profile Search
+ Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ Click Link xpath=//a[@id='parent-item-Profile']
+ Click Link xpath=//a[contains(@title,'Self')]
+ Page Should Contain Self Profile Detail
-
+Click Admin and validate sub Menu
+ [Documentation] Click Admin Tab
+ Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ Click Link xpath=//a[@id='parent-item-Admin']
+ Element Text Should Be xpath=//a[@title='Roles'] Roles
+ Element Text Should Be xpath=//a[@title='Role Functions'] Role Functions
+ Element Text Should Be xpath=//a[@title='Cache Admin'] Cache Admin
+ Element Text Should Be xpath=//a[@title='Menus'] Menus
+ Element Text Should Be xpath=//a[@title='Usage'] Usage
+ Click Link xpath=//a[contains(@title,'Roles')]
+ Page Should Contain Roles
+ Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ Click Link xpath=//a[@id='parent-item-Admin']
+ Click Link xpath=//a[contains(@title,'Role Function')]
+ Page Should Contain Role Function
+ Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ Click Link xpath=.//a[@id='parent-item-Admin']
+ #Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ Click Link xpath=//a[@id='parent-item-Admin']
+ Click Link xpath=//a[contains(@title,'Cache Admin')]
+ Page Should Contain Cache Regions
+ Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ Click Link xpath=.//a[@id='parent-item-Admin']
+ Click Link xpath=//a[@id='parent-item-Admin']
+ Click Link xpath=//a[contains(@title,'Menus')]
+ Page Should Contain Admin Menu Items
+ Comment Select frame xpath=.//*[@id='tabframe-xDemo-App']
+ Click Link xpath=//a[@id='parent-item-Admin']
+ Click Link xpath=//a[@id='parent-item-Admin']
+ Click Link xpath=//a[contains(@title,'Usage')]
+ Page Should Contain Current Usage
-
+Teardown
+ [Documentation] Close All Open browsers
+ Close All Browsers
*** Keywords ***
diff --git a/test/csit/tests/portal/testsuites/test1.robot b/test/csit/tests/portal/testsuites/test1.robot
index bbbe5172f..90aa10788 100644
--- a/test/csit/tests/portal/testsuites/test1.robot
+++ b/test/csit/tests/portal/testsuites/test1.robot
@@ -62,6 +62,7 @@ ${jira} jira
${RESOURCE_PATH} ONAPPORTAL/auxapi/ticketevent
${portal_Template} ${CURDIR}/portal.template
+${Result} FALSE
*** Test Cases ***
@@ -72,133 +73,128 @@ Portal Health Check
Login into Portal URL
Portal admin Login To Portal GUI
-Portal R1 Release
- [Documentation] ONAP Portal R1 functionality test
- Notification on ONAP Portal
- Portal Application Account Management validation
+# Portal R1 Release
+ # [Documentation] ONAP Portal R1 functionality test
+ # Notification on ONAP Portal
+ # Portal Application Account Management validation
Portal R1 Release for AAF
[Documentation] ONAP Portal R1 functionality for AAF test
Portal AAF new fields
-Create Microse service onboarding
- Portal admin Microservice Onboarding
+#Create Microse service onboarding
+ #Portal admin Microservice Onboarding
+#Delete Microse service
+ #Portal admin Microservice Delete
-Create Widget for all users
- Portal Admin Create Widget for All users
-
-Delete Widget for all users
- Portal Admin Delete Widget for All users
-
-Create Widget for Application Roles
- Portal Admin Create Widget for Application Roles
-
-Delete Widget for Application Roles
- Portal Admin Delete Widget for Application Roles
+#Create Widget for all users
+ #Portal Admin Create Widget for All users
+
+# Delete Widget for all users
+ # Portal Admin Delete Widget for All users
+
+#Create Widget for Application Roles
+ #Portal Admin Create Widget for Application Roles
+
+#Delete Widget for Application Roles
+ #Portal Admin Delete Widget for Application Roles
-Validate Functional Top Menu Get Access
- Functional Top Menu Get Access
+#Validate Functional Top Menu Get Access
+ #Functional Top Menu Get Access
-Validate Functional Top Menu Contact Us
- Functional Top Menu Contact Us
+#Validate Functional Top Menu Contact Us
+ #Functional Top Menu Contact Us
-Edit Functional Menu
- Portal admin Edit Functional menu
+#Edit Functional Menu
+ #Portal admin Edit Functional menu
-Broadbond Notification functionality
- ${AdminBroadCastMsg}= Portal Admin Broadcast Notifications
- set global variable ${AdminBroadCastMsg}
+# Broadbond Notification functionality
+ # ${AdminBroadCastMsg}= Portal Admin Broadcast Notifications
+ # set global variable ${AdminBroadCastMsg}
-
-Category Notification functionality
- ${AdminCategoryMsg}= Portal Admin Category Notifications
- set global variable ${AdminCategoryMsg}
-
-Create a Test user for Application Admin -Test
- Portal admin Add Application admin User New user -Test
+# Category Notification functionality
+ # ${AdminCategoryMsg}= Portal Admin Category Notifications
+ # set global variable ${AdminCategoryMsg}
+
+#Create a Test user for Application Admin -Test
+ #Portal admin Add Application admin User New user -Test
-Create a Test User for Apllication Admin
- Portal admin Add Application admin User New user
+#Create a Test User for Apllication Admin
+ #Portal admin Add Application admin User New user
-Add Application Admin for Existing User Test user
- Portal admin Add Application Admin Exiting User -APPDEMO
+#Add Application Admin for Existing User Test user
+ #Portal admin Add Application Admin Exiting User -APPDEMO
-Create a Test user for Standared User
- Portal admin Add Standard User New user
+#Create a Test user for Standared User
+ #Portal admin Add Standard User New user
-Add Application Admin for Exisitng User
- Portal admin Add Application Admin Exiting User
+#Add Application Admin for Exisitng User
+ #Portal admin Add Application Admin Exiting User
-Delete Application Admin for Exisitng User
- Portal admin Delete Application Admin Existing User
+#Delete Application Admin for Exisitng User
+ #Portal admin Delete Application Admin Existing User
-Add Standard User Role for Existing user
- Portal admin Add Standard User Existing user
+# Add Standard User Role for Existing user
+ # Portal admin Add Standard User Existing user
-Edit Standard User Role for Existing user
- Portal admin Edit Standard User Existing user
+# Edit Standard User Role for Existing user
+ # Portal admin Edit Standard User Existing user
-Delete Standard User Role for Existing user
- Portal admin Delete Standard User Existing user
-
-
-
-
+#Delete Standard User Role for Existing user
+ #Portal admin Delete Standard User Existing user
+
Logout from Portal GUI as Portal Admin
Portal admin Logout from Portal GUI
-
-Login To Portal GUI as APP Admin
- Application admin Login To Portal GUI
-
-
+# Application Admin user Test cases
+
+#Login To Portal GUI as APP Admin
+ #Application admin Login To Portal GUI
+
##Navigate Functional Link as APP Admin
## Application Admin Navigation Functional Menu
-Add Standard User Role for Existing user as APP Admin
- Application admin Add Standard User Existing user
+# Add Standard User Role for Existing user as APP Admin
+ # Application admin Add Standard User Existing user
-Edit Standard User Role for Existing user as APP Admin
- Application admin Edit Standard User Existing user
+# Edit Standard User Role for Existing user as APP Admin
+ # Application admin Edit Standard User Existing user
-Delete Standard User Role for Existing user as APP Admin
- Application admin Delete Standard User Existing user
+# Delete Standard User Role for Existing user as APP Admin
+ # Application admin Delete Standard User Existing user
-#Navigate Application Link as APP Admin
-# Application Admin Navigation Application Link Tab
+# #Navigate Application Link as APP Admin
+# # Application Admin Navigation Application Link Tab
-Logout from Portal GUI as APP Admin
- Application admin Logout from Portal GUI
+#Logout from Portal GUI as APP Admin
+ #Application admin Logout from Portal GUI
+#Standard User Test cases
-Login To Portal GUI as Standared User
- Standared user Login To Portal GUI
+#Login To Portal GUI as Standared User
+ #Standared user Login To Portal GUI
#Navigate Application Link as Standared User
# Standared user Navigation Application Link Tab
##Navigate Functional Link as Standared User
## Standared user Navigation Functional Menu
+
-Broadcast Notifications Standared user
- Standared user Broadcast Notifications ${AdminBroadCastMsg}
+# Broadcast Notifications Standared user
+ # Standared user Broadcast Notifications ${AdminBroadCastMsg}
-Category Notifications Standared user
- Standared user Category Notifications ${AdminCategoryMsg}
+# Category Notifications Standared user
+ # Standared user Category Notifications ${AdminCategoryMsg}
Teardown
[Documentation] Close All Open browsers
Close All Browsers
-
-
-
-
-
*** Keywords ***
Setup Browser
@@ -296,6 +292,8 @@ Portal admin Add Application Admin Exiting User
Click Button xpath=//input[@value='Select application']
Scroll Element Into View xpath=(//input[@value='Select application']/following::*[contains(text(),'xDemo App' )])[1]
Click Element xpath=(//li[contains(.,'xDemo App' )])[2]
+# Scroll Element Into View xpath=(//input[@value='Select application']/following::*[contains(text(),'Default' )])[1]
+# Click Element xpath=(//li[contains(.,'Default' )])[2]
#Select From List xpath=(//input[@value='Select application']/following::*[contains(text(),'xDemo App')])[1] xDemo App
Click Button xpath=//button[@id='div-updateAdminAppsRoles']
Click Element xpath=//button[@id='admin-div-ok-button']
@@ -303,6 +301,7 @@ Portal admin Add Application Admin Exiting User
Get Selenium Implicit Wait
Click Link xpath=//a[@aria-label='Admins']
Click Element xpath=//input[@id='dropdown1']
+# Click Element xpath=//li[contains(.,'Default' )]
Click Element xpath=//li[contains(.,'xDemo App' )]
Input Text xpath=//input[@id='input-table-search'] ${Existing_User}
Table Column Should Contain xpath=//*[@table-data='admins.adminsTableData'] 1 ${Existing_User}
@@ -312,9 +311,16 @@ Portal admin Add Application Admin Exiting User
Portal admin Delete Application Admin Existing User
[Documentation] Naviage to Admins tab
+ Wait Until Element Is Visible xpath=//a[@title='Admins'] ${GLOBAL_SELENIUM_BROWSER_WAIT_TIMEOUT}
+ Click Link xpath=//a[@title='Admins']
+ Wait Until Element Is Visible xpath=//h1[contains(.,'Admins')] ${GLOBAL_SELENIUM_BROWSER_WAIT_TIMEOUT}
+ Page Should Contain Admins
+ Click Button xpath=//button[@ng-click='toggleSidebar()']
+ Input Text xpath=//input[@id='input-table-search'] ${Existing_User}
Click Element xpath=(//span[contains(.,'portal')] )[1]
#Click Element xpath=(//span[contains(.,'demo')] )[1]
Click Element xpath=//*[@id='select-app-xDemo-App']/following::i[@id='i-delete-application']
+# Click Element xpath=//*[@id='select-app-Default']/following::i[@id='i-delete-application']
Click Element xpath=//button[@id='div-confirm-ok-button']
Click Button xpath=//button[@id='div-updateAdminAppsRoles']
Click Element xpath=//button[@id='admin-div-ok-button']
@@ -340,6 +346,21 @@ Portal admin Add Application admin User New user
Input Text xpath=//input[@ng-model='searchUsers.newUser.loginPwd'] ${App_Loginpwd}
Input Text xpath=//input[@ng-model='searchUsers.newUser.loginPwdCheck'] ${App_LoginPwdCheck}
Click Button xpath=//button[@ng-click='searchUsers.addNewUserFun()']
+
+ ${Result}= Get Matching XPath Count xpath=//*[contains(text(),'User with same loginId already exists')]
+
+ #log ${Result}
+ #${type_result}= Evaluate type(${Result})
+ #log ${type_result}
+
+    Run Keyword If    '${Result}' == '0'    AdminUser does not exist already
+ ... ELSE Goto Home Image
+ Set Selenium Implicit Wait 3000
+
+Goto Home Image
+ Click Image xpath=//img[@alt='Onap Logo']
+
+AdminUser does not exist already
Click Button xpath=//button[@id='next-button']
#Scroll Element Into View xpath=//div[@id='div-app-name-dropdown-xDemo-App']
Click Element xpath=//*[@id='div-app-name-dropdown-xDemo-App']
@@ -372,6 +393,18 @@ Portal admin Add Standard User New user
Input Text xpath=//input[@ng-model='searchUsers.newUser.loginPwd'] ${Sta_Loginpwd}
Input Text xpath=//input[@ng-model='searchUsers.newUser.loginPwdCheck'] ${Sta_LoginPwdCheck}
Click Button xpath=//button[@ng-click='searchUsers.addNewUserFun()']
+
+ ${Result}= Get Matching XPath Count xpath=//*[contains(text(),'User with same loginId already exists')]
+
+ #log ${Result}
+ #${type_result}= Evaluate type(${Result})
+ #log ${type_result}
+
+    Run Keyword If    '${Result}' == '0'    StaUser does not exist already
+ ... ELSE Goto Home Image
+ Set Selenium Implicit Wait 3000
+
+StaUser does not exist already
Click Button xpath=//button[@id='next-button']
#Scroll Element Into View xpath=//div[@id='div-app-name-dropdown-xDemo-App']
Click Element xpath=//*[@id='div-app-name-dropdown-xDemo-App']
@@ -455,13 +488,7 @@ Portal admin Add Application Admin Exiting User -APPDEMO
Table Column Should Contain xpath=//*[@table-data='admins.adminsTableData'] 1 ${App_First_Name}
Click Image xpath=//img[@alt='Onap Logo']
Set Selenium Implicit Wait 3000
-
-
-
-
-
-
-
+
Portal admin Add Standard User Existing user
[Documentation] Naviage to Users tab
Click Link xpath=//a[@title='Users']
@@ -472,10 +499,10 @@ Portal admin Add Standard User Existing user
Click Button xpath=//button[@id='button-search-users']
Click Element xpath=//span[@id='result-uuid-0']
Click Button xpath=//button[@id='next-button']
- Click Element xpath=//*[@id='div-app-name-dropdown-xDemo-App']
- Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::input[@id='Standard-User-checkbox']
- #Click Element xpath=//div[@id='div-app-name-dropdown-xDemo-App']
- #Click Element xpath=//div[@id='div-app-name-xDemo-App']/following::input[@id='Standard-User-checkbox']
+# Click Element xpath=//*[@id='div-app-name-dropdown-Default']
+# Click Element xpath=//*[@id='div-app-name-Default']/following::input[@id='Standard-User-checkbox']
+ Click Element xpath=//div[@id='div-app-name-dropdown-xDemo-App']
+ Click Element xpath=//div[@id='div-app-name-xDemo-App']/following::input[@id='Standard-User-checkbox']
Set Selenium Implicit Wait 3000
Click Button xpath=//button[@id='new-user-save-button']
Set Selenium Implicit Wait 3000
@@ -487,19 +514,24 @@ Portal admin Add Standard User Existing user
Go To ${PORTAL_HOME_PAGE}
Click Link xpath=//a[@title='Users']
Click Element xpath=//input[@id='dropdown1']
- Click Element xpath=//li[contains(.,'xDemo App')]
- #Click Element xpath=//li[contains(.,'XDemo App')]
+# Click Element xpath=//li[contains(.,'Default')]
+ Click Element xpath=//li[contains(.,'XDemo App')]
Input Text xpath=//input[@id='input-table-search'] ${Existing_User}
- Element Text Should Be xpath=(.//*[@id='rowheader_t1_0'])[2] Standard User
-
-
-
+ Element Text Should Be xpath=(.//*[@id='rowheader_t1_0'])[2] Standard User
+ Set Selenium Implicit Wait 3000
+
Portal admin Edit Standard User Existing user
[Documentation] Naviage to Users tab
Click Element xpath=(.//*[@id='rowheader_t1_0'])[2]
+# Click Element xpath=//*[@id='div-app-name-dropdown-Default']
+# Click Element xpath=//*[@id='div-app-name-Default']/following::input[@id='Standard-User-checkbox']
+# Click Element xpath=//*[@id='div-app-name-Default']/following::input[@id='Portal-Notification-Admin-checkbox']
Click Element xpath=//*[@id='div-app-name-dropdown-xDemo-App']
Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::input[@id='Standard-User-checkbox']
- Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::input[@id='System-Administrator-checkbox']
+ Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::input[@id='Portal-Notification-Admin-checkbox']
+# Click Element xpath=//*[@id='div-app-name-dropdown-SDC']
+# Click Element xpath=//*[@id='div-app-name-SDC']/following::input[@id='Standard-User-checkbox']
+# Click Element xpath=//*[@id='div-app-name-SDC']/following::input[@id='Portal-Notification-Admin-checkbox']
Set Selenium Implicit Wait 3000
Click Button xpath=//button[@id='new-user-save-button']
Set Selenium Implicit Wait 3000
@@ -508,22 +540,24 @@ Portal admin Edit Standard User Existing user
#Click Element xpath=//li[contains(.,'xDemo App')]
Input Text xpath=//input[@id='input-table-search'] ${Existing_User}
Element Text Should Be xpath=(.//*[@id='rowheader_t1_0'])[2] System Administrator
-
+ Set Selenium Implicit Wait 3000
Portal admin Delete Standard User Existing user
[Documentation] Naviage to Users tab
Click Element xpath=(.//*[@id='rowheader_t1_0'])[2]
+# Scroll Element Into View xpath=//*[@id='div-app-name-Default']/following::*[@id='app-item-delete'][1]
+# Click Element xpath=//*[@id='div-app-name-Default']/following::*[@id='app-item-delete'][1]
Scroll Element Into View xpath=//*[@id='div-app-name-xDemo-App']/following::*[@id='app-item-delete'][1]
Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::*[@id='app-item-delete'][1]
+# Scroll Element Into View xpath=//*[@id='div-app-name-SDC']/following::*[@id='app-item-delete'][1]
+# Click Element xpath=//*[@id='div-app-name-SDC']/following::*[@id='app-item-delete'][1]
Click Element xpath=//button[@id='div-confirm-ok-button']
Click Button xpath=//button[@id='new-user-save-button']
#Input Text xpath=//input[@id='input-table-search'] ${Existing_User}
- #Is Element Visible xpath=(//*[contains(.,'Portal')] )[2]
- Element Should Not Contain xpath=//*[@table-data='users.accountUsers'] portal
- #Element Should Not Contain xpath=//*[@table-data='users.accountUsers'] demo
-
-
-
+ #Is Element Visible xpath=(//*[contains(.,'Portal')] )[2]
+ Element Should Not Contain xpath=//*[@table-data='users.accountUsers'] Portal
+ #Element Should Not Contain xpath=//*[@table-data='users.accountUsers'] demo
+ Set Selenium Implicit Wait 3000
Functional Top Menu Get Access
[Documentation] Naviage to Support tab
@@ -531,7 +565,7 @@ Functional Top Menu Get Access
Mouse Over xpath=//*[contains(text(),'Get Access')]
Click Link xpath=//a[contains(.,'Get Access')]
Element Text Should Be xpath=//h1[contains(.,'Get Access')] Get Access
-
+ Set Selenium Implicit Wait 3000
Functional Top Menu Contact Us
[Documentation] Naviage to Support tab
@@ -540,7 +574,7 @@ Functional Top Menu Contact Us
Click Link xpath=//a[contains(.,'Contact Us')]
Element Text Should Be xpath=//h1[contains(.,'Contact Us')] Contact Us
Click Image xpath=//img[@alt='Onap Logo']
-
+ Set Selenium Implicit Wait 3000
Portal admin Edit Functional menu
[Documentation] Naviage to Edit Functional menu tab
@@ -583,7 +617,7 @@ Portal admin Edit Functional menu
Mouse Over xpath=//*[contains(text(),'Design')]
Set Selenium Implicit Wait 3000
Element Should Not Contain xpath=(.//*[contains(.,'Design')]/following::ul[1])[1] ONAP Test
-
+ Set Selenium Implicit Wait 3000
Portal admin Microservice Onboarding
@@ -605,11 +639,32 @@ Portal admin Microservice Onboarding
Click Button xpath=//button[@id='microservice-details-save-button']
Table Column Should Contain xpath=//*[@table-data='serviceList'] 1 Test Microservice
#Element Text Should Be xpath=//*[@table-data='serviceList'] Test Microservice
-
-
-
+ Set Selenium Implicit Wait 3000
+
+Portal admin Microservice Delete
+    [Documentation]    Navigate to Microservice Onboarding tab and delete the test microservice
+ Click Link xpath=//a[@title='Microservice Onboarding']
+ Click Button xpath=//button[@id='microservice-onboarding-button-add']
+ Input Text xpath=//input[@name='name'] TestMS
+ Input Text xpath=//*[@name='desc'] TestMS
+ Click Element xpath=//input[@id='microservice-details-input-app']
+ Scroll Element Into View xpath=//li[contains(.,'xDemo App')]
+ Click Element xpath=//li[contains(.,'xDemo App')]
+ Click Element xpath=//*[@name='desc']
+ Input Text xpath=//input[@name='url'] ${PORTAL_MICRO_ENDPOINT}
+ Click Element xpath=//input[@id='microservice-details-input-security-type']
+ Scroll Element Into View xpath=//li[contains(.,'Basic Authentication')]
+ Click Element xpath=//li[contains(.,'Basic Authentication')]
+ Input Text xpath=//input[@name='username'] ${GLOBAL_PORTAL_ADMIN_USER}
+ Input Text xpath=//input[@name='password'] ${GLOBAL_PORTAL_ADMIN_PWD}
+ Click Button xpath=//button[@id='microservice-details-save-button']
+ Table Column Should Contain xpath=//*[@table-data='serviceList'] 1 TestMS
+ Click Element xpath=(.//*[contains(text(),'TestMS')]/following::*[@ng-click='microserviceOnboarding.deleteService(rowData)'])[1]
+ Click Button xpath=//button[@id="div-confirm-ok-button"]
+ Set Selenium Implicit Wait 3000
+
Portal Admin Create Widget for All users
- [Documentation] Naviage to Create Widget menu tab
+ [Documentation] Navigate to Create Widget menu tab
${WidgetAttachment}= Catenate ${PORTAL_ASSETS_DIRECTORY}//news_widget.zip
Click Link xpath=//a[@title='Widget Onboarding']
Click Button xpath=//button[@ng-click='toggleSidebar()']
@@ -647,7 +702,6 @@ Portal Admin Delete Widget for All users
#Table Column Should Contain .//*[@table-data='portalAdmin.portalAdminsTableData'] 0 ONAP-xDemo
#Set Selenium Implicit Wait 3000
-
Portal Admin Create Widget for Application Roles
[Documentation] Naviage to Create Widget menu tab
${WidgetAttachment}= Catenate ${PORTAL_ASSETS_DIRECTORY}//news_widget.zip
@@ -677,10 +731,7 @@ Portal Admin Create Widget for Application Roles
Page Should Contain ONAP-xDemo
Set Selenium Implicit Wait 3000
GO TO ${PORTAL_HOME_PAGE}
-
-
-
-
+
Portal Admin Delete Widget for Application Roles
#Wait Until Page Contains ONAP-xDemo ${GLOBAL_SELENIUM_BROWSER_WAIT_TIMEOUT}
#Page Should Contain ONAP-xDemo
@@ -698,7 +749,7 @@ Portal Admin Delete Widget for Application Roles
Element Should Not Contain xpath=//*[@table-data='portalAdmin.portalAdminsTableData'] ONAP-xDemo
#Is Element Visible xpath=//*[@table-data='portalAdmin.portalAdminsTableData']
#Table Column Should Contain .//*[@table-data='portalAdmin.portalAdminsTableData'] 0 ONAP-xDemo
- #Set Selenium Implicit Wait 3000
+ Set Selenium Implicit Wait 3000
@@ -720,15 +771,14 @@ Portal Admin Edit Widget
Click Element xpath=//div[@id='confirmation-button-next']
Element Should Not Contain xpath=//*[@table-data='ignoredTableData'] ONAP_VID
Click Link xpath=//a[@id='close-button']
-
-
-
+ Set Selenium Implicit Wait 3000
Portal Admin Broadcast Notifications
[Documentation] Portal Test Admin Broadcast Notifications
- ${CurrentDay}= Get Current Date result_format=%m/%d/%Y
- ${NextDay}= Get Current Date increment=24:00:00 result_format=%m/%d/%Y
- ${CurrentDate}= Get Current Date result_format=%m%d%y%H%M
+
+ ${CurrentDay}= Get Current Date increment=24:00:00 result_format=%m/%d/%Y
+ ${NextDay}= Get Current Date increment=48:00:00 result_format=%m/%d/%Y
+ ${CurrentDate}= Get Current Date increment=24:00:00 result_format=%m%d%y%H%M
${AdminBroadCastMsg}= catenate ONAP VID Broadcast Automation${CurrentDate}
Click Image xpath=//img[@alt='Onap Logo']
Set Selenium Implicit Wait 3000
@@ -744,15 +794,17 @@ Portal Admin Broadcast Notifications
click element xpath=//*[@id="megamenu-notification-button"]
click element xpath=//*[@id="notification-history-link"]
Wait until Element is visible xpath=//*[@id="notification-history-table"] timeout=10
- Table Column Should Contain xpath=//*[@id="notification-history-table"] 2 ${AdminBroadCastMsg}
+ Table Column Should Contain xpath=//*[@id="notification-history-table"] 2 ${AdminBroadCastMsg}
+ Set Selenium Implicit Wait 3000
log ${AdminBroadCastMsg}
[Return] ${AdminBroadCastMsg}
Portal Admin Category Notifications
[Documentation] Portal Admin Broadcast Notifications
- ${CurrentDay}= Get Current Date result_format=%m/%d/%Y
- ${NextDay}= Get Current Date increment=24:00:00 result_format=%m/%d/%Y
- ${CurrentDate}= Get Current Date result_format=%m%d%y%H%M
+ ${CurrentDay}= Get Current Date increment=24:00:00 result_format=%m/%d/%Y
+ ${NextDay}= Get Current Date increment=48:00:00 result_format=%m/%d/%Y
+# ${CurrentDay}= Get Current Date result_format=%m/%d/%Y
+ ${CurrentDate}= Get Current Date increment=24:00:00 result_format=%m%d%y%H%M
${AdminCategoryMsg}= catenate ONAP VID Category Automation${CurrentDate}
Click Link xpath=//a[@id='parent-item-Home']
Click Link xpath=//*[@id="parent-item-User-Notifications"]
@@ -772,9 +824,10 @@ Portal Admin Category Notifications
click element xpath=//*[@id="megamenu-notification-button"]
click element xpath=//*[@id="notification-history-link"]
Wait until Element is visible xpath=//*[@id="notification-history-table"] timeout=10
- Table Column Should Contain xpath=//*[@id="notification-history-table"] 2 ${AdminCategoryMsg}
+ Table Column Should Contain xpath=//*[@id="notification-history-table"] 2 ${AdminCategoryMsg}
+ Set Selenium Implicit Wait 3000
log ${AdminCategoryMsg}
- [Return] ${AdminCategoryMsg}
+ [Return] ${AdminCategoryMsg}
Portal admin Logout from Portal GUI
@@ -834,6 +887,7 @@ Application admin Add Standard User Existing user
Click Element xpath=//span[@id='result-uuid-0']
Click Button xpath=//button[@id='next-button']
Click Element xpath=//*[@id='div-app-name-dropdown-xDemo-App']
+ Set Selenium Implicit Wait 3000
Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::input[@id='Standard-User-checkbox']
Set Selenium Implicit Wait 3000
Click Button xpath=//button[@id='new-user-save-button']
@@ -854,9 +908,12 @@ Application admin Add Standard User Existing user
Application admin Edit Standard User Existing user
[Documentation] Naviage to Users tab
Click Element xpath=(.//*[@id='rowheader_t1_0'])[2]
+# Click Element xpath=//*[@id='div-app-name-dropdown-Default']
+# Click Element xpath=//*[@id='div-app-name-Default']/following::input[@id='Standard-User-checkbox']
+# Click Element xpath=//*[@id='div-app-name-Default']/following::input[@id='Portal-Notification-Admin-checkbox']
Click Element xpath=//*[@id='div-app-name-dropdown-xDemo-App']
Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::input[@id='Standard-User-checkbox']
- Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::input[@id='System-Administrator-checkbox']
+ Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::input[@id='Portal-Notification-Admin-checkbox']
Set Selenium Implicit Wait 3000
Click Button xpath=//button[@id='new-user-save-button']
Set Selenium Implicit Wait 3000
@@ -870,15 +927,17 @@ Application admin Edit Standard User Existing user
Application admin Delete Standard User Existing user
[Documentation] Naviage to Users tab
Click Element xpath=(.//*[@id='rowheader_t1_0'])[2]
+# Scroll Element Into View xpath=//*[@id='div-app-name-Default']/following::*[@id='app-item-delete'][1]
+# Click Element xpath=//*[@id='div-app-name-Default']/following::*[@id='app-item-delete'][1]
Scroll Element Into View xpath=//*[@id='div-app-name-xDemo-App']/following::*[@id='app-item-delete'][1]
Click Element xpath=//*[@id='div-app-name-xDemo-App']/following::*[@id='app-item-delete'][1]
Click Element xpath=//button[@id='div-confirm-ok-button']
Click Button xpath=//button[@id='new-user-save-button']
- #Input Text xpath=//input[@id='input-table-search'] ${Existing_User}
- #Is Element Visible xpath=(//*[contains(.,'Portal')] )[2]
+# Input Text xpath=//input[@id='input-table-search'] ${Existing_User}
+# Is Element Visible xpath=(//*[contains(.,'Portal')] )[2]
Element Should Not Contain xpath=//*[@table-data='users.accountUsers'] Portal
#Click Image xpath=//img[@alt='Onap Logo']
- #Set Selenium Implicit Wait 3000
+ Set Selenium Implicit Wait 3000
@@ -918,16 +977,16 @@ Standared user Navigation Application Link Tab
Click Element xpath=.//h3[contains(text(),'xDemo App')]/following::div[1]
Page Should Contain ONAP Portal
Click Element xpath=(.//span[@id='tab-Home'])[1]
-
+ Set Selenium Implicit Wait 3000
Standared user Navigation Functional Menu
[Documentation] Logs into Portal GUI as application admin
Click Link xpath=//a[contains(.,'Manage')]
- Mouse Over xpath=//*[contains(text(),'Technology Insertion')]
- Click Link xpath= //*[contains(text(),'Infrastructure VNF Provisioning')]
- Page Should Contain Welcome to VID
- Click Element xpath=(.//span[@id='tab-Home'])[1]
-
+ Mouse Over xpath=//*[contains(text(),'Technology Insertion')]
+ Click Link xpath= //*[contains(text(),'Infrastructure VNF Provisioning')]
+ Page Should Contain Welcome to VID
+ Click Element xpath=(.//span[@id='tab-Home'])[1]
+ Set Selenium Implicit Wait 3000
Standared user Broadcast Notifications
diff --git a/test/csit/tests/sdc/uiSanity/__init__.robot b/test/csit/tests/sdc/uiSanity/__init__.robot
new file mode 100644
index 000000000..8ee10d5f6
--- /dev/null
+++ b/test/csit/tests/sdc/uiSanity/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation Sdc - HealthCheck
diff --git a/test/csit/tests/sdc/uiSanity/test1.robot b/test/csit/tests/sdc/uiSanity/test1.robot
new file mode 100644
index 000000000..3783e159e
--- /dev/null
+++ b/test/csit/tests/sdc/uiSanity/test1.robot
@@ -0,0 +1,16 @@
+*** Settings ***
+Library Collections
+Library OperatingSystem
+Library RequestsLibrary
+Library json
+
+*** Test Cases ***
+Get Requests health check ok
+ [Tags] get
+ CreateSession sdc-be http://localhost:8080
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= Get Request sdc-be /sdc2/rest/healthCheck headers=&{headers}
+ Should Be Equal As Strings ${resp.status_code} 500
+ @{ITEMS}= Copy List ${resp.json()['componentsInfo']}
+ : FOR ${ELEMENT} IN @{ITEMS}
+ \ Log ${ELEMENT['healthCheckComponent']} ${ELEMENT['healthCheckStatus']}
diff --git a/test/csit/tests/vfc/nfvo-multivimproxy/test.robot b/test/csit/tests/vfc/nfvo-multivimproxy/test.robot
new file mode 100644
index 000000000..fab3694e4
--- /dev/null
+++ b/test/csit/tests/vfc/nfvo-multivimproxy/test.robot
@@ -0,0 +1,24 @@
+*** settings ***
+Resource ../../common.robot
+Library Collections
+Library RequestsLibrary
+Library simplejson
+Library OperatingSystem
+Library json
+Library HttpLibrary.HTTP
+
+*** Variables ***
+@{return_ok_list}= 200 201 202
+${queryswagger_url} /api/multivimproxy/v1/swagger.json
+
+*** Test Cases ***
+SwaggerFuncTest
+ [Documentation] query swagger info rest test
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${RESMGR_IP}:8481 headers=${headers}
+ ${resp}= Get Request web_session ${queryswagger_url}
+ ${responese_code}= Convert To String ${resp.status_code}
+ List Should Contain Value ${return_ok_list} ${responese_code}
+ ${response_json} json.loads ${resp.content}
+ ${swagger_version}= Convert To String ${response_json['swagger']}
+ Should Be Equal ${swagger_version} 2.0 \ No newline at end of file
diff --git a/test/csit/tests/vfc/nfvo-wfengine/workflow.robot b/test/csit/tests/vfc/nfvo-wfengine/workflow.robot
index 07bfe6979..c9dbe6c46 100644
--- a/test/csit/tests/vfc/nfvo-wfengine/workflow.robot
+++ b/test/csit/tests/vfc/nfvo-wfengine/workflow.robot
@@ -1,113 +1,113 @@
-*** Settings ***
-Resource ../../common.robot
-Library Collections
-Library json
-Library OperatingSystem
-Library RequestsLibrary
-Library HttpLibrary.HTTP
-
-*** Variables ***
-${MSB_IP} 127.0.0.1
-${MSB_PORT} 10550
-${ACTIVITI_IP} 127.0.0.1
-${ACTIVITI_PORT} 8804
-${MGRSERVICE_IP} 127.0.0.1
-${MGRSERVICE_PORT} 8805
-${processId} demo
-${deployid} 0
-${bmpfilepath} ${SCRIPTS}/nfvo-wfengine/demo.bpmn20.xml
-
-*** Test Cases ***
-Deploy BPMN File Test On Activiti
- [Documentation] Check if the test bpmn file can be deployed in activiti engine
- ${auth}= Create List kermit kermit
- ${headers}= Create Dictionary Accept=application/json
- Create Session web_session http://${ACTIVITI_IP}:${ACTIVITI_PORT} headers=${headers} auth=${auth}
- ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
- ${resp}= Post Request web_session /activiti-rest/service/repository/deployments files=${files}
- Should Be Equal ${resp.status_code} ${201}
- Log ${resp.json()}
- ${deployedId}= Set Variable ${resp.json()["id"]}
- Set Global Variable ${deployedId}
-
-Exectue BPMN File Testt On Activiti
- [Documentation] Check if the test bpmn file can be exectued in activiti engine
- ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
- Create Session web_session http://${ACTIVITI_IP}:${ACTIVITI_PORT} headers=${headers}
- ${body} Create Dictionary processDefinitionKey=${processId}
- ${body} dumps ${body}
- ${resp}= Post Request web_session /activiti-rest/service/runtime/process-instances ${body}
- Should Be Equal ${resp.status_code} ${201}
-
-UnDeploy BPMN File Testt On Activiti
- [Documentation] Check if the test bpmn file can be undeployed in activiti engine
- log ${deployedId}
- ${auth}= Create List kermit kermit
- ${headers} Create Dictionary Content-Type=application/json Accept=application/json
- Create Session web_session http://${ACTIVITI_IP}:${ACTIVITI_PORT} headers=${headers} auth=${auth}
- ${resp}= Delete Request web_session /activiti-rest/service/repository/deployments/${deployedId}?cascade=true
- Should Be Equal ${resp.status_code} ${204}
-
-Deploy BPMN File Test On MgrService
- [Documentation] Check if the test bpmn file can be deployed in Management Service
- ${auth}= Create List kermit kermit
- ${headers}= Create Dictionary Accept=application/json
- Create Session web_session http://${MGRSERVICE_IP}:${MGRSERVICE_PORT} headers=${headers} auth=${auth}
- ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
- ${resp}= Post Request web_session api/workflow/v1/package files=${files}
- Should Be Equal ${resp.status_code} ${200}
- Log ${resp.json()}
- ${deployedId}= Set Variable ${resp.json()["deployedId"]}
- Set Global Variable ${deployedId}
-
-Exectue BPMN File Testt On MgrService
- [Documentation] Check if the test bpmn file can be exectued in Management Service
- ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
- Create Session web_session http://${MGRSERVICE_IP}:${MGRSERVICE_PORT} headers=${headers}
- ${body} Create Dictionary processDefinitionKey=${processId}
- ${body} dumps ${body}
- ${resp}= Post Request web_session api/workflow/v1/process/instance ${body}
- Should Be Equal ${resp.status_code} ${200}
- Log ${resp.json()}
- Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId}
-
-UnDeploy BPMN File Testt On MgrService
- [Documentation] Check if the test bpmn file can be undeployed in Management Service
- log ${deployedId}
- ${auth}= Create List kermit kermit
- ${headers} Create Dictionary Content-Type=application/json Accept=application/json
- Create Session web_session http://${MGRSERVICE_IP}:${MGRSERVICE_PORT} headers=${headers} auth=${auth}
- ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId}
- Should Be Equal ${resp.status_code} ${200}
-
-Deploy BPMN File Test On MSB
- [Documentation] Check if the test bpmn file can be deployed in activiti engine
- ${auth}= Create List kermit kermit
- ${headers}= Create Dictionary Accept=application/json
- Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
- ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
- ${resp}= Post Request web_session api/workflow/v1/package files=${files}
- Should Be Equal ${resp.status_code} ${200}
- Log ${resp.json()}
- ${deployedId}= Set Variable ${resp.json()["deployedId"]}
- Set Global Variable ${deployedId}
-
-Exectue BPMN File Testt On MSB
- [Documentation] Check if the test bpmn file can be exectued in MSB
- ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
- Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers}
- ${body} Create Dictionary processDefinitionKey=${processId}
- ${body} dumps ${body}
- ${resp}= Post Request web_session api/workflow/v1/process/instance ${body}
- Should Be Equal ${resp.status_code} ${200}
- Log ${resp.json()}
- Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId}
-
-UnDeploy BPMN File Testt On MSB
- [Documentation] Check if the test bpmn file can be undeployed in MSB
- log ${deployedId}
- ${auth}= Create List kermit kermit
- ${headers} Create Dictionary Content-Type=application/json Accept=application/json
- Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
- ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId}
- Should Be Equal ${resp.status_code} ${200}
+*** Settings ***
+Resource ../../common.robot
+Library Collections
+Library json
+Library OperatingSystem
+Library RequestsLibrary
+Library HttpLibrary.HTTP
+
+*** Variables ***
+${MSB_IP} 127.0.0.1
+${MSB_PORT} 10550
+${ACTIVITI_IP} 127.0.0.1
+${ACTIVITI_PORT} 8804
+${MGRSERVICE_IP} 127.0.0.1
+${MGRSERVICE_PORT} 8805
+${processId} demo
+${deployid} 0
+${bmpfilepath} ${SCRIPTS}/nfvo-wfengine/demo.bpmn20.xml
+
+*** Test Cases ***
+Deploy BPMN File Test On Activiti
+ [Documentation] Check if the test bpmn file can be deployed in activiti engine
+ ${auth}= Create List kermit kermit
+ ${headers}= Create Dictionary Accept=application/json
+ Create Session web_session http://${ACTIVITI_IP}:${ACTIVITI_PORT} headers=${headers} auth=${auth}
+ ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
+ ${resp}= Post Request web_session /activiti-rest/service/repository/deployments files=${files}
+ Should Be Equal ${resp.status_code} ${201}
+ Log ${resp.json()}
+ ${deployedId}= Set Variable ${resp.json()["id"]}
+ Set Global Variable ${deployedId}
+
+Exectue BPMN File Testt On Activiti
+ [Documentation] Check if the test bpmn file can be exectued in activiti engine
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
+ Create Session web_session http://${ACTIVITI_IP}:${ACTIVITI_PORT} headers=${headers}
+ ${body} Create Dictionary processDefinitionKey=${processId}
+ ${body} dumps ${body}
+ ${resp}= Post Request web_session /activiti-rest/service/runtime/process-instances ${body}
+ Should Be Equal ${resp.status_code} ${201}
+
+UnDeploy BPMN File Testt On Activiti
+ [Documentation] Check if the test bpmn file can be undeployed in activiti engine
+ log ${deployedId}
+ ${auth}= Create List kermit kermit
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${ACTIVITI_IP}:${ACTIVITI_PORT} headers=${headers} auth=${auth}
+ ${resp}= Delete Request web_session /activiti-rest/service/repository/deployments/${deployedId}?cascade=true
+ Should Be Equal ${resp.status_code} ${204}
+
+Deploy BPMN File Test On MgrService
+ [Documentation] Check if the test bpmn file can be deployed in Management Service
+ ${auth}= Create List kermit kermit
+ ${headers}= Create Dictionary Accept=application/json
+ Create Session web_session http://${MGRSERVICE_IP}:${MGRSERVICE_PORT} headers=${headers} auth=${auth}
+ ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
+ ${resp}= Post Request web_session api/workflow/v1/package files=${files}
+ Should Be Equal ${resp.status_code} ${200}
+ Log ${resp.json()}
+ ${deployedId}= Set Variable ${resp.json()["deployedId"]}
+ Set Global Variable ${deployedId}
+
+Exectue BPMN File Testt On MgrService
+ [Documentation] Check if the test bpmn file can be exectued in Management Service
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
+ Create Session web_session http://${MGRSERVICE_IP}:${MGRSERVICE_PORT} headers=${headers}
+ ${body} Create Dictionary processDefinitionKey=${processId}
+ ${body} dumps ${body}
+ ${resp}= Post Request web_session api/workflow/v1/process/instance ${body}
+ Should Be Equal ${resp.status_code} ${200}
+ Log ${resp.json()}
+ Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId}
+
+UnDeploy BPMN File Testt On MgrService
+ [Documentation] Check if the test bpmn file can be undeployed in Management Service
+ log ${deployedId}
+ ${auth}= Create List kermit kermit
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${MGRSERVICE_IP}:${MGRSERVICE_PORT} headers=${headers} auth=${auth}
+ ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId}
+ Should Be Equal ${resp.status_code} ${200}
+
+Deploy BPMN File Test On MSB
+ [Documentation] Check if the test bpmn file can be deployed in activiti engine
+ ${auth}= Create List kermit kermit
+ ${headers}= Create Dictionary Accept=application/json
+ Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
+ ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
+ ${resp}= Post Request web_session api/workflow/v1/package files=${files}
+ Should Be Equal ${resp.status_code} ${200}
+ Log ${resp.json()}
+ ${deployedId}= Set Variable ${resp.json()["deployedId"]}
+ Set Global Variable ${deployedId}
+
+Exectue BPMN File Testt On MSB
+ [Documentation] Check if the test bpmn file can be exectued in MSB
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
+ Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers}
+ ${body} Create Dictionary processDefinitionKey=${processId}
+ ${body} dumps ${body}
+ ${resp}= Post Request web_session api/workflow/v1/process/instance ${body}
+ Should Be Equal ${resp.status_code} ${200}
+ Log ${resp.json()}
+ Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId}
+
+UnDeploy BPMN File Testt On MSB
+ [Documentation] Check if the test bpmn file can be undeployed in MSB
+ log ${deployedId}
+ ${auth}= Create List kermit kermit
+ ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+ Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
+ ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId}
+ Should Be Equal ${resp.status_code} ${200}
diff --git a/test/csit/tests/vnfsdk-pkgtools/tosca-metadata/csar/test_entry.mf b/test/csit/tests/vnfsdk-pkgtools/tosca-metadata/csar/test_entry.mf
index 710d1a201..4441457e8 100644
--- a/test/csit/tests/vnfsdk-pkgtools/tosca-metadata/csar/test_entry.mf
+++ b/test/csit/tests/vnfsdk-pkgtools/tosca-metadata/csar/test_entry.mf
@@ -1,5 +1,5 @@
metadata:
vnf_product_name: test
vnf_provider_id: test
-vnf_pacakage_version: 1.0
-vnf_release_date_time: 2017.09.15T15:00+8:00
+vnf_package_version: 1.0
+vnf_release_data_time: 2017-09-15T15:00:03+08:00
diff --git a/test/ete/labs/tlab/onap-openstack-template.env b/test/ete/labs/tlab/onap-openstack-template.env
index 52ecae9d8..3da2ca937 100644
--- a/test/ete/labs/tlab/onap-openstack-template.env
+++ b/test/ete/labs/tlab/onap-openstack-template.env
@@ -89,6 +89,10 @@ parameters:
vid_ip_addr: 10.0.8.1
clamp_ip_addr: 10.0.12.1
openo_ip_addr: 10.0.14.1
+ music_ip_addr: 10.0.15.1
+ oof_ip_addr: 10.0.16.1
+ aaf_ip_addr: 10.0.13.1
+ nbi_ip_addr: 10.0.17.1
###########################
# #
@@ -96,6 +100,7 @@ parameters:
# #
###########################
+ dcae_deployment_profile: R2MVP
dnsaas_config_enabled: false
dnsaas_region: RegionOne
dnsaas_keystone_url: https://bdc1tlab01.research.att.com:5000/v3
diff --git a/test/ete/labs/windriver/onap-openstack-template.env b/test/ete/labs/windriver/onap-openstack-template.env
index 27cd0ef62..6f4ea8783 100644
--- a/test/ete/labs/windriver/onap-openstack-template.env
+++ b/test/ete/labs/windriver/onap-openstack-template.env
@@ -89,6 +89,10 @@ parameters:
vid_ip_addr: 10.0.8.1
clamp_ip_addr: 10.0.12.1
openo_ip_addr: 10.0.14.1
+ music_ip_addr: 10.0.15.1
+ oof_ip_addr: 10.0.16.1
+ aaf_ip_addr: 10.0.13.1
+ nbi_ip_addr: 10.0.17.1
###########################
# #
@@ -96,6 +100,7 @@ parameters:
# #
###########################
+ dcae_deployment_profile: R2MVP
dnsaas_config_enabled: true
dnsaas_region: RegionOne
dnsaas_keystone_url: http://10.12.25.5:5000/v3
diff --git a/test/ete/scripts/deploy-onap.sh b/test/ete/scripts/deploy-onap.sh
index 68ca34830..689482c2e 100755
--- a/test/ete/scripts/deploy-onap.sh
+++ b/test/ete/scripts/deploy-onap.sh
@@ -39,13 +39,13 @@ popd
sed "1,/${SENTINEL}/d" ${ENV_SRC} >> ${ENV_FILE}
cat ${ENV_FILE}
-sdiff -w 180 ${ENV_SRC} ${ENV_FILE}
+diff ${ENV_SRC} ${ENV_FILE}
# generate final heat template
# add apt proxy to heat template if applicable
if [ -x $LAB_DIR/apt-proxy.sh ]; then
$LAB_DIR/apt-proxy.sh ${YAML_FILE}
- sdiff -w 180 ${YAML_SRC} ${YAML_FILE}
+ diff ${YAML_SRC} ${YAML_FILE}
fi
@@ -60,7 +60,9 @@ $WORKSPACE/test/ete/scripts/teardown-onap.sh
# create new stack
STACK="ete-$(uuidgen | cut -c-8)"
echo "New Stack Name: ${STACK}"
-openstack stack create -t ${YAML_FILE} -e ${ENV_FILE} $STACK
+if ! openstack stack create -t ${YAML_FILE} -e ${ENV_FILE} $STACK; then
+ exit 1
+fi
while [ "CREATE_IN_PROGRESS" == "$(openstack stack show -c stack_status -f value $STACK)" ]; do
sleep 20
diff --git a/test/vcpe/config_sdnc_so.py b/test/vcpe/config_sdnc_so.py
new file mode 100755
index 000000000..660c70eb4
--- /dev/null
+++ b/test/vcpe/config_sdnc_so.py
@@ -0,0 +1,89 @@
+#! /usr/bin/python
+
+import logging
+from vcpecommon import *
+import csar_parser
+
+
+def insert_customer_service_to_sdnc(vcpecommon):
+ """
+ INSERT INTO SERVICE_MODEL (`service_uuid`, `model_yaml`,`invariant_uuid`,`version`,`name`,`description`,`type`,`category`,`ecomp_naming`,`service_instance_name_prefix`,`filename`,`naming_policy`) values ('7e319b6f-e710-440e-bbd2-63c1004949ef', null, 'a99ace8a-6e3b-447d-b2ff-4614e4234eea',null,'vCPEService', 'vCPEService', 'Service','Network L1-3', 'N', 'vCPEService', 'vCpeResCust110701/service-Vcperescust110701-template.yml',null);
+ INSERT INTO ALLOTTED_RESOURCE_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`,`uuid`,`version`,`naming_policy`,`ecomp_generated_naming`,`depending_service`,`role`,`type`,`service_dependency`,`allotted_resource_type`) VALUES ( '7e40b664-d7bf-47ad-8f7c-615631d53cd7', NULL, 'f51b0aae-e24a-4cff-b190-fe3daf3d15ee', 'f3137496-1605-40e9-b6df-64aa0f8e91a0', '1.0', NULL,'Y',NULL,NULL,'TunnelXConnect',NULL, 'TunnelXConnect');
+ INSERT INTO ALLOTTED_RESOURCE_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`,`uuid`,`version`,`naming_policy`,`ecomp_generated_naming`,`depending_service`,`role`,`type`,`service_dependency`,`allotted_resource_type`) VALUES ( 'e46097e1-6a0c-4cf3-a7e5-c39ed407e65e', NULL, 'aa60f6ba-541b-48d6-a5ff-3b0e1f0ad9bf', '0e157d52-b945-422f-b3a8-ab685c2be079', '1.0', NULL,'Y',NULL,NULL,'BRG',NULL, 'TunnelXConnect');
+ INSERT INTO VF_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`,`uuid`,`version`,`name`,`naming_policy`,`ecomp_generated_naming`,`avail_zone_max_count`,`nf_function`,`nf_code`,`nf_type`,`nf_role`,`vendor`,`vendor_version`) VALUES ( '3768afa5-cf9e-4071-bb25-3a2e2628dd87', NULL, '5f56893b-d026-4672-b785-7f5ffeb498c6', '7cf28b23-5719-485b-9ab4-dae1a2fa0e07', '1.0', 'vspvgw111601',NULL,'Y',1,NULL,NULL,NULL,NULL,'vCPE','1.0');
+ INSERT INTO VF_MODULE_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`,`uuid`,`version`,`vf_module_type`,`availability_zone_count`,`ecomp_generated_vm_assignments`) VALUES ( '17a9c7d1-6f8e-4930-aa83-0d323585184f', NULL, 'd772ddd1-7623-40b4-a2a5-ec287916cb51', '6e1a0652-f5e9-4caa-bff8-39bf0c8589a3', '1.0', 'Base',NULL,NULL);
+
+ :return:
+ """
+ logger = logging.getLogger(__name__)
+ logger.info('Inserting customer service data to SDNC DB')
+ csar_file = vcpecommon.find_file('rescust', 'csar', 'csar')
+ parser = csar_parser.CsarParser()
+ parser.parse_csar(csar_file)
+ cmds = []
+ cmds.append("INSERT INTO SERVICE_MODEL (`service_uuid`, `model_yaml`,`invariant_uuid`,`version`,`name`," \
+ "`description`,`type`,`category`,`ecomp_naming`,`service_instance_name_prefix`,`filename`," \
+ "`naming_policy`) values ('{0}', null, '{1}',null,'{2}', 'vCPEService', 'Service','Network L1-3'," \
+ "'N', 'vCPEService', '{3}/{4}',null);".format(parser.svc_model['modelVersionId'],
+ parser.svc_model['modelInvariantId'],
+ parser.svc_model['modelName'],
+ parser.svc_model['modelName'],
+ parser.svc_model['modelName']))
+
+ for model in parser.vnf_models:
+ if 'tunnel' in model['modelCustomizationName'].lower() or 'brg' in model['modelCustomizationName'].lower():
+ cmds.append("INSERT INTO ALLOTTED_RESOURCE_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`," \
+ "`uuid`,`version`,`naming_policy`,`ecomp_generated_naming`,`depending_service`,`role`,`type`," \
+ "`service_dependency`,`allotted_resource_type`) VALUES ('{0}',NULL,'{1}','{2}','1.0'," \
+ "NULL,'Y', NULL,NULL,'TunnelXConnect'," \
+ "NULL, 'TunnelXConnect');".format(model['modelCustomizationId'], model['modelInvariantId'],
+ model['modelVersionId']))
+ else:
+ cmds.append("INSERT INTO VF_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`,`uuid`,`version`," \
+ "`name`,`naming_policy`,`ecomp_generated_naming`,`avail_zone_max_count`,`nf_function`," \
+ "`nf_code`,`nf_type`,`nf_role`,`vendor`,`vendor_version`) VALUES ('{0}',NULL,'{1}','{2}'," \
+ "'1.0', '{3}',NULL,'Y',1,NULL,NULL,NULL,NULL,'vCPE'," \
+ "'1.0');".format(model['modelCustomizationId'], model['modelInvariantId'],
+ model['modelVersionId'], model['modelCustomizationName'].split()[0]))
+
+ model = parser.vfmodule_models[0]
+ cmds.append("INSERT INTO VF_MODULE_MODEL (`customization_uuid`,`model_yaml`,`invariant_uuid`,`uuid`,`version`," \
+ "`vf_module_type`,`availability_zone_count`,`ecomp_generated_vm_assignments`) VALUES ('{0}', NULL," \
+ "'{1}', '{2}', '1.0', 'Base',NULL,NULL)" \
+ ";".format(model['modelCustomizationId'], model['modelInvariantId'], model['modelVersionId']))
+ print('\n'.join(cmds))
+ vcpecommon.insert_into_sdnc_db(cmds)
+
+
+def insert_customer_service_to_so(vcpecommon):
+ logger = logging.getLogger(__name__)
+ logger.info('Inserting neutron HEAT template to SO DB and creating a recipe for customer service')
+ csar_file = vcpecommon.find_file('rescust', 'csar', 'csar')
+ parser = csar_parser.CsarParser()
+ parser.parse_csar(csar_file)
+ cmds = []
+ cmds.append("INSERT INTO `service_recipe` (`ACTION`, `VERSION_STR`, `DESCRIPTION`, `ORCHESTRATION_URI`, " \
+ "`SERVICE_PARAM_XSD`, `RECIPE_TIMEOUT`, `SERVICE_TIMEOUT_INTERIM`, `CREATION_TIMESTAMP`, " \
+ "`SERVICE_MODEL_UUID`) VALUES ('createInstance','1','{0}'," \
+ "'/mso/async/services/CreateVcpeResCustService',NULL,181,NULL, NOW()," \
+ "'{1}');".format(parser.svc_model['modelName'], parser.svc_model['modelVersionId']))
+
+ cmds.append("delete from `heat_template_params` where"
+ "`HEAT_TEMPLATE_ARTIFACT_UUID`='efee1d84-b8ec-11e7-abc4-cec278b6b50a';")
+ cmds.append("delete from `heat_template` where ARTIFACT_UUID='efee1d84-b8ec-11e7-abc4-cec278b6b50a';")
+ network_template_file = vcpecommon.find_file('neutron', 'yaml', 'preload_templates')
+ with open(network_template_file, 'r') as fin:
+ lines = fin.readlines()
+ longtext = '\n'.join(lines)
+ cmds.append("INSERT INTO `heat_template`(`ARTIFACT_UUID`, `NAME`, `VERSION`, `BODY`, `TIMEOUT_MINUTES`, " \
+ "`DESCRIPTION`, `CREATION_TIMESTAMP`, `ARTIFACT_CHECKSUM`) VALUES(" \
+ "'efee1d84-b8ec-11e7-abc4-cec278b6b50a', 'Generic NeutronNet', '1', '{0}', 10, " \
+ "'Generic Neutron Template', NOW(), 'MANUAL RECORD');".format(longtext))
+
+ cmds.append("INSERT INTO `heat_template_params`(`HEAT_TEMPLATE_ARTIFACT_UUID`, `PARAM_NAME`, `IS_REQUIRED`, " \
+ "`PARAM_TYPE`, `PARAM_ALIAS`) VALUES('efee1d84-b8ec-11e7-abc4-cec278b6b50a', 'shared', 0, " \
+ "'string', NULL);")
+
+ print('\n'.join(cmds))
+ vcpecommon.insert_into_so_db(cmds)
+
diff --git a/test/vcpe/csar_parser.py b/test/vcpe/csar_parser.py
new file mode 100755
index 000000000..f101364d5
--- /dev/null
+++ b/test/vcpe/csar_parser.py
@@ -0,0 +1,231 @@
+#! /usr/bin/python
+import os
+import zipfile
+import shutil
+import yaml
+import json
+import logging
+
+
+class CsarParser:
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+ self.svc_model = {}
+ self.net_models = [] # there could be multiple networks
+ self.vnf_models = [] # this version only support a single VNF in the service template
+ self.vfmodule_models = [] # this version only support a single VF module in the service template
+
+ def get_service_yaml_from_csar(self, csar_file):
+ """
+ :param csar_file: csar file path name, e.g. 'csar/vgmux.csar'
+ :return:
+ """
+ tmpdir = './__tmp'
+ if os.path.isdir(tmpdir):
+ shutil.rmtree(tmpdir)
+ os.mkdir(tmpdir)
+
+ with zipfile.ZipFile(csar_file, "r") as zip_ref:
+ zip_ref.extractall(tmpdir)
+
+ yamldir = tmpdir + '/Definitions'
+ if os.path.isdir(yamldir):
+ for filename in os.listdir(yamldir):
+ # look for service template like this: service-Vcpesvcbrgemu111601-template.yml
+ if filename.startswith('service-') and filename.endswith('-template.yml'):
+ return os.path.join(yamldir, filename)
+
+ self.logger.error('Invalid file: ' + csar_file)
+ return ''
+
+ def get_service_model_info(self, svc_template):
+ """ extract service model info from yaml and convert to what to be used in SO request
+ Sample from yaml:
+ {
+ "UUID": "aed4fc5e-b871-4e26-8531-ceabd46df85e",
+ "category": "Network L1-3",
+ "description": "Infra service",
+ "ecompGeneratedNaming": true,
+ "invariantUUID": "c806682a-5b3a-44d8-9e88-0708be151296",
+ "name": "vcpesvcinfra111601",
+ "namingPolicy": "",
+ "serviceEcompNaming": true,
+ "serviceRole": "",
+ "serviceType": "",
+ "type": "Service"
+ },
+
+ Convert to
+ {
+ "modelType": "service",
+ "modelInvariantId": "ca4c7a70-06fd-45d8-8b9e-c9745d25bf2b",
+ "modelVersionId": "5d8911b4-e50c-4096-a81e-727a8157193c",
+ "modelName": "vcpesvcbrgemu111601",
+ "modelVersion": "1.0"
+ },
+
+ """
+ if svc_template['metadata']['type'] != 'Service':
+ self.logger.error('csar error: metadata->type is not Service')
+ return
+
+ metadata = svc_template['metadata']
+ self.svc_model = {
+ 'modelType': 'service',
+ 'modelInvariantId': metadata['invariantUUID'],
+ 'modelVersionId': metadata['UUID'],
+ 'modelName': metadata['name']
+ }
+ if 'version' in metadata:
+ self.svc_model['modelVersion'] = metadata['version']
+ else:
+ self.svc_model['modelVersion'] = '1.0'
+
+ def get_vnf_and_network_model_info(self, svc_template):
+ """ extract vnf and network model info from yaml and convert to what to be used in SO request
+ Sample from yaml:
+ "topology_template": {
+ "node_templates": {
+ "CPE_PUBLIC": {
+ "metadata": {
+ "UUID": "33b2c367-a165-4bb3-81c3-0150cd06ceff",
+ "category": "Generic",
+ "customizationUUID": "db1d4ac2-62cd-4e5d-b2dc-300dbd1a5da1",
+ "description": "Generic NeutronNet",
+ "invariantUUID": "3d4c0e47-4794-4e98-a794-baaced668930",
+ "name": "Generic NeutronNet",
+ "resourceVendor": "ATT (Tosca)",
+ "resourceVendorModelNumber": "",
+ "resourceVendorRelease": "1.0.0.wd03",
+ "subcategory": "Network Elements",
+ "type": "VL",
+ "version": "1.0"
+ },
+ "type": "org.openecomp.resource.vl.GenericNeutronNet"
+ },
+ Convert to
+ {
+ "modelType": "network",
+ "modelInvariantId": "3d4c0e47-4794-4e98-a794-baaced668930",
+ "modelVersionId": "33b2c367-a165-4bb3-81c3-0150cd06ceff",
+ "modelName": "Generic NeutronNet",
+ "modelVersion": "1.0",
+ "modelCustomizationId": "db1d4ac2-62cd-4e5d-b2dc-300dbd1a5da1",
+ "modelCustomizationName": "CPE_PUBLIC"
+ },
+ """
+ node_dic = svc_template['topology_template']['node_templates']
+ for node_name, v in node_dic.items():
+ model = {
+ 'modelInvariantId': v['metadata']['invariantUUID'],
+ 'modelVersionId': v['metadata']['UUID'],
+ 'modelName': v['metadata']['name'],
+ 'modelVersion': v['metadata']['version'],
+ 'modelCustomizationId': v['metadata']['customizationUUID'],
+ 'modelCustomizationName': node_name
+ }
+
+ if v['type'].startswith('org.openecomp.resource.vl.GenericNeutronNet'):
+ # a neutron network is found
+ self.logger.info('Parser found a network: ' + node_name)
+ model['modelType'] = 'network'
+ self.net_models.append(model)
+ elif v['type'].startswith('org.openecomp.resource.vf.'):
+ # a VNF is found
+ self.logger.info('Parser found a VNF: ' + node_name)
+ model['modelType'] = 'vnf'
+ self.vnf_models.append(model)
+ else:
+ self.logger.warning('Parser found a node that is neither a network nor a VNF: ' + node_name)
+
+ def get_vfmodule_model_info(self, svc_template):
+ """ extract network model info from yaml and convert to what to be used in SO request
+ Sample from yaml:
+ "topology_template": {
+ "groups": {
+ "vspinfra1116010..Vspinfra111601..base_vcpe_infra..module-0": {
+ "metadata": {
+ "vfModuleModelCustomizationUUID": "11ddac51-30e3-4a3f-92eb-2eb99c2cb288",
+ "vfModuleModelInvariantUUID": "02f70416-581e-4f00-bde1-d65e69af95c5",
+ "vfModuleModelName": "Vspinfra111601..base_vcpe_infra..module-0",
+ "vfModuleModelUUID": "88c78078-f1fd-4f73-bdd9-10420b0f6353",
+ "vfModuleModelVersion": "1"
+ },
+ "properties": {
+ "availability_zone_count": null,
+ "initial_count": 1,
+ "max_vf_module_instances": 1,
+ "min_vf_module_instances": 1,
+ "vf_module_description": null,
+ "vf_module_label": "base_vcpe_infra",
+ "vf_module_type": "Base",
+ "vfc_list": null,
+ "volume_group": false
+ },
+ "type": "org.openecomp.groups.VfModule"
+ }
+ },
+ Convert to
+ {
+ "modelType": "vfModule",
+ "modelInvariantId": "02f70416-581e-4f00-bde1-d65e69af95c5",
+ "modelVersionId": "88c78078-f1fd-4f73-bdd9-10420b0f6353",
+ "modelName": "Vspinfra111601..base_vcpe_infra..module-0",
+ "modelVersion": "1",
+ "modelCustomizationId": "11ddac51-30e3-4a3f-92eb-2eb99c2cb288",
+ "modelCustomizationName": "Vspinfra111601..base_vcpe_infra..module-0"
+ },
+ """
+ node_dic = svc_template['topology_template']['groups']
+ for node_name, v in node_dic.items():
+ if v['type'].startswith('org.openecomp.groups.VfModule'):
+ model = {
+ 'modelType': 'vfModule',
+ 'modelInvariantId': v['metadata']['vfModuleModelInvariantUUID'],
+ 'modelVersionId': v['metadata']['vfModuleModelUUID'],
+ 'modelName': v['metadata']['vfModuleModelName'],
+ 'modelVersion': v['metadata']['vfModuleModelVersion'],
+ 'modelCustomizationId': v['metadata']['vfModuleModelCustomizationUUID'],
+ 'modelCustomizationName': v['metadata']['vfModuleModelName']
+ }
+ self.vfmodule_models.append(model)
+ self.logger.info('Parser found a VF module: ' + model['modelCustomizationName'])
+
+ def parse_service_yaml(self, filename):
+ # clean up
+ self.svc_model = {}
+ self.net_models = [] # there could be multiple networks
+ self.vnf_models = [] # this version only support a single VNF in the service template
+ self.vfmodule_models = [] # this version only support a single VF module in the service template
+
+ svc_template = yaml.load(file(filename, 'r'))
+ self.get_service_model_info(svc_template)
+ self.get_vnf_and_network_model_info(svc_template)
+ self.get_vfmodule_model_info(svc_template)
+
+ return True
+
+ def parse_csar(self, csar_file):
+ yaml_file = self.get_service_yaml_from_csar(csar_file)
+ if yaml_file != '':
+ return self.parse_service_yaml(yaml_file)
+
+ def print_models(self):
+ print('---------Service Model----------')
+ print(json.dumps(self.svc_model, indent=2, sort_keys=True))
+
+ print('---------Network Model(s)----------')
+ for model in self.net_models:
+ print(json.dumps(model, indent=2, sort_keys=True))
+
+ print('---------VNF Model(s)----------')
+ for model in self.vnf_models:
+ print(json.dumps(model, indent=2, sort_keys=True))
+
+ print('---------VF Module Model(s)----------')
+ for model in self.vfmodule_models:
+ print(json.dumps(model, indent=2, sort_keys=True))
+
+ def test(self):
+ self.parse_csar('csar/service-Vcpesvcinfra111601-csar.csar')
+ self.print_models()
diff --git a/test/vcpe/get_info.py b/test/vcpe/get_info.py
new file mode 100755
index 000000000..5b0c6879b
--- /dev/null
+++ b/test/vcpe/get_info.py
@@ -0,0 +1,26 @@
+#! /usr/bin/python
+
+import time
+import logging
+import json
+import mysql.connector
+import ipaddress
+import re
+import sys
+import base64
+from vcpecommon import *
+import preload
+import vcpe_custom_service
+
+
+logging.basicConfig(level=logging.INFO, format='%(message)s')
+
+vcpecommon = VcpeCommon()
+nodes=['brg', 'bng', 'mux', 'dhcp']
+hosts = vcpecommon.get_vm_ip(nodes)
+print(json.dumps(hosts, indent=4, sort_keys=True))
+
+
+
+
+
diff --git a/test/vcpe/healthcheck.py b/test/vcpe/healthcheck.py
new file mode 100755
index 000000000..b94848e13
--- /dev/null
+++ b/test/vcpe/healthcheck.py
@@ -0,0 +1,30 @@
+#! /usr/bin/python
+
+import logging
+import json
+from vcpecommon import *
+import commands
+
+
+logging.basicConfig(level=logging.INFO, format='%(message)s')
+common = VcpeCommon()
+
+print('Checking vGMUX REST API from SDNC')
+cmd = 'curl -u admin:admin -X GET http://10.0.101.21:8183/restconf/config/ietf-interfaces:interfaces'
+ret = commands.getstatusoutput("ssh -i onap_dev root@sdnc '{0}'".format(cmd))
+sz = ret[-1].split('\n')[-1]
+print('\n')
+print(sz)
+
+print('Checking vBRG REST API from SDNC')
+cmd = 'curl -u admin:admin -X GET http://10.3.0.2:8183/restconf/config/ietf-interfaces:interfaces'
+ret = commands.getstatusoutput("ssh -i onap_dev root@sdnc '{0}'".format(cmd))
+sz = ret[-1].split('\n')[-1]
+print('\n')
+print(sz)
+
+print('Checking SDNC DB for vBRG MAC address')
+mac = common.get_brg_mac_from_sdnc()
+print(mac)
+
+
diff --git a/test/vcpe/loop.py b/test/vcpe/loop.py
new file mode 100755
index 000000000..ad5879715
--- /dev/null
+++ b/test/vcpe/loop.py
@@ -0,0 +1,37 @@
+#! /usr/bin/python
+
+import time
+import logging
+import json
+import mysql.connector
+import ipaddress
+import re
+import sys
+import base64
+from vcpecommon import *
+import preload
+import commands
+import vcpe_custom_service
+
+
+logging.basicConfig(level=logging.INFO, format='%(message)s')
+
+cpecommon = VcpeCommon()
+custom = vcpe_custom_service.CustomService(cpecommon)
+
+nodes=['mux']
+hosts = cpecommon.get_vm_ip(nodes)
+
+custom.del_vgmux_ves_mode(hosts['mux'])
+time.sleep(2)
+custom.del_vgmux_ves_collector(hosts['mux'])
+exit()
+
+time.sleep(2)
+logging.info('Setting vGMUX DCAE collector IP address')
+custom.set_vgmux_ves_collector(hosts['mux'])
+time.sleep(2)
+vgmux_vnf_name = cpecommon.load_object('vgmux_vnf_name')
+logging.info('vGMUX VNF instance name is %s', vgmux_vnf_name)
+logging.info('Letting vGMUX report packet loss to DCAE')
+custom.set_vgmux_packet_loss_rate(hosts['mux'], 55, vgmux_vnf_name)
diff --git a/test/vcpe/preload.py b/test/vcpe/preload.py
new file mode 100755
index 000000000..c4efafde6
--- /dev/null
+++ b/test/vcpe/preload.py
@@ -0,0 +1,216 @@
+#! /usr/bin/python
+
+import requests
+import json
+import sys
+from datetime import datetime
+from vcpecommon import *
+import csar_parser
+import logging
+import base64
+
+
+class Preload:
+ def __init__(self, vcpecommon):
+ self.logger = logging.getLogger(__name__)
+ self.vcpecommon = vcpecommon
+
+ def replace(self, sz, replace_dict):
+ for old_string, new_string in replace_dict.items():
+ sz = sz.replace(old_string, new_string)
+ if self.vcpecommon.template_variable_symbol in sz:
+ self.logger.error('Error! Cannot find a value to replace ' + sz)
+ return sz
+
+ def generate_json(self, template_file, replace_dict):
+ with open(template_file) as json_input:
+ json_data = json.load(json_input)
+ stk = [json_data]
+ while len(stk) > 0:
+ data = stk.pop()
+ for k, v in data.items():
+ if type(v) is dict:
+ stk.append(v)
+ elif type(v) is list:
+ stk.extend(v)
+ elif type(v) is str or type(v) is unicode:
+ if self.vcpecommon.template_variable_symbol in v:
+ data[k] = self.replace(v, replace_dict)
+ else:
+ self.logger.warning('Unexpected line in template: %s. Look for value %s', template_file, v)
+ return json_data
+
+ def reset_sniro(self):
+ self.logger.debug('Clearing SNIRO data')
+ r = requests.post(self.vcpecommon.sniro_url + '/reset', headers=self.vcpecommon.sniro_headers)
+ if 2 != r.status_code / 100:
+ self.logger.debug(r.content)
+            self.logger.error('Clearing SNIRO data failed.')
+ sys.exit()
+
+ def preload_sniro(self, template_sniro_data, template_sniro_request, tunnelxconn_ar_name, vgw_name, vbrg_ar_name,
+ vgmux_svc_instance_uuid, vbrg_svc_instance_uuid):
+ self.reset_sniro()
+ self.logger.info('Preloading SNIRO for homing service')
+ replace_dict = {'${tunnelxconn_ar_name}': tunnelxconn_ar_name,
+ '${vgw_name}': vgw_name,
+ '${brg_ar_name}': vbrg_ar_name,
+ '${vgmux_svc_instance_uuid}': vgmux_svc_instance_uuid,
+ '${vbrg_svc_instance_uuid}': vbrg_svc_instance_uuid
+ }
+ sniro_data = self.generate_json(template_sniro_data, replace_dict)
+ self.logger.debug('SNIRO data:')
+ self.logger.debug(json.dumps(sniro_data, indent=4, sort_keys=True))
+
+ base64_sniro_data = base64.b64encode(json.dumps(sniro_data))
+ self.logger.debug('SNIRO data: 64')
+ self.logger.debug(base64_sniro_data)
+ replace_dict = {'${base64_sniro_data}': base64_sniro_data, '${sniro_ip}': self.vcpecommon.hosts['robot']}
+ sniro_request = self.generate_json(template_sniro_request, replace_dict)
+ self.logger.debug('SNIRO request:')
+ self.logger.debug(json.dumps(sniro_request, indent=4, sort_keys=True))
+
+ r = requests.post(self.vcpecommon.sniro_url, headers=self.vcpecommon.sniro_headers, json=sniro_request)
+ if 2 != r.status_code / 100:
+ response = r.json()
+ self.logger.debug(json.dumps(response, indent=4, sort_keys=True))
+ self.logger.error('SNIRO preloading failed.')
+ sys.exit()
+
+ return True
+
+ def preload_network(self, template_file, network_role, subnet_start_ip, subnet_gateway, common_dict, name_suffix):
+ """
+ :param template_file:
+ :param network_role: cpe_signal, cpe_public, brg_bng, bng_mux, mux_gw
+ :param subnet_start_ip:
+ :param subnet_gateway:
+ :param name_suffix: e.g. '201711201311'
+ :return:
+ """
+ network_name = '_'.join([self.vcpecommon.instance_name_prefix['network'], network_role.lower(), name_suffix])
+ subnet_name = self.vcpecommon.network_name_to_subnet_name(network_name)
+ common_dict['${' + network_role+'_net}'] = network_name
+ common_dict['${' + network_role+'_subnet}'] = subnet_name
+ replace_dict = {'${network_role}': network_role,
+ '${service_type}': 'vCPE',
+ '${network_type}': 'Generic NeutronNet',
+ '${network_name}': network_name,
+ '${subnet_start_ip}': subnet_start_ip,
+ '${subnet_gateway}': subnet_gateway
+ }
+ self.logger.info('Preloading network ' + network_role)
+ return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_network_url)
+
+ def preload(self, template_file, replace_dict, url):
+ json_data = self.generate_json(template_file, replace_dict)
+ self.logger.debug(json.dumps(json_data, indent=4, sort_keys=True))
+ r = requests.post(url, headers=self.vcpecommon.sdnc_headers, auth=self.vcpecommon.sdnc_userpass, json=json_data)
+ response = r.json()
+ if int(response.get('output', {}).get('response-code', 0)) != 200:
+ self.logger.debug(json.dumps(response, indent=4, sort_keys=True))
+ self.logger.error('Preloading failed.')
+ return False
+ return True
+
+ def preload_vgw(self, template_file, brg_mac, commont_dict, name_suffix):
+ replace_dict = {'${brg_mac}': brg_mac,
+ '${suffix}': name_suffix
+ }
+ replace_dict.update(commont_dict)
+ self.logger.info('Preloading vGW')
+ return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_vnf_url)
+
+ def preload_vfmodule(self, template_file, service_instance_id, vnf_model, vfmodule_model, common_dict, name_suffix):
+ """
+ :param template_file:
+ :param service_instance_id:
+ :param vnf_model: parsing results from csar_parser
+ :param vfmodule_model: parsing results from csar_parser
+ :param common_dict:
+ :param name_suffix:
+ :return:
+ """
+
+ # examples:
+ # vfmodule_model['modelCustomizationName']: "Vspinfra111601..base_vcpe_infra..module-0",
+ # vnf_model['modelCustomizationName']: "vspinfra111601 0",
+
+ vfmodule_name = '_'.join([self.vcpecommon.instance_name_prefix['vfmodule'],
+ vfmodule_model['modelCustomizationName'].split('..')[0].lower(), name_suffix])
+
+ # vnf_type and generic_vnf_type are identical
+ replace_dict = {'${vnf_type}': vfmodule_model['modelCustomizationName'],
+ '${generic_vnf_type}': vfmodule_model['modelCustomizationName'],
+ '${service_type}': service_instance_id,
+ '${generic_vnf_name}': vnf_model['modelCustomizationName'],
+ '${vnf_name}': vfmodule_name,
+ '${suffix}': name_suffix}
+ replace_dict.update(common_dict)
+ self.logger.info('Preloading VF Module ' + vfmodule_name)
+ return self.preload(template_file, replace_dict, self.vcpecommon.sdnc_preload_vnf_url)
+
+ def preload_all_networks(self, template_file, name_suffix):
+ common_dict = {'${' + k + '}': v for k, v in self.vcpecommon.common_preload_config.items()}
+ for network, v in self.vcpecommon.preload_network_config.items():
+ subnet_start_ip, subnet_gateway_ip = v
+ if not self.preload_network(template_file, network, subnet_start_ip, subnet_gateway_ip,
+ common_dict, name_suffix):
+ return None
+ return common_dict
+
+ def test(self):
+ # this is for testing purpose
+ name_suffix = datetime.now().strftime('%Y%m%d%H%M')
+ vcpecommon = VcpeCommon()
+ preloader = Preload(vcpecommon)
+
+ network_dict = {'${' + k + '}': v for k, v in self.vcpecommon.common_preload_config.items()}
+ template_file = 'preload_templates/template.network.json'
+ for k, v in self.vcpecommon.preload_network_config.items():
+ if not preloader.preload_network(template_file, k, v[0], v[1], network_dict, name_suffix):
+ break
+
+ print('---------------------------------------------------------------')
+ print('Network related replacement dictionary:')
+ print(json.dumps(network_dict, indent=4, sort_keys=True))
+ print('---------------------------------------------------------------')
+
+ keys = ['infra', 'bng', 'gmux', 'brg']
+ for key in keys:
+ csar_file = self.vcpecommon.find_file(key, 'csar', 'csar')
+ template_file = self.vcpecommon.find_file(key, 'json', 'preload_templates')
+ if csar_file and template_file:
+ parser = csar_parser.CsarParser()
+ parser.parse_csar(csar_file)
+ service_instance_id = 'test112233'
+ preloader.preload_vfmodule(template_file, service_instance_id, parser.vnf_models[0],
+ parser.vfmodule_models[0], network_dict, name_suffix)
+
+ def test_sniro(self):
+ template_sniro_data = self.vcpecommon.find_file('sniro_data', 'json', 'preload_templates')
+ template_sniro_request = self.vcpecommon.find_file('sniro_request', 'json', 'preload_templates')
+
+ vcperescust_csar = self.vcpecommon.find_file('rescust', 'csar', 'csar')
+ parser = csar_parser.CsarParser()
+ parser.parse_csar(vcperescust_csar)
+ tunnelxconn_ar_name = None
+ brg_ar_name = None
+ vgw_name = None
+ for model in parser.vnf_models:
+ if 'tunnel' in model['modelCustomizationName']:
+ tunnelxconn_ar_name = model['modelCustomizationName']
+ elif 'brg' in model['modelCustomizationName']:
+ brg_ar_name = model['modelCustomizationName']
+ elif 'vgw' in model['modelCustomizationName']:
+ vgw_name = model['modelCustomizationName']
+
+ if not (tunnelxconn_ar_name and brg_ar_name and vgw_name):
+ self.logger.error('Cannot find all names from %s.', vcperescust_csar)
+ sys.exit()
+
+ vgmux_svc_instance_uuid = '88888888888888'
+ vbrg_svc_instance_uuid = '999999999999999'
+
+ self.preload_sniro(template_sniro_data, template_sniro_request, tunnelxconn_ar_name, vgw_name, brg_ar_name,
+ vgmux_svc_instance_uuid, vbrg_svc_instance_uuid)
diff --git a/test/vcpe/soutils.py b/test/vcpe/soutils.py
new file mode 100755
index 000000000..cc82068a6
--- /dev/null
+++ b/test/vcpe/soutils.py
@@ -0,0 +1,318 @@
+#! /usr/bin/python
+
+import sys
+import logging
+import requests
+import json
+from datetime import datetime
+import progressbar
+import time
+import csar_parser
+import preload
+from vcpecommon import *
+
+
+class SoUtils:
+ def __init__(self, vcpecommon, api_version):
+ """
+ :param vcpecommon:
+ :param api_version: must be 'v4' or 'v5'
+ """
+ self.logger = logging.getLogger(__name__)
+ self.vcpecommon = vcpecommon
+ if api_version not in self.vcpecommon.so_req_api_url:
+ self.logger.error('Incorrect SO API version: %s', api_version)
+ sys.exit()
+ self.service_req_api_url = self.vcpecommon.so_req_api_url[api_version]
+
+ def submit_create_req(self, req_json, req_type, service_instance_id=None, vnf_instance_id=None):
+ """
+ POST {serverRoot}/serviceInstances/v4
+ POST {serverRoot}/serviceInstances/v4/{serviceInstanceId}/vnfs
+ POST {serverRoot}/serviceInstances/v4/{serviceInstanceId}/networks
+ POST {serverRoot}/serviceInstances/v4/{serviceInstanceId}/vnfs/{vnfInstanceId}/vfModules
+ :param req_json:
+ :param service_instance_id: this is required only for networks, vnfs, and vf modules
+ :param req_type:
+ :param vnf_instance_id:
+ :return: req_id, instance_id
+ """
+ if req_type == 'service':
+ url = self.service_req_api_url
+ elif req_type == 'vnf':
+ url = '/'.join([self.service_req_api_url, service_instance_id, 'vnfs'])
+ elif req_type == 'network':
+ url = '/'.join([self.service_req_api_url, service_instance_id, 'networks'])
+ elif req_type == 'vfmodule':
+ url = '/'.join([self.service_req_api_url, service_instance_id, 'vnfs', vnf_instance_id, 'vfModules'])
+ else:
+ self.logger.error('Invalid request type: {0}. Can only be service/vnf/network/vfmodule'.format(req_type))
+ return None, None
+
+ r = requests.post(url, headers=self.vcpecommon.so_headers, auth=self.vcpecommon.so_userpass, json=req_json)
+ response = r.json()
+
+ self.logger.debug('---------------------------------------------------------------')
+ self.logger.debug('------- Creation request submitted to SO, got response --------')
+ self.logger.debug(json.dumps(response, indent=4, sort_keys=True))
+ self.logger.debug('---------------------------------------------------------------')
+ req_id = response.get('requestReferences', {}).get('requestId', '')
+ instance_id = response.get('requestReferences', {}).get('instanceId', '')
+ return req_id, instance_id
+
+ def check_progress(self, req_id, eta=0, interval=5):
+ if not req_id:
+ self.logger.error('Error when checking SO request progress, invalid request ID: ' + req_id)
+ return False
+ duration = 0.0
+ bar = progressbar.ProgressBar(redirect_stdout=True)
+ url = self.vcpecommon.so_check_progress_api_url + '/' + req_id
+
+ while True:
+ time.sleep(interval)
+ r = requests.get(url, headers=self.vcpecommon.so_headers, auth=self.vcpecommon.so_userpass)
+ response = r.json()
+
+ duration += interval
+ if eta > 0:
+ percentage = min(95, 100 * duration / eta)
+ else:
+ percentage = int(response['request']['requestStatus']['percentProgress'])
+
+ if response['request']['requestStatus']['requestState'] == 'IN_PROGRESS':
+ self.logger.debug('------------------Request Status-------------------------------')
+ self.logger.debug(json.dumps(response, indent=4, sort_keys=True))
+ bar.update(percentage)
+ else:
+ self.logger.debug('---------------------------------------------------------------')
+ self.logger.debug('----------------- Creation Request Results --------------------')
+ self.logger.debug(json.dumps(response, indent=4, sort_keys=True))
+ self.logger.debug('---------------------------------------------------------------')
+ flag = response['request']['requestStatus']['requestState'] == 'COMPLETE'
+ if not flag:
+ self.logger.error('Request failed.')
+ self.logger.error(json.dumps(response, indent=4, sort_keys=True))
+ bar.update(100)
+ bar.finish()
+ return flag
+
+ def add_req_info(self, req_details, instance_name, product_family_id=None):
+ req_details['requestInfo'] = {
+ 'instanceName': instance_name,
+ 'source': 'VID',
+ 'suppressRollback': 'true',
+ 'requestorId': 'vCPE-Robot'
+ }
+ if product_family_id:
+ req_details['requestInfo']['productFamilyId'] = product_family_id
+
+ def add_related_instance(self, req_details, instance_id, instance_model):
+ instance = {"instanceId": instance_id, "modelInfo": instance_model}
+ if 'relatedInstanceList' not in req_details:
+ req_details['relatedInstanceList'] = [{"relatedInstance": instance}]
+ else:
+ req_details['relatedInstanceList'].append({"relatedInstance": instance})
+
+ def generate_vnf_or_network_request(self, req_type, instance_name, vnf_or_network_model, service_instance_id,
+ service_model):
+ req_details = {
+ 'modelInfo': vnf_or_network_model,
+ 'cloudConfiguration': {"lcpCloudRegionId": self.vcpecommon.os_region_name,
+ "tenantId": self.vcpecommon.os_tenant_id},
+ 'requestParameters': {"userParams": []}
+ }
+ self.add_req_info(req_details, instance_name, self.vcpecommon.product_family_id)
+ self.add_related_instance(req_details, service_instance_id, service_model)
+ return {'requestDetails': req_details}
+
+ def generate_vfmodule_request(self, instance_name, vfmodule_model, service_instance_id,
+ service_model, vnf_instance_id, vnf_model):
+ req_details = {
+ 'modelInfo': vfmodule_model,
+ 'cloudConfiguration': {"lcpCloudRegionId": self.vcpecommon.os_region_name,
+ "tenantId": self.vcpecommon.os_tenant_id},
+ 'requestParameters': {"usePreload": 'true'}
+ }
+ self.add_req_info(req_details, instance_name, self.vcpecommon.product_family_id)
+ self.add_related_instance(req_details, service_instance_id, service_model)
+ self.add_related_instance(req_details, vnf_instance_id, vnf_model)
+ return {'requestDetails': req_details}
+
+ def generate_service_request(self, instance_name, model):
+ req_details = {
+ 'modelInfo': model,
+ 'subscriberInfo': {'globalSubscriberId': self.vcpecommon.global_subscriber_id},
+ 'requestParameters': {
+ "userParams": [],
+ "subscriptionServiceType": "vCPE",
+ "aLaCarte": 'true'
+ }
+ }
+ self.add_req_info(req_details, instance_name)
+ return {'requestDetails': req_details}
+
+ def generate_custom_service_request(self, instance_name, model, brg_mac):
+ req_details = {
+ 'modelInfo': model,
+ 'subscriberInfo': {'subscriberName': 'Kaneohe',
+ 'globalSubscriberId': self.vcpecommon.global_subscriber_id},
+ 'cloudConfiguration': {"lcpCloudRegionId": self.vcpecommon.os_region_name,
+ "tenantId": self.vcpecommon.os_tenant_id},
+ 'requestParameters': {
+ "userParams": [
+ {
+ 'name': 'BRG_WAN_MAC_Address',
+ 'value': brg_mac
+ }
+ ],
+ "subscriptionServiceType": "vCPE",
+ 'aLaCarte': 'false'
+ }
+ }
+ self.add_req_info(req_details, instance_name, self.vcpecommon.custom_product_family_id)
+ return {'requestDetails': req_details}
+
+ def create_custom_service(self, csar_file, brg_mac, name_suffix=None):
+ parser = csar_parser.CsarParser()
+ if not parser.parse_csar(csar_file):
+ return False
+
+        # suffix format: _yyyymmddhhmm (from '_' + strftime('%Y%m%d%H%M'))
+ if not name_suffix:
+ name_suffix = '_' + datetime.now().strftime('%Y%m%d%H%M')
+
+ # create service
+ instance_name = '_'.join([self.vcpecommon.instance_name_prefix['service'],
+ parser.svc_model['modelName'], name_suffix])
+ instance_name = instance_name.lower()
+ req = self.generate_custom_service_request(instance_name, parser.svc_model, brg_mac)
+ self.logger.debug(json.dumps(req, indent=2, sort_keys=True))
+ self.logger.info('Creating custom service {0}.'.format(instance_name))
+ req_id, svc_instance_id = self.submit_create_req(req, 'service')
+ if not self.check_progress(req_id, 140):
+ return False
+ return True
+
+ def wait_for_aai(self, node_type, uuid):
+ self.logger.info('Waiting for AAI traversal to complete...')
+ bar = progressbar.ProgressBar()
+ for i in range(30):
+ time.sleep(1)
+ bar.update(i*100.0/30)
+ if self.vcpecommon.is_node_in_aai(node_type, uuid):
+ bar.update(100)
+ bar.finish()
+ return
+
+ self.logger.error("AAI traversal didn't finish in 30 seconds. Something is wrong. Type {0}, UUID {1}".format(
+ node_type, uuid))
+ sys.exit()
+
+ def create_entire_service(self, csar_file, vnf_template_file, preload_dict, name_suffix, heatbridge=False):
+ """
+ :param csar_file:
+ :param vnf_template_file:
+ :param preload_dict:
+ :param name_suffix:
+ :return: service instance UUID
+ """
+ self.logger.info('\n----------------------------------------------------------------------------------')
+ self.logger.info('Start to create entire service defined in csar: {0}'.format(csar_file))
+ parser = csar_parser.CsarParser()
+ self.logger.info('Parsing csar ...')
+ if not parser.parse_csar(csar_file):
+ self.logger.error('Cannot parse csar: {0}'.format(csar_file))
+ return None
+
+ # create service
+ instance_name = '_'.join([self.vcpecommon.instance_name_prefix['service'],
+ parser.svc_model['modelName'], name_suffix])
+ instance_name = instance_name.lower()
+ self.logger.info('Creating service instance: {0}.'.format(instance_name))
+ req = self.generate_service_request(instance_name, parser.svc_model)
+ self.logger.debug(json.dumps(req, indent=2, sort_keys=True))
+ req_id, svc_instance_id = self.submit_create_req(req, 'service')
+ if not self.check_progress(req_id, eta=2, interval=1):
+ return None
+
+ # wait for AAI to complete traversal
+ self.wait_for_aai('service', svc_instance_id)
+
+ # create networks
+ for model in parser.net_models:
+ base_name = model['modelCustomizationName'].lower().replace('mux_vg', 'mux_gw')
+ network_name = '_'.join([self.vcpecommon.instance_name_prefix['network'], base_name, name_suffix])
+ network_name = network_name.lower()
+ self.logger.info('Creating network: ' + network_name)
+ req = self.generate_vnf_or_network_request('network', network_name, model, svc_instance_id,
+ parser.svc_model)
+ self.logger.debug(json.dumps(req, indent=2, sort_keys=True))
+ req_id, net_instance_id = self.submit_create_req(req, 'network', svc_instance_id)
+ if not self.check_progress(req_id, eta=20):
+ return None
+
+ self.logger.info('Changing subnet name to ' + self.vcpecommon.network_name_to_subnet_name(network_name))
+ self.vcpecommon.set_network_name(network_name)
+ subnet_name_changed = False
+ for i in range(20):
+ time.sleep(3)
+ if self.vcpecommon.set_subnet_name(network_name):
+ subnet_name_changed = True
+ break
+
+ if not subnet_name_changed:
+ self.logger.error('Failed to change subnet name for ' + network_name)
+ return None
+
+
+ vnf_model = None
+ vnf_instance_id = None
+ # create VNF
+ if len(parser.vnf_models) == 1:
+ vnf_model = parser.vnf_models[0]
+ vnf_instance_name = '_'.join([self.vcpecommon.instance_name_prefix['vnf'],
+ vnf_model['modelCustomizationName'].split(' ')[0], name_suffix])
+ vnf_instance_name = vnf_instance_name.lower()
+ self.logger.info('Creating VNF: ' + vnf_instance_name)
+ req = self.generate_vnf_or_network_request('vnf', vnf_instance_name, vnf_model, svc_instance_id,
+ parser.svc_model)
+ self.logger.debug(json.dumps(req, indent=2, sort_keys=True))
+ req_id, vnf_instance_id = self.submit_create_req(req, 'vnf', svc_instance_id)
+ if not self.check_progress(req_id, eta=2, interval=1):
+ self.logger.error('Failed to create VNF {0}.'.format(vnf_instance_name))
+ return False
+
+ # wait for AAI to complete traversal
+ if not vnf_instance_id:
+ self.logger.error('No VNF instance ID returned!')
+ sys.exit()
+ self.wait_for_aai('vnf', vnf_instance_id)
+
+ preloader = preload.Preload(self.vcpecommon)
+ preloader.preload_vfmodule(vnf_template_file, svc_instance_id, parser.vnf_models[0], parser.vfmodule_models[0],
+ preload_dict, name_suffix)
+ # create VF Module
+ if len(parser.vfmodule_models) == 1:
+ if not vnf_instance_id or not vnf_model:
+ self.logger.error('Invalid VNF instance ID or VNF model!')
+ sys.exit()
+
+ model = parser.vfmodule_models[0]
+ vfmodule_instance_name = '_'.join([self.vcpecommon.instance_name_prefix['vfmodule'],
+ model['modelCustomizationName'].split('..')[0], name_suffix])
+ vfmodule_instance_name = vfmodule_instance_name.lower()
+ self.logger.info('Creating VF Module: ' + vfmodule_instance_name)
+ req = self.generate_vfmodule_request(vfmodule_instance_name, model, svc_instance_id, parser.svc_model,
+ vnf_instance_id, vnf_model)
+ self.logger.debug(json.dumps(req, indent=2, sort_keys=True))
+ req_id, vfmodule_instance_id = self.submit_create_req(req, 'vfmodule', svc_instance_id, vnf_instance_id)
+ if not self.check_progress(req_id, eta=70, interval=5):
+ self.logger.error('Failed to create VF Module {0}.'.format(vfmodule_instance_name))
+ return None
+
+ # run heatbridge
+ if heatbridge:
+ self.vcpecommon.headbridge(vfmodule_instance_name, svc_instance_id)
+ self.vcpecommon.save_vgmux_vnf_name(vnf_instance_name)
+
+ return svc_instance_id
diff --git a/test/vcpe/vcpe.py b/test/vcpe/vcpe.py
new file mode 100755
index 000000000..7de86ae8d
--- /dev/null
+++ b/test/vcpe/vcpe.py
@@ -0,0 +1,207 @@
+#! /usr/bin/python
+import sys
+import logging
+from vcpecommon import *
+import soutils
+from datetime import datetime
+import preload
+import vcpe_custom_service
+import csar_parser
+import config_sdnc_so
+
+
+def config_sniro(vcpecommon, vgmux_svc_instance_uuid, vbrg_svc_instance_uuid):
+ logger = logging.getLogger(__name__)
+
+ logger.info('\n----------------------------------------------------------------------------------')
+ logger.info('Start to config SNIRO homing emulator')
+
+ preloader = preload.Preload(vcpecommon)
+ template_sniro_data = vcpecommon.find_file('sniro_data', 'json', 'preload_templates')
+ template_sniro_request = vcpecommon.find_file('sniro_request', 'json', 'preload_templates')
+
+ vcperescust_csar = vcpecommon.find_file('rescust', 'csar', 'csar')
+ parser = csar_parser.CsarParser()
+ parser.parse_csar(vcperescust_csar)
+ tunnelxconn_ar_name = None
+ brg_ar_name = None
+ vgw_name = None
+ for model in parser.vnf_models:
+ if 'tunnel' in model['modelCustomizationName']:
+ tunnelxconn_ar_name = model['modelCustomizationName']
+ elif 'brg' in model['modelCustomizationName']:
+ brg_ar_name = model['modelCustomizationName']
+ elif 'vgw' in model['modelCustomizationName']:
+ vgw_name = model['modelCustomizationName']
+
+ if not (tunnelxconn_ar_name and brg_ar_name and vgw_name):
+ logger.error('Cannot find all names from %s.', vcperescust_csar)
+ sys.exit()
+
+ preloader.preload_sniro(template_sniro_data, template_sniro_request, tunnelxconn_ar_name, vgw_name, brg_ar_name,
+ vgmux_svc_instance_uuid, vbrg_svc_instance_uuid)
+
+
+def create_one_service(vcpecommon, csar_file, vnf_template_file, preload_dict, suffix, heatbridge=False):
+ """
+ :return: service instance UUID
+ """
+ so = soutils.SoUtils(vcpecommon, 'v4')
+ return so.create_entire_service(csar_file, vnf_template_file, preload_dict, suffix, heatbridge)
+
+def deploy_brg_only():
+ logging.basicConfig(level=logging.INFO, format='%(message)s')
+ logger = logging.getLogger(__name__)
+
+ vcpecommon = VcpeCommon()
+ preload_dict = vcpecommon.load_preload_data()
+ name_suffix = preload_dict['${brg_bng_net}'].split('_')[-1]
+
+ # create multiple services based on the pre-determined order
+ svc_instance_uuid = vcpecommon.load_object(vcpecommon.svc_instance_uuid_file)
+ for keyword in ['brg']:
+ heatbridge = 'gmux' == keyword
+ csar_file = vcpecommon.find_file(keyword, 'csar', 'csar')
+ vnf_template_file = vcpecommon.find_file(keyword, 'json', 'preload_templates')
+ svc_instance_uuid[keyword] = create_one_service(vcpecommon, csar_file, vnf_template_file, preload_dict,
+ name_suffix, heatbridge)
+ if not svc_instance_uuid[keyword]:
+ sys.exit()
+
+ # Setting up SNIRO
+ config_sniro(vcpecommon, svc_instance_uuid['gmux'], svc_instance_uuid['brg'])
+
+def deploy_infra():
+ logging.basicConfig(level=logging.INFO, format='%(message)s')
+ logger = logging.getLogger(__name__)
+
+ vcpecommon = VcpeCommon()
+
+ # preload all networks
+ network_template = vcpecommon.find_file('network', 'json', 'preload_templates')
+ name_suffix = datetime.now().strftime('%Y%m%d%H%M')
+ preloader = preload.Preload(vcpecommon)
+ preload_dict = preloader.preload_all_networks(network_template, name_suffix)
+ logger.debug('Initial preload dictionary:')
+ logger.debug(json.dumps(preload_dict, indent=4, sort_keys=True))
+ if not preload_dict:
+ logger.error("Failed to preload networks.")
+ sys.exit()
+ vcpecommon.save_preload_data(preload_dict)
+
+ # create multiple services based on the pre-determined order
+ svc_instance_uuid = {}
+ for keyword in ['infra', 'bng', 'gmux', 'brg']:
+ heatbridge = 'gmux' == keyword
+ csar_file = vcpecommon.find_file(keyword, 'csar', 'csar')
+ vnf_template_file = vcpecommon.find_file(keyword, 'json', 'preload_templates')
+ svc_instance_uuid[keyword] = create_one_service(vcpecommon, csar_file, vnf_template_file, preload_dict,
+ name_suffix, heatbridge)
+ if not svc_instance_uuid[keyword]:
+ sys.exit()
+
+ vcpecommon.save_object(svc_instance_uuid, vcpecommon.svc_instance_uuid_file)
+ # Setting up SNIRO
+ config_sniro(vcpecommon, svc_instance_uuid['gmux'], svc_instance_uuid['brg'])
+
+ print('----------------------------------------------------------------------------------------------------')
+ print('Congratulations! The following have been completed correctly:')
+ print(' - Infrastructure Service Instantiation: ')
+ print(' * 4 VMs: DHCP, AAA, DNS, Web Server')
+ print(' * 2 Networks: CPE_PUBLIC, CPE_SIGNAL')
+ print(' - vBNG Service Instantiation: ')
+ print(' * 1 VM: vBNG')
+ print(' * 2 Networks: BRG_BNG, BNG_MUX')
+ print(' - vGMUX Service Instantiation: ')
+ print(' * 1 VM: vGMUX')
+ print(' * 1 Network: MUX_GW')
+ print(' - vBRG Service Instantiation: ')
+ print(' * 1 VM: vBRG')
+ print(' - Adding vGMUX vServer information to AAI.')
+ print(' - SNIRO Homing Emulator configuration.')
+
+
+def deploy_custom_service():
+ nodes = ['brg', 'mux']
+ vcpecommon = VcpeCommon(nodes)
+ custom_service = vcpe_custom_service.CustomService(vcpecommon)
+
+ # clean up
+ host_dic = {k: vcpecommon.hosts[k] for k in nodes}
+ if not vcpecommon.delete_vxlan_interfaces(host_dic):
+ sys.exit()
+
+ custom_service.clean_up_sdnc()
+ custom_service.del_all_vgw_stacks(vcpecommon.vgw_name_keyword)
+
+ # create new service
+ csar_file = vcpecommon.find_file('rescust', 'csar', 'csar')
+ vgw_template_file = vcpecommon.find_file('vgw', 'json', 'preload_templates')
+ preload_dict = vcpecommon.load_preload_data()
+ custom_service.create_custom_service(csar_file, vgw_template_file, preload_dict)
+
+
+def closed_loop(lossrate=0):
+ if lossrate > 0:
+ while 'y' != raw_input('Please enter docker container "drools" in Policy VM and type "policy stop". Then enter y here: ').lower():
+ continue
+ nodes = ['brg', 'mux']
+ logger = logging.getLogger('__name__')
+ vcpecommon = VcpeCommon(nodes)
+ logger.info('Cleaning up vGMUX data reporting settings')
+ vcpecommon.del_vgmux_ves_mode()
+ time.sleep(2)
+ vcpecommon.del_vgmux_ves_collector()
+
+    logger.info('Starting vGMUX data reporting to DCAE')
+ time.sleep(2)
+ vcpecommon.set_vgmux_ves_collector()
+
+ logger.info('Setting vGMUX to report packet loss rate: %s', lossrate)
+ time.sleep(2)
+ vcpecommon.set_vgmux_packet_loss_rate(lossrate, vcpecommon.load_vgmux_vnf_name())
+ if lossrate > 0:
+ print('Please enter docker container "drools" in Policy VM and type "policy start". Then observe vGMUX being restarted.')
+
+
+def init_so_sdnc():
+ logger = logging.getLogger('__name__')
+ vcpecommon = VcpeCommon()
+ config_sdnc_so.insert_customer_service_to_so(vcpecommon)
+ config_sdnc_so.insert_customer_service_to_sdnc(vcpecommon)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.INFO, format='%(message)s')
+
+ print('----------------------------------------------------------------------------------------------------')
+ print(' vcpe.py: Brief info about this program')
+# print(' vcpe.py sdc: Onboard VNFs, design and distribute vCPE services (under development)')
+ print(' vcpe.py init: Add customer service data to SDNC and SO DBs.')
+ print(' vcpe.py infra: Deploy infrastructure, including DHCP, AAA, DNS, Web Server, vBNG, vGMUX, vBRG.')
+ print(' vcpe.py customer: Deploy customer service, including vGW and VxLANs')
+ print(' vcpe.py loop: Test closed loop control')
+ print('----------------------------------------------------------------------------------------------------')
+
+ if len(sys.argv) != 2:
+ sys.exit()
+
+ if sys.argv[1] == 'sdc':
+ print('Under development')
+ elif sys.argv[1] == 'init':
+ if 'y' == raw_input('Ready to add customer service data to SDNC and SO DBs? This is needed only once.'
+                            ' y/n: ').lower():
+ init_so_sdnc()
+ elif sys.argv[1] == 'infra':
+ if 'y' == raw_input('Ready to deploy infrastructure? y/n: ').lower():
+ deploy_infra()
+ elif sys.argv[1] == 'customer':
+ if 'y' == raw_input('Ready to deploy customer service? y/n: ').lower():
+ deploy_custom_service()
+ elif sys.argv[1] == 'loop':
+ closed_loop(22)
+ elif sys.argv[1] == 'noloss':
+ closed_loop(0)
+ elif sys.argv[1] == 'brg':
+ deploy_brg_only()
+
diff --git a/test/vcpe/vcpe_custom_service.py b/test/vcpe/vcpe_custom_service.py
new file mode 100755
index 000000000..d89129eef
--- /dev/null
+++ b/test/vcpe/vcpe_custom_service.py
@@ -0,0 +1,80 @@
+#! /usr/bin/python
+
+import os
+import requests
+import time
+from vcpecommon import *
+from datetime import datetime
+import soutils
+import logging
+import preload
+import json
+
+
+class CustomService:
+ def __init__(self, vcpecommon):
+ self.logger = logging.getLogger(__name__)
+ self.vcpecommon = vcpecommon
+
+ # delete all vgw stacks
+ def del_all_vgw_stacks(self, keyword):
+ param = ' '.join([k + ' ' + v for k, v in self.vcpecommon.cloud.items()])
+ openstackcmd = 'openstack ' + param + ' '
+
+ stacks = os.popen(openstackcmd + 'stack list').read()
+ found = False
+ for stack_description in stacks.split('\n'):
+ if keyword in stack_description:
+ found = True
+ stack_name = stack_description.split('|')[2].strip()
+ cmd = openstackcmd + 'stack delete -y ' + stack_name
+ self.logger.info('Deleting ' + stack_name)
+ os.popen(cmd)
+
+ if not found:
+ self.logger.info('No vGW stack to delete')
+
+ # clean up SDNC
+ def clean_up_sdnc(self):
+ items = ['tunnelxconn-allotted-resources', 'brg-allotted-resources']
+ for res in items:
+ self.logger.info('Cleaning up ' + res + ' from SDNC')
+ requests.delete(self.vcpecommon.sdnc_ar_cleanup_url + res, auth=self.vcpecommon.sdnc_userpass)
+
+ def print_success_info(self, print_instructions=True, nodes=None):
+ if not nodes:
+ nodes = ['brg', 'mux', 'gw', 'web']
+ ip_dict = self.vcpecommon.get_vm_ip(nodes, self.vcpecommon.external_net_addr,
+ self.vcpecommon.external_net_prefix_len)
+
+ print(json.dumps(ip_dict, indent=4, sort_keys=True))
+ for node in ['brg', 'mux']:
+ print('VxLAN config in {0}:'.format(node))
+ self.vcpecommon.get_vxlan_interfaces(ip_dict[node], print_info=True)
+
+ print(json.dumps(ip_dict, indent=4, sort_keys=True))
+
+ if print_instructions:
+ print('----------------------------------------------------------------------------')
+ print('Custom service created successfully. See above for VxLAN configuration info.')
+            print('To test data plane connectivity, follow the steps below.')
+ print(' 1. ssh to vGW at {0}'.format(ip_dict['gw']))
+ print(' 2. Restart DHCP: systemctl restart isc-dhcp-server')
+ print(' 3. ssh to vBRG at {0}'.format(ip_dict['brg']))
+ print(' 4. Get IP from vGW: dhclient lstack')
+ print(' 5. Add route to Internet: ip route add 10.2.0.0/24 via 192.168.1.254 dev lstack')
+ print(' 6. ping the web server: ping {0}'.format('10.2.0.10'))
+ print(' 7. wget http://{0}'.format('10.2.0.10'))
+
+ def create_custom_service(self, csar_file, vgw_template_file, preload_dict=None):
+ name_suffix = datetime.now().strftime('%Y%m%d%H%M')
+ brg_mac = self.vcpecommon.get_brg_mac_from_sdnc()
+ # preload vGW
+ if preload_dict:
+ preloader = preload.Preload(self.vcpecommon)
+ preloader.preload_vgw(vgw_template_file, brg_mac, preload_dict, name_suffix)
+
+ # create service
+ so = soutils.SoUtils(self.vcpecommon, 'v5')
+ if so.create_custom_service(csar_file, brg_mac, name_suffix):
+ self.print_success_info()
diff --git a/test/vcpe/vcpecommon.py b/test/vcpe/vcpecommon.py
new file mode 100755
index 000000000..5b3e009a3
--- /dev/null
+++ b/test/vcpe/vcpecommon.py
@@ -0,0 +1,414 @@
+import json
+import logging
+import os
+import pickle
+import re
+import sys
+
+import ipaddress
+import mysql.connector
+import requests
+import commands
+import time
+
+
+class VcpeCommon:
+ #############################################################################################
+ # Start: configurations that you must change for a new ONAP installation
+ external_net_addr = '10.12.0.0'
+ external_net_prefix_len = 16
+ #############################################################################################
+ # set the openstack cloud access credentials here
+ cloud = {
+ '--os-auth-url': 'http://10.12.25.2:5000',
+ '--os-username': 'YOUR ID',
+ '--os-user-domain-id': 'default',
+ '--os-project-domain-id': 'default',
+ '--os-tenant-id': '087050388b204c73a3e418dd2c1fe30b',
+ '--os-region-name': 'RegionOne',
+ '--os-password': 'YOUR PASSWD',
+ '--os-project-domain-name': 'Integration-SB-01',
+ '--os-identity-api-version': '3'
+ }
+
+ common_preload_config = {
+ 'oam_onap_net': 'oam_onap_c4Uw',
+ 'oam_onap_subnet': 'oam_onap_c4Uw',
+ 'public_net': 'external',
+ 'public_net_id': '971040b2-7059-49dc-b220-4fab50cb2ad4'
+ }
+ # End: configurations that you must change for a new ONAP installation
+ #############################################################################################
+
+ template_variable_symbol = '${'
+ #############################################################################################
+ # preloading network config
+ # key=network role
+ # value = [subnet_start_ip, subnet_gateway_ip]
+ preload_network_config = {
+ 'cpe_public': ['10.2.0.2', '10.2.0.1'],
+ 'cpe_signal': ['10.4.0.2', '10.4.0.1'],
+ 'brg_bng': ['10.3.0.2', '10.3.0.1'],
+ 'bng_mux': ['10.1.0.10', '10.1.0.1'],
+ 'mux_gw': ['10.5.0.10', '10.5.0.1']
+ }
+
+ global_subscriber_id = 'SDN-ETHERNET-INTERNET'
+
+ def __init__(self, extra_host_names=None):
+ self.logger = logging.getLogger(__name__)
+ self.logger.info('Initializing configuration')
+
+ self.host_names = ['so', 'sdnc', 'robot', 'aai-inst1', 'dcaedoks00']
+ if extra_host_names:
+ self.host_names.extend(extra_host_names)
+ # get IP addresses
+ self.hosts = self.get_vm_ip(self.host_names, self.external_net_addr, self.external_net_prefix_len)
+ # this is the keyword used to name vgw stack, must not be used in other stacks
+ self.vgw_name_keyword = 'base_vcpe_vgw'
+ self.svc_instance_uuid_file = '__var/svc_instance_uuid'
+ self.preload_dict_file = '__var/preload_dict'
+ self.vgmux_vnf_name_file = '__var/vgmux_vnf_name'
+ self.product_family_id = 'f9457e8c-4afd-45da-9389-46acd9bf5116'
+ self.custom_product_family_id = 'a9a77d5a-123e-4ca2-9eb9-0b015d2ee0fb'
+ self.instance_name_prefix = {
+ 'service': 'vcpe_svc',
+ 'network': 'vcpe_net',
+ 'vnf': 'vcpe_vnf',
+ 'vfmodule': 'vcpe_vfmodule'
+ }
+ self.aai_userpass = 'AAI', 'AAI'
+ self.pub_key = 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKXDgoo3+WOqcUG8/5uUbk81+yczgwC4Y8ywTmuQqbNxlY1oQ0YxdMUqUnhitSXs5S/yRuAVOYHwGg2mCs20oAINrP+mxBI544AMIb9itPjCtgqtE2EWo6MmnFGbHB4Sx3XioE7F4VPsh7japsIwzOjbrQe+Mua1TGQ5d4nfEOQaaglXLLPFfuc7WbhbJbK6Q7rHqZfRcOwAMXgDoBqlyqKeiKwnumddo2RyNT8ljYmvB6buz7KnMinzo7qB0uktVT05FH9Rg0CTWH5norlG5qXgP2aukL0gk1ph8iAt7uYLf1ktp+LJI2gaF6L0/qli9EmVCSLr1uJ38Q8CBflhkh'
+ self.os_tenant_id = self.cloud['--os-tenant-id']
+ self.os_region_name = self.cloud['--os-region-name']
+ self.common_preload_config['pub_key'] = self.pub_key
+ self.sniro_url = 'http://' + self.hosts['robot'] + ':8080/__admin/mappings'
+ self.sniro_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+
+ #############################################################################################
+ # SDNC urls
+ self.sdnc_userpass = 'admin', 'Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U'
+ self.sdnc_db_name = 'sdnctl'
+ self.sdnc_db_user = 'sdnctl'
+ self.sdnc_db_pass = 'gamma'
+ self.sdnc_db_port = '32768'
+ self.sdnc_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+ self.sdnc_preload_network_url = 'http://' + self.hosts['sdnc'] + \
+ ':8282/restconf/operations/VNF-API:preload-network-topology-operation'
+ self.sdnc_preload_vnf_url = 'http://' + self.hosts['sdnc'] + \
+ ':8282/restconf/operations/VNF-API:preload-vnf-topology-operation'
+ self.sdnc_ar_cleanup_url = 'http://' + self.hosts['sdnc'] + ':8282/restconf/config/GENERIC-RESOURCE-API:'
+
+ #############################################################################################
+ # SO urls, note: do NOT add a '/' at the end of the url
+ self.so_req_api_url = {'v4': 'http://' + self.hosts['so'] + ':8080/ecomp/mso/infra/serviceInstances/v4',
+ 'v5': 'http://' + self.hosts['so'] + ':8080/ecomp/mso/infra/serviceInstances/v5'}
+ self.so_check_progress_api_url = 'http://' + self.hosts['so'] + ':8080/ecomp/mso/infra/orchestrationRequests/v2'
+ self.so_userpass = 'InfraPortalClient', 'password1$'
+ self.so_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+ self.so_db_name = 'mso_catalog'
+ self.so_db_user = 'root'
+ self.so_db_pass = 'password'
+ self.so_db_port = '32768'
+
+ self.vpp_inf_url = 'http://{0}:8183/restconf/config/ietf-interfaces:interfaces'
+ self.vpp_api_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+ self.vpp_api_userpass = ('admin', 'admin')
+ self.vpp_ves_url= 'http://{0}:8183/restconf/config/vesagent:vesagent'
+
+ def headbridge(self, openstack_stack_name, svc_instance_uuid):
+ """
+ Add vserver information to AAI
+ """
+ self.logger.info('Adding vServer information to AAI for {0}'.format(openstack_stack_name))
+ cmd = '/opt/demo.sh heatbridge {0} {1} vCPE'.format(openstack_stack_name, svc_instance_uuid)
+ ret = commands.getstatusoutput("ssh -i onap_dev root@{0} '{1}'".format(self.hosts['robot'], cmd))
+ self.logger.debug('%s', ret)
+
+ def get_brg_mac_from_sdnc(self):
+ """
+ :return: BRG MAC address. Currently we only support one BRG instance.
+ """
+ cnx = mysql.connector.connect(user=self.sdnc_db_user, password=self.sdnc_db_pass, database=self.sdnc_db_name,
+ host=self.hosts['sdnc'], port=self.sdnc_db_port)
+ cursor = cnx.cursor()
+ query = "SELECT * from DHCP_MAP"
+ cursor.execute(query)
+
+ self.logger.debug('DHCP_MAP table in SDNC')
+ counter = 0
+ mac = None
+ for mac, ip in cursor:
+ counter += 1
+ self.logger.debug(mac + ':' + ip)
+
+ cnx.close()
+
+ if counter != 1:
+ self.logger.error('Found %s MAC addresses in DHCP_MAP', counter)
+ sys.exit()
+ else:
+ self.logger.debug('Found MAC addresses in DHCP_MAP: %s', mac)
+ return mac
+
+ def insert_into_sdnc_db(self, cmds):
+ cnx = mysql.connector.connect(user=self.sdnc_db_user, password=self.sdnc_db_pass, database=self.sdnc_db_name,
+ host=self.hosts['sdnc'], port=self.sdnc_db_port)
+ cursor = cnx.cursor()
+ for cmd in cmds:
+ self.logger.debug(cmd)
+ cursor.execute(cmd)
+ self.logger.debug('%s', cursor)
+ cnx.commit()
+ cursor.close()
+ cnx.close()
+
+ def insert_into_so_db(self, cmds):
+ cnx = mysql.connector.connect(user=self.so_db_user, password=self.so_db_pass, database=self.so_db_name,
+ host=self.hosts['so'], port=self.so_db_port)
+ cursor = cnx.cursor()
+ for cmd in cmds:
+ self.logger.debug(cmd)
+ cursor.execute(cmd)
+ self.logger.debug('%s', cursor)
+ cnx.commit()
+ cursor.close()
+ cnx.close()
+
+ def find_file(self, file_name_keyword, file_ext, search_dir):
+ """
+ :param file_name_keyword: keyword used to look for the csar file, case insensitive matching, e.g, infra
+ :param file_ext: e.g., csar, json
+ :param search_dir path to search
+ :return: path name of the file
+ """
+ file_name_keyword = file_name_keyword.lower()
+ file_ext = file_ext.lower()
+ if not file_ext.startswith('.'):
+ file_ext = '.' + file_ext
+
+ filenamepath = None
+ for file_name in os.listdir(search_dir):
+ file_name_lower = file_name.lower()
+ if file_name_keyword in file_name_lower and file_name_lower.endswith(file_ext):
+ if filenamepath:
+ self.logger.error('Multiple files found for *{0}*.{1} in '
+ 'directory {2}'.format(file_name_keyword, file_ext, search_dir))
+ sys.exit()
+ filenamepath = os.path.abspath(os.path.join(search_dir, file_name))
+
+ if filenamepath:
+ return filenamepath
+ else:
+ self.logger.error("Cannot find *{0}*{1} in directory {2}".format(file_name_keyword, file_ext, search_dir))
+ sys.exit()
+
+ @staticmethod
+ def network_name_to_subnet_name(network_name):
+ """
+ :param network_name: example: vcpe_net_cpe_signal_201711281221
+ :return: vcpe_net_cpe_signal_subnet_201711281221
+ """
+ fields = network_name.split('_')
+ fields.insert(-1, 'subnet')
+ return '_'.join(fields)
+
+ def set_network_name(self, network_name):
+ param = ' '.join([k + ' ' + v for k, v in self.cloud.items()])
+ openstackcmd = 'openstack ' + param
+ cmd = ' '.join([openstackcmd, 'network set --name', network_name, 'ONAP-NW1'])
+ os.popen(cmd)
+
+ def set_subnet_name(self, network_name):
+ """
+ Example: network_name = vcpe_net_cpe_signal_201711281221
+ set subnet name to vcpe_net_cpe_signal_subnet_201711281221
+ :return:
+ """
+ param = ' '.join([k + ' ' + v for k, v in self.cloud.items()])
+ openstackcmd = 'openstack ' + param
+
+ # expected results: | subnets | subnet_id |
+ subnet_info = os.popen(openstackcmd + ' network show ' + network_name + ' |grep subnets').read().split('|')
+ if len(subnet_info) > 2 and subnet_info[1].strip() == 'subnets':
+ subnet_id = subnet_info[2].strip()
+ subnet_name = self.network_name_to_subnet_name(network_name)
+ cmd = ' '.join([openstackcmd, 'subnet set --name', subnet_name, subnet_id])
+ os.popen(cmd)
+ self.logger.info("Subnet name set to: " + subnet_name)
+ return True
+ else:
+ self.logger.error("Can't get subnet info from network name: " + network_name)
+ return False
+
+ def is_node_in_aai(self, node_type, node_uuid):
+ key = None
+ search_node_type = None
+ if node_type == 'service':
+ search_node_type = 'service-instance'
+ key = 'service-instance-id'
+ elif node_type == 'vnf':
+ search_node_type = 'generic-vnf'
+ key = 'vnf-id'
+ else:
+ logging.error('Invalid node_type: ' + node_type)
+ sys.exit()
+
+ url = 'https://{0}:8443/aai/v11/search/nodes-query?search-node-type={1}&filter={2}:EQUALS:{3}'.format(
+ self.hosts['aai-inst1'], search_node_type, key, node_uuid)
+
+ headers = {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-FromAppID': 'vCPE-Robot'}
+ requests.packages.urllib3.disable_warnings()
+ r = requests.get(url, headers=headers, auth=self.aai_userpass, verify=False)
+ response = r.json()
+ self.logger.debug('aai query: ' + url)
+ self.logger.debug('aai response:\n' + json.dumps(response, indent=4, sort_keys=True))
+ return 'result-data' in response
+
+ @staticmethod
+ def extract_ip_from_str(net_addr, net_addr_len, sz):
+ """
+ :param net_addr: e.g. 10.5.12.0
+ :param net_addr_len: e.g. 24
+ :param sz: a string
+ :return: the first IP address matching the network, e.g. 10.5.12.3
+ """
+ network = ipaddress.ip_network(unicode('{0}/{1}'.format(net_addr, net_addr_len)), strict=False)
+ ip_list = re.findall(r'[0-9]+(?:\.[0-9]+){3}', sz)
+ for ip in ip_list:
+ this_net = ipaddress.ip_network(unicode('{0}/{1}'.format(ip, net_addr_len)), strict=False)
+ if this_net == network:
+ return str(ip)
+ return None
+
+ def get_vm_ip(self, keywords, net_addr=None, net_addr_len=None):
+ """
+ :param keywords: list of keywords to search for vm, e.g. ['bng', 'gmux', 'brg']
+ :param net_addr: e.g. 10.12.5.0
+ :param net_addr_len: e.g. 24
+ :return: dictionary {keyword: ip}
+ """
+ if not net_addr:
+ net_addr = self.external_net_addr
+
+ if not net_addr_len:
+ net_addr_len = self.external_net_prefix_len
+
+ param = ' '.join([k + ' ' + v for k, v in self.cloud.items() if 'identity' not in k])
+ openstackcmd = 'nova ' + param + ' list'
+ self.logger.debug(openstackcmd)
+
+ ip_dict = {}
+ results = os.popen(openstackcmd).read()
+ for line in results.split('\n'):
+ fields = line.split('|')
+ if len(fields) == 8:
+ vm_name = fields[2]
+ ip_info = fields[-2]
+ for keyword in keywords:
+ if keyword in vm_name:
+ ip = self.extract_ip_from_str(net_addr, net_addr_len, ip_info)
+ if ip:
+ ip_dict[keyword] = ip
+ if len(ip_dict) != len(keywords):
+ self.logger.error('Cannot find all desired IP addresses for %s.', keywords)
+ self.logger.error(json.dumps(ip_dict, indent=4, sort_keys=True))
+ sys.exit()
+ return ip_dict
+
+ def del_vgmux_ves_mode(self):
+ url = self.vpp_ves_url.format(self.hosts['mux']) + '/mode'
+ r = requests.delete(url, headers=self.vpp_api_headers, auth=self.vpp_api_userpass)
+ self.logger.debug('%s', r)
+
+ def del_vgmux_ves_collector(self):
+ url = self.vpp_ves_url.format(self.hosts['mux']) + '/config'
+ r = requests.delete(url, headers=self.vpp_api_headers, auth=self.vpp_api_userpass)
+ self.logger.debug('%s', r)
+
+ def set_vgmux_ves_collector(self ):
+ url = self.vpp_ves_url.format(self.hosts['mux'])
+ data = {'config':
+ {'server-addr': self.hosts['dcaedoks00'],
+ 'server-port': '8080',
+ 'read-interval': '10',
+ 'is-add':'1'
+ }
+ }
+ r = requests.post(url, headers=self.vpp_api_headers, auth=self.vpp_api_userpass, json=data)
+ self.logger.debug('%s', r)
+
+ def set_vgmux_packet_loss_rate(self, lossrate, vg_vnf_instance_name):
+ url = self.vpp_ves_url.format(self.hosts['mux'])
+ data = {"mode":
+ {"working-mode": "demo",
+ "base-packet-loss": str(lossrate),
+ "source-name": vg_vnf_instance_name
+ }
+ }
+ r = requests.post(url, headers=self.vpp_api_headers, auth=self.vpp_api_userpass, json=data)
+ self.logger.debug('%s', r)
+
+ # return all the VxLAN interface names of BRG or vGMUX based on the IP address
+ def get_vxlan_interfaces(self, ip, print_info=False):
+ url = self.vpp_inf_url.format(ip)
+ self.logger.debug('url is this: %s', url)
+ r = requests.get(url, headers=self.vpp_api_headers, auth=self.vpp_api_userpass)
+ data = r.json()['interfaces']['interface']
+ if print_info:
+ for inf in data:
+ if 'name' in inf and 'type' in inf and inf['type'] == 'v3po:vxlan-tunnel':
+ print(json.dumps(inf, indent=4, sort_keys=True))
+
+ return [inf['name'] for inf in data if 'name' in inf and 'type' in inf and inf['type'] == 'v3po:vxlan-tunnel']
+
+ # delete all VxLAN interfaces of each hosts
+ def delete_vxlan_interfaces(self, host_dic):
+ for host, ip in host_dic.items():
+ deleted = False
+ self.logger.info('{0}: Getting VxLAN interfaces'.format(host))
+ inf_list = self.get_vxlan_interfaces(ip)
+ for inf in inf_list:
+ deleted = True
+ time.sleep(2)
+ self.logger.info("{0}: Deleting VxLAN crossconnect {1}".format(host, inf))
+ url = self.vpp_inf_url.format(ip) + '/interface/' + inf + '/v3po:l2'
+ requests.delete(url, headers=self.vpp_api_headers, auth=self.vpp_api_userpass)
+
+ for inf in inf_list:
+ deleted = True
+ time.sleep(2)
+ self.logger.info("{0}: Deleting VxLAN interface {1}".format(host, inf))
+ url = self.vpp_inf_url.format(ip) + '/interface/' + inf
+ requests.delete(url, headers=self.vpp_api_headers, auth=self.vpp_api_userpass)
+
+ if len(self.get_vxlan_interfaces(ip)) > 0:
+ self.logger.error("Error deleting VxLAN from {0}, try to restart the VM, IP is {1}.".format(host, ip))
+ return False
+
+ if not deleted:
+ self.logger.info("{0}: no VxLAN interface found, nothing to delete".format(host))
+ return True
+
+ @staticmethod
+ def save_object(obj, filepathname):
+ with open(filepathname, 'wb') as fout:
+ pickle.dump(obj, fout)
+
+ @staticmethod
+ def load_object(filepathname):
+ with open(filepathname, 'rb') as fin:
+ return pickle.load(fin)
+
+ def save_preload_data(self, preload_data):
+ self.save_object(preload_data, self.preload_dict_file)
+
+ def load_preload_data(self):
+ return self.load_object(self.preload_dict_file)
+
+ def save_vgmux_vnf_name(self, vgmux_vnf_name):
+ self.save_object(vgmux_vnf_name, self.vgmux_vnf_name_file)
+
+ def load_vgmux_vnf_name(self):
+ return self.load_object(self.vgmux_vnf_name_file)
+
diff --git a/version-manifest/pom.xml b/version-manifest/pom.xml
index 20a7210fb..b2102d5dc 100644
--- a/version-manifest/pom.xml
+++ b/version-manifest/pom.xml
@@ -4,11 +4,11 @@
<parent>
<groupId>org.onap.oparent</groupId>
<artifactId>oparent</artifactId>
- <version>0.1.1</version>
+ <version>1.1.0</version>
</parent>
<groupId>org.onap.integration</groupId>
<artifactId>version-manifest</artifactId>
- <version>1.0.0-SNAPSHOT</version>
+ <version>1.0.1-SNAPSHOT</version>
<packaging>maven-plugin</packaging>
<name>ONAP Version Manifest and Maven Plugin</name>
<url>https://www.onap.org</url>
@@ -92,7 +92,7 @@
<artifactId>exec-maven-plugin</artifactId>
<version>1.6.0</version>
<configuration>
- <executable>sort</executable>
+ <executable>bash</executable>
</configuration>
<executions>
<execution>
@@ -103,7 +103,7 @@
</goals>
<configuration>
<arguments>
- <argument>-c</argument>
+ <argument>${project.basedir}/src/main/scripts/check-sorted.sh</argument>
<argument>${project.basedir}/src/main/resources/docker-manifest.csv</argument>
</arguments>
</configuration>
@@ -116,7 +116,7 @@
</goals>
<configuration>
<arguments>
- <argument>-c</argument>
+ <argument>${project.basedir}/src/main/scripts/check-sorted.sh</argument>
<argument>${project.basedir}/src/main/resources/java-manifest.csv</argument>
</arguments>
</configuration>
diff --git a/version-manifest/src/main/resources/docker-manifest.csv b/version-manifest/src/main/resources/docker-manifest.csv
index e9443d1b5..f0dc07849 100644
--- a/version-manifest/src/main/resources/docker-manifest.csv
+++ b/version-manifest/src/main/resources/docker-manifest.csv
@@ -1,74 +1,87 @@
image,tag
-onap/aai/esr-gui,v1.0.0
-onap/aai/esr-server,v1.0.0
+onap/aaf,latest
onap/aai-resources,1.2-STAGING-latest
onap/aai-traversal,1.2-STAGING-latest
-onap/admportal-sdnc-image,v1.2.1
-onap/ccsdk-dgbuilder-image,v0.1.0
-onap/ccsdk-odl-image,v0.1.0
-onap/ccsdk-odlsli-image,v0.1.0
+onap/aai/esr-gui,1.1.0-SNAPSHOT
+onap/aai/esr-server,1.1.0-SNAPSHOT
+onap/admportal-sdnc-image,1.3-STAGING-latest
+onap/appc-image,1.3.0-SNAPSHOT-latest
+onap/ccsdk-dgbuilder-image,0.2.1-SNAPSHOT
+onap/ccsdk-odl-image,0.2.1-SNAPSHOT
+onap/ccsdk-odlsli-image,0.2.1-SNAPSHOT
onap/clamp,2.0-STAGING-latest
onap/cli,v1.1.0
onap/data-router,1.2-STAGING-latest
onap/dmaap/dmaap-mr,1.0.1
+onap/externalapi/nbi,latest
onap/holmes/engine-management,v1.0.0
onap/holmes/rule-management,v1.0.0
-onap/modeling/javatoscachecker,v1.0.0
onap/model-loader,1.2-STAGING-latest
-onap/msb/msb_apigateway,1.0.0
-onap/msb/msb_discovery,1.0.0
+onap/modeling/javatoscachecker,latest
+onap/msb/msb_apigateway,1.1.0-STAGING-latest
+onap/msb/msb_discovery,1.1.0-STAGING-latest
onap/multicloud/framework,v1.0.0
-onap/multicloud/openstack-newton,v1.0.0
-onap/multicloud/openstack-ocata,v1.0.0
-onap/multicloud/openstack-windriver,v1.0.0
-onap/multicloud/vio,v1.0.0
-onap/multicloud/vio-vesagent,v1.0.0
+onap/multicloud/openstack-newton,1.0.0-SNAPSHOT
+onap/multicloud/openstack-ocata,1.0.0-SNAPSHOT
+onap/multicloud/openstack-windriver,1.0.0-SNAPSHOT
+onap/multicloud/vio,1.0.0-SNAPSHOT
+onap/multicloud/vio-vesagent,1.0.0
+onap/music,latest
+onap/oof,latest
onap/oom/kube2msb,1.0.0
-onap/org.onap.dcaegen2.collectors.ves.vescollector,v1.1.0
-onap/org.onap.dcaegen2.deployments.bootstrap,v1.1.1
+onap/org.onap.dcaegen2.collectors.snmptrap,latest
+onap/org.onap.dcaegen2.collectors.ves.vescollector,latest
+onap/org.onap.dcaegen2.deployments.bootstrap,1.1.2
+onap/org.onap.dcaegen2.deployments.cm-container,latest
+onap/org.onap.dcaegen2.deployments.k8s-bootstrap-container,latest
+onap/org.onap.dcaegen2.deployments.redis-cluster-container,latest
+onap/org.onap.dcaegen2.deployments.tca-cdap-container,latest
onap/org.onap.dcaegen2.platform.cdapbroker,v4.0.0
-onap/org.onap.dcaegen2.platform.configbinding,v1.2.0
-onap/org.onap.dcaegen2.platform.deployment-handler,v1.1.0
-onap/org.onap.dcaegen2.platform.inventory-api,v1.2.0
-onap/org.onap.dcaegen2.platform.policy-handler,v1.1.0
-onap/org.onap.dcaegen2.platform.servicechange-handler,v1.0.0
-onap/policy/policy-db,v1.1.1
-onap/policy/policy-drools,v1.1.1
-onap/policy/policy-nexus,v1.1.1
-onap/policy/policy-pe,v1.1.1
-onap/portal-apps,v1.3.0
-onap/portal-db,v1.3.0
-onap/portal-wms,v1.3.0
-onap/sdc/sdc-workflow-designer,1.0.0-STAGING-latest
-onap/sdnc-dmaap-listener-image,v1.2.1
-onap/sdnc-image,v1.2.1
-onap/sdnc-ueb-listener-image,v1.2.1
+onap/org.onap.dcaegen2.platform.configbinding,latest
+onap/org.onap.dcaegen2.platform.deployment-handler,latest
+onap/org.onap.dcaegen2.platform.inventory-api,latest
+onap/org.onap.dcaegen2.platform.policy-handler,latest
+onap/org.onap.dcaegen2.platform.servicechange-handler,latest
+onap/org.onap.dcaegen2.services.heartbeat,latest
+onap/org.onap.dcaegen2.services.prh.prh-app-server,latest
+onap/policy-drools,1.2-STAGING-latest
+onap/policy-pe,1.2-STAGING-latest
+onap/portal-app,2.1-STAGING-latest
+onap/portal-db,2.1-STAGING-latest
+onap/portal-wms,2.1-STAGING-latest
+onap/sdc-backend,1.2-STAGING-latest
+onap/sdc-elasticsearch,1.2-STAGING-latest
+onap/sdc-frontend,1.2-STAGING-latest
+onap/sdc-kibana,1.2-STAGING-latest
+onap/sdc-sanity,1.2-STAGING-latest
+onap/sdc/sdc-workflow-designer,1.1.0-SNAPSHOT-STAGING-latest
+onap/sdnc-dmaap-listener-image,1.3-STAGING-latest
+onap/sdnc-image,1.3-STAGING-latest
+onap/sdnc-ueb-listener-image,1.3-STAGING-latest
onap/search-data-service,1.2-STAGING-latest
onap/sparky-be,1.2-STAGING-latest
-onap/usecase-ui/usecase-ui-server,v1.0.1
-onap/usecase-ui,v1.0.1
-onap/vfc/catalog,v1.0.2
-onap/vfc/emsdriver,v1.0.1
-onap/vfc/gvnfmdriver,v1.0.1
-onap/vfc/jujudriver,v1.0.0
-onap/vfc/nfvo/svnfm/huawei,v1.0.2
-onap/vfc/nfvo/svnfm/nokia,v1.0.2
-onap/vfc/nslcm,v1.0.2
-onap/vfc/resmanagement,v1.0.0
-onap/vfc/vnflcm,v1.0.1
-onap/vfc/vnfmgr,v1.0.1
-onap/vfc/vnfres,v1.0.1
-onap/vfc/wfengine-activiti,v1.0.0
-onap/vfc/wfengine-mgrservice,v1.0.0
-onap/vfc/ztesdncdriver,v1.0.0
-onap/vfc/ztevnfmdriver,v1.0.2
+onap/testsuite,1.2-STAGING-latest
+onap/usecase-ui,1.1.0-SNAPSHOT-STAGING-latest
+onap/usecase-ui/usecase-ui-server,1.1.0-SNAPSHOT-STAGING-latest
+onap/vfc/catalog,1.1.0-STAGING-latest
+onap/vfc/emsdriver,1.1.0-STAGING-latest
+onap/vfc/gvnfmdriver,1.1.0-STAGING-latest
+onap/vfc/jujudriver,1.1.0-STAGING-latest
+onap/vfc/multivimproxy,1.0.0-STAGING-latest
+onap/vfc/nfvo/svnfm/huawei,1.1.0-STAGING-latest
+onap/vfc/nfvo/svnfm/nokia,1.1.0-STAGING-latest
+onap/vfc/nfvo/svnfm/nokiav2,1.1.0-STAGING-latest
+onap/vfc/nslcm,1.1.0-STAGING-latest
+onap/vfc/resmanagement,1.1.0-STAGING-latest
+onap/vfc/vnflcm,1.1.0-STAGING-latest
+onap/vfc/vnfmgr,1.1.0-STAGING-latest
+onap/vfc/vnfres,1.1.0-STAGING-latest
+onap/vfc/wfengine-activiti,latest
+onap/vfc/wfengine-mgrservice,latest
+onap/vfc/ztesdncdriver,1.1.0-STAGING-latest
+onap/vfc/ztevmanagerdriver,1.0.3-STAGING-latest
+onap/vfc/ztevnfmdriver,1.1.0-STAGING-latest
onap/vid,1.2.1
onap/vnfsdk/refrepo,1.0.0
onap/vnfsdk/refrepo-postgres,1.0.0
-openecomp/appc-image,v1.2.0
-openecomp/mso,v1.1.1
-openecomp/sdc-backend,v1.1.0
-openecomp/sdc-elasticsearch,v1.1.0
-openecomp/sdc-frontend,v1.1.0
-openecomp/sdc-kibana,v1.1.0
-openecomp/sdc-sanity,v1.1.0
+openecomp/mso,1.2.1
diff --git a/version-manifest/src/main/resources/java-manifest.csv b/version-manifest/src/main/resources/java-manifest.csv
index 5f0857534..4bed11cda 100644
--- a/version-manifest/src/main/resources/java-manifest.csv
+++ b/version-manifest/src/main/resources/java-manifest.csv
@@ -1,10 +1,35 @@
groupId,artifactId,version
-org.onap.aai.aai-common,aai-annotations,1.2.0
-org.onap.aai.aai-common,aai-auth,1.2.0
-org.onap.aai.aai-common,aai-common,1.2.0
-org.onap.aai.aai-common,aai-core,1.2.0
-org.onap.aai.aai-common,aai-schema,1.2.0
-org.onap.aai.aai-common,aai-utils,1.2.0
+org.onap.aaf.authz,aaf-auth-batch,2.1.0
+org.onap.aaf.authz,aaf-auth-cass,2.1.0
+org.onap.aaf.authz,aaf-auth-certman,2.1.0
+org.onap.aaf.authz,aaf-auth-client,2.1.0
+org.onap.aaf.authz,aaf-auth-cmd,2.1.0
+org.onap.aaf.authz,aaf-auth-core,2.1.0
+org.onap.aaf.authz,aaf-auth-deforg,2.1.0
+org.onap.aaf.authz,aaf-auth-fs,2.1.0
+org.onap.aaf.authz,aaf-auth-gui,2.1.0
+org.onap.aaf.authz,aaf-auth-hello,2.1.0
+org.onap.aaf.authz,aaf-auth-locate,2.1.0
+org.onap.aaf.authz,aaf-auth-oauth,2.1.0
+org.onap.aaf.authz,aaf-auth-service,2.1.0
+org.onap.aaf.authz,aaf-cadi-aaf,2.1.0
+org.onap.aaf.authz,aaf-cadi-cass,2.1.0
+org.onap.aaf.authz,aaf-cadi-client,2.1.0
+org.onap.aaf.authz,aaf-cadi-core,2.1.0
+org.onap.aaf.authz,aaf-cadi-oauth-enduser,2.1.0
+org.onap.aaf.authz,aaf-cadi-shiro,2.1.0
+org.onap.aaf.authz,aaf-misc-env,2.1.0
+org.onap.aaf.authz,aaf-misc-log4j,2.1.0
+org.onap.aaf.authz,aaf-misc-rosetta,2.1.0
+org.onap.aaf.authz,aaf-misc-xgen,2.1.0
+org.onap.aai,rest-client,1.2.0
+org.onap.aai,search-data-service,1.2.0
+org.onap.aai.aai-common,aai-annotations,1.2.1
+org.onap.aai.aai-common,aai-auth,1.2.1
+org.onap.aai.aai-common,aai-common,1.2.1
+org.onap.aai.aai-common,aai-core,1.2.1
+org.onap.aai.aai-common,aai-schema,1.2.1
+org.onap.aai.aai-common,aai-utils,1.2.1
org.onap.aai.data-router,data-router,1.2.0
org.onap.aai.esr-gui,aai-esr-gui,1.0.0
org.onap.aai.esr-gui,common,1.0.0
@@ -25,9 +50,7 @@ org.onap.aai.logging-service,logging-service,1.2.0
org.onap.aai.model-loader,model-loader,1.2.0
org.onap.aai.resources,aai-resources,1.2.0
org.onap.aai.resources,resources,1.2.0
-org.onap.aai,rest-client,1.2.0
org.onap.aai.router-core,router-core,1.2.0
-org.onap.aai,search-data-service,1.2.0
org.onap.aai.sparky-be,sparky-be,1.2.0
org.onap.aai.sparky-fe,sparky-fe,1.2.0
org.onap.aai.traversal,aai-traversal,1.2.0
@@ -39,11 +62,11 @@ org.onap.ccsdk.sli.adaptors,resource-assignment-provider,0.1.0
org.onap.ccsdk.sli.adaptors,sql-resource-provider,0.1.0
org.onap.ccsdk.sli.core,dblib-provider,0.1.2
org.onap.ccsdk.sli.core,filters-provider,0.1.2
-org.onap.ccsdk.sli.core,sliapi-provider,0.1.2
org.onap.ccsdk.sli.core,sli-common,0.1.2
-org.onap.ccsdk.sli.core,sliPluginUtils-provider,0.1.2
org.onap.ccsdk.sli.core,sli-provider,0.1.2
org.onap.ccsdk.sli.core,sli-recording,0.1.2
+org.onap.ccsdk.sli.core,sliPluginUtils-provider,0.1.2
+org.onap.ccsdk.sli.core,sliapi-provider,0.1.2
org.onap.ccsdk.sli.core,utils-provider,1.0.0
org.onap.ccsdk.sli.northbound,asdcApi-provider,0.1.0
org.onap.ccsdk.sli.northbound,dataChange-provider,0.1.0
@@ -54,7 +77,7 @@ org.onap.ccsdk.sli.plugins,properties-node-provider,0.1.0
org.onap.ccsdk.sli.plugins,restapi-call-node-provider,0.1.0
org.onap.ccsdk.storage.pgaas,pgaas,1.0.0
org.onap.ccsdk.utils,utils,1.0.0
-org.onap.clamp.clds.clamp,clamp,1.1.0
+org.onap.clamp.clds.clamp,clamp,2.0.0
org.onap.cli,cli-framework,1.1.0
org.onap.cli,cli-main,1.1.0
org.onap.cli,cli-plugins-aai,1.1.0
@@ -66,21 +89,25 @@ org.onap.cli,cli-sample-mock-generator,1.1.0
org.onap.cli,cli-sample-yaml-generator,1.1.0
org.onap.cli,cli-validation,1.1.0
org.onap.cli,cli-zip,1.1.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-aai,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-cdap-common,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-cdap-plugins,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-cdap-tca,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-common,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-dmaap,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-it,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-model,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-tca,2.0.0
-org.onap.dcaegen2.analytics.tca,dcae-analytics-test,2.0.0
-org.onap.dcaegen2.collectors.ves,VESCollector,1.1.4
org.onap.dcaegen2,dcaegen2,1.1.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-aai,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-cdap-common,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-cdap-plugins,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-cdap-tca,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-common,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-dmaap,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-it,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-model,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-tca,2.2.0
+org.onap.dcaegen2.analytics.tca,dcae-analytics-test,2.2.0
+org.onap.dcaegen2.collectors.ves,VESCollector,1.2.4
org.onap.dcaegen2.platform,inventory-api,1.0.0
org.onap.dcaegen2.platform,servicechange-handler,1.0.0
+org.onap.dcaegen2.services,prh,1.0.0
+org.onap.dcaegen2.services.prh,prh-aai-client,1.0.0
+org.onap.dcaegen2.services.prh,prh-app-server,1.0.0
+org.onap.dcaegen2.services.prh,prh-dmaap-client,1.0.0
org.onap.dmaap.messagerouter.dmaapclient,dmaapClient,1.0.0
org.onap.dmaap.messagerouter.messageservice,dmaapMR1,1.0.1
org.onap.dmaap.messagerouter.mirroragent,dmaapMMAgent,1.0.0
@@ -90,22 +117,22 @@ org.onap.holmes.dsa,dmaap-dsa,1.0.0
org.onap.holmes.engine-management,holmes-engine-d,1.0.0
org.onap.holmes.rule-management,holmes-rulemgt,1.0.0
org.onap.modeling.toscaparsers,Checker,1.0.0
+org.onap.modeling.toscaparsers,Service,1.0.0
org.onap.modeling.toscaparsers,kwalify,1.0.0
org.onap.modeling.toscaparsers.nfvparser,modeling-toscaparsers-nfvparser,1.0.0
-org.onap.modeling.toscaparsers,Service,1.0.0
org.onap.msb.java-sdk,msb-java-sdk,1.0.0
org.onap.msb.swagger-sdk,swagger-sdk,1.0.0
org.onap.multicloud.framework,multivimbroker,1.0.0
org.onap.multicloud.openstack,newton,1.0.0
org.onap.multicloud.openstack,ocata,1.0.0
+org.onap.multicloud.openstack,windriver,1.0.0
org.onap.multicloud.openstack.vmware,vesagent,1.0.0
org.onap.multicloud.openstack.vmware,vio,1.0.0
-org.onap.multicloud.openstack,windriver,1.0.0
org.onap.oparent,oparent,1.1.0
-org.onap.policy.common,common-modules,1.1.1
-org.onap.policy.drools-applications,drools-pdp-apps,1.1.1
-org.onap.policy.drools-pdp,drools-pdp,1.1.1
-org.onap.policy.engine,PolicyEngineSuite,1.1.1
+org.onap.policy.common,common-modules,2.1.0
+org.onap.policy.drools-applications,drools-pdp-apps,2.1.0
+org.onap.policy.drools-pdp,drools-pdp,2.1.0
+org.onap.policy.engine,PolicyEngineSuite,2.1.0
org.onap.portal.sdk,epsdk-analytics,1.3.2
org.onap.portal.sdk,epsdk-app-common,1.3.2
org.onap.portal.sdk,epsdk-app-overlay,1.3.2
@@ -116,8 +143,8 @@ org.onap.sdc.sdc-workflow-designer,sdc-workflow-designer,1.0.0
org.onap.sdnc.northbound,generic-resource-api.provider,1.2.2
org.onap.sdnc.northbound,vnfapi-provider,1.2.2
org.onap.sdnc.northbound,vnftools-provider,1.2.2
-org.onap.usecase-ui.server,usecase-ui-server,1.0.1
org.onap.usecase-ui,usecaseui-common,1.0.1
+org.onap.usecase-ui.server,usecase-ui-server,1.0.1
org.onap.vfc.gvnfm.vnflcm.lcm,vfc-gvnfm-vnflcm-lcm,1.0.1
org.onap.vfc.gvnfm.vnfmgr.mgr,vfc-gvnfm-vnfmgr-mgr,1.0.1
org.onap.vfc.gvnfm.vnfres.res,vfc-gvnfm-vnfres-res,1.0.1
@@ -125,14 +152,21 @@ org.onap.vfc.nfvo.catalog,vfc-nfvo-catalog,1.0.2
org.onap.vfc.nfvo.driver.ems.ems,emsdriver-standalone,1.0.1
org.onap.vfc.nfvo.driver.sfc.zte.sfc-driver-standalone,vfc-nfvo-driver-sfc-zte-sfc-driver,1.0.0
org.onap.vfc.nfvo.driver.vnfm.gvnfm.gvnfmadapter,vfc-nfvo-driver-vnfm-gvnfm-gvnfmadapter,1.0.1
-org.onap.vfc.nfvo.driver.vnfm.svnfm.huawei.vnfmadapter,hw-vnfmadapter-deployment,1.0.0
org.onap.vfc.nfvo.driver.vnfm.svnfm,vfcadaptor-deployment,1.0.1
+org.onap.vfc.nfvo.driver.vnfm.svnfm.huawei.vnfmadapter,hw-vnfmadapter-deployment,1.0.0
org.onap.vfc.nfvo.driver.vnfm.svnfm.zte.vmanager,vfc-nfvo-driver-vnfm-svnfm-zte-vmanager,1.0.2
org.onap.vfc.nfvo.lcm,vfc-nfvo-lcm,1.0.2
org.onap.vfc.nfvo.resmanagement,vfc-nfvo-resmanagement-deployment,1.0.0
org.onap.vfc.nfvo.wfengine,activiti-extension,1.0.0
org.onap.vfc.nfvo.wfengine,workflow-engine-mgr-service,1.0.0
-org.onap.vnfsdk.refrepo,vnfmarket,1.0.0
+org.onap.vnfsdk.refrepo,vnf-sdk-marketplace,1.1.0
+org.onap.vnfsdk.refrepo,vnf-sdk-marketplace-core-parent,1.1.0
+org.onap.vnfsdk.refrepo,vnf-sdk-marketplace-deployment,1.1.0
+org.onap.vnfsdk.refrepo,vnfmarket,1.1.0
+org.onap.vnfsdk.refrepo,vnfmarket-deployment,1.1.0
+org.onap.vnfsdk.validation,csarvalidation-deployment,1.1.0
+org.onap.vnfsdk.validation,validation,1.1.1
+org.onap.vnfsdk.validation,validation-csar,1.1.1
org.onap.vnfsdk.ves-agent,evel_javalib2,1.1.0
org.openecomp.appc,appc-aai-client-provider,1.2.0
org.openecomp.appc,appc-ansible-adapter-bundle,1.2.0
@@ -185,31 +219,35 @@ org.openecomp.appc,appc-ssh-adapter-tests,1.2.0
org.openecomp.appc,appc-workflow-management-api,1.2.0
org.openecomp.appc,appc-workflow-management-core,1.2.0
org.openecomp.appc,appc-yang-generator,1.2.0
-org.openecomp.appc.client,client-kit,1.2.0
-org.openecomp.appc.client,client-lib,1.2.0
-org.openecomp.appc.client,client-simulator,1.2.0
-org.openecomp.appc.client,code-generator,1.2.0
org.openecomp.appc,execution-queue-management-lib,1.2.0
org.openecomp.appc,lock-manager-api,1.2.0
org.openecomp.appc,lock-manager-impl,1.2.0
-org.openecomp.appc.plugins,dg-loader-provider,1.2.0
org.openecomp.appc,state-machine-lib,1.2.0
org.openecomp.appc,transaction-recorder,1.2.0
+org.openecomp.appc.client,client-kit,1.2.0
+org.openecomp.appc.client,client-lib,1.2.0
+org.openecomp.appc.client,client-simulator,1.2.0
+org.openecomp.appc.client,code-generator,1.2.0
+org.openecomp.appc.plugins,dg-loader-provider,1.2.0
org.openecomp.sdc.jtosca,jtosca,1.1.1
org.openecomp.sdc.sdc-distribution-client,sdc-main-distribution-client,1.1.32
org.openecomp.sdc.sdc-titan-cassandra,sdc-titan-cassandra,1.0.0
org.openecomp.sdc.sdc-tosca,sdc-tosca,1.1.32
-org.openecomp.so.adapters,mso-adapters-rest-interface,1.1.1
+org.openecomp.so,MSOCommonBPMN,1.1.1
+org.openecomp.so,MSOCoreBPMN,1.1.1
+org.openecomp.so,MSOInfrastructureBPMN,1.1.1
+org.openecomp.so,MSORESTClient,1.1.1
+org.openecomp.so,common,1.1.1
+org.openecomp.so,mso-api-handler-common,1.1.1
+org.openecomp.so,mso-catalog-db,1.1.1
+org.openecomp.so,mso-requests-db,1.1.1
+org.openecomp.so,status-control,1.1.1
org.openecomp.so.adapters,mso-adapter-utils,1.1.1
+org.openecomp.so.adapters,mso-adapters-rest-interface,1.1.1
org.openecomp.so.adapters,mso-network-adapter-async-client,1.1.1
org.openecomp.so.adapters,mso-vnf-adapter-async-client,1.1.1
-org.openecomp.so,common,1.1.1
org.openecomp.so.libs.openstack-java-sdk,ceilometer-client,1.1.0
org.openecomp.so.libs.openstack-java-sdk,ceilometer-model,1.1.0
-org.openecomp.so.libs.openstack-java-sdk.client-connectors,http-connector,1.1.0
-org.openecomp.so.libs.openstack-java-sdk.client-connectors,jersey2-connector,1.1.0
-org.openecomp.so.libs.openstack-java-sdk.client-connectors,jersey-connector,1.1.0
-org.openecomp.so.libs.openstack-java-sdk.client-connectors,resteasy-connector,1.1.0
org.openecomp.so.libs.openstack-java-sdk,glance-client,1.1.0
org.openecomp.so.libs.openstack-java-sdk,glance-model,1.1.0
org.openecomp.so.libs.openstack-java-sdk,heat-client,1.1.0
@@ -223,12 +261,8 @@ org.openecomp.so.libs.openstack-java-sdk,quantum-client,1.1.0
org.openecomp.so.libs.openstack-java-sdk,quantum-model,1.1.0
org.openecomp.so.libs.openstack-java-sdk,swift-client,1.1.0
org.openecomp.so.libs.openstack-java-sdk,swift-model,1.1.0
-org.openecomp.so,mso-api-handler-common,1.1.1
-org.openecomp.so,mso-catalog-db,1.1.1
-org.openecomp.so,MSOCommonBPMN,1.1.1
-org.openecomp.so,MSOCoreBPMN,1.1.1
-org.openecomp.so,MSOInfrastructureBPMN,1.1.1
-org.openecomp.so,mso-requests-db,1.1.1
-org.openecomp.so,MSORESTClient,1.1.1
-org.openecomp.so,status-control,1.1.1
+org.openecomp.so.libs.openstack-java-sdk.client-connectors,http-connector,1.1.0
+org.openecomp.so.libs.openstack-java-sdk.client-connectors,jersey-connector,1.1.0
+org.openecomp.so.libs.openstack-java-sdk.client-connectors,jersey2-connector,1.1.0
+org.openecomp.so.libs.openstack-java-sdk.client-connectors,resteasy-connector,1.1.0
org.openecomp.vid,asdcclient,1.0.2
diff --git a/version-manifest/src/main/scripts/check-sorted.sh b/version-manifest/src/main/scripts/check-sorted.sh
new file mode 100755
index 000000000..d926409f4
--- /dev/null
+++ b/version-manifest/src/main/scripts/check-sorted.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+LC_ALL=C sort -c "$1"
+retval=$?
+if [ $retval -ne 0 ]; then
+ echo
+ echo "ERROR: $1 is not properly sorted. Please sort it with the following commands:"
+ echo
+ echo "    LC_ALL=C sort < \"$1\" > \"$1.tmp\""
+ echo "    mv \"$1.tmp\" \"$1\""
+ echo
+fi
+exit $retval
diff --git a/version.properties b/version.properties
index 49e2720ae..17a717c1c 100644
--- a/version.properties
+++ b/version.properties
@@ -5,7 +5,7 @@
major_version=1
minor_version=0
-patch_version=0
+patch_version=1
base_version=${major_version}.${minor_version}.${patch_version}