Diffstat
-rw-r--r--  deployment/heat/onap-oom/onap-oom.yaml | 20
-rw-r--r--  deployment/heat/onap-oom/parts/onap-oom-1.yaml | 2
-rw-r--r--  deployment/heat/onap-oom/parts/onap-oom-2.yaml | 3
-rwxr-xr-x  deployment/heat/onap-oom/scripts/deploy.sh | 61
-rwxr-xr-x  deployment/heat/onap-oom/scripts/gen-onap-oom-yaml.sh | 4
-rw-r--r--  test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml | 12
-rwxr-xr-x  test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh | 10
-rwxr-xr-x  test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh | 2
-rw-r--r--  test/csit/plans/dcaegen2/prh-testsuites/setup.sh | 22
-rwxr-xr-x  test/csit/plans/dmaap-buscontroller/with_dr/setup.sh | 57
-rwxr-xr-x  test/csit/plans/dmaap-buscontroller/with_dr/teardown.sh | 26
-rwxr-xr-x  test/csit/plans/dmaap-buscontroller/with_dr/testplan.txt | 2
-rwxr-xr-x  test/csit/plans/dmaap-datarouter/dr-suite/setup.sh | 1
-rwxr-xr-x  test/csit/plans/music/music-test-plan/setup.sh | 18
-rwxr-xr-x  test/csit/plans/music/music-test-plan/teardown.sh | 19
-rw-r--r--  test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh | 16
-rw-r--r--  test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh | 6
-rwxr-xr-x  test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh | 6
-rwxr-xr-x  test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh | 2
-rwxr-xr-x  test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh | 11
-rw-r--r--  test/csit/scripts/dmaap-buscontroller/dr-launch.sh | 59
-rwxr-xr-x  test/csit/scripts/policy/script1.sh | 2
-rw-r--r--  test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot | 1
-rw-r--r--  test/csit/tests/clamp/UIs/02__Create_TCA_model.robot | 1
-rw-r--r--  test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py | 66
-rw-r--r--  test/csit/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot (renamed from test/csit/tests/dcaegen2-collectors-hv-ves/testcases/hv-ves.robot) | 55
-rw-r--r--  test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot | 6
-rw-r--r--  test/csit/tests/dcaegen2/prh-testcases/__init__.robot | 2
-rw-r--r--  test/csit/tests/dcaegen2/prh-testcases/prh_tests.robot | 36
-rw-r--r--  test/csit/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py | 22
-rw-r--r--  test/csit/tests/dcaegen2/prh-testcases/resources/docker-compose.yml | 21
-rw-r--r--  test/csit/tests/dcaegen2/prh-testcases/resources/prh_library.robot | 7
-rw-r--r--  test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI.py | 1
-rw-r--r--  test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI_simulator | 10
-rw-r--r--  test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP.py | 3
-rw-r--r--  test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP_simulator | 10
-rwxr-xr-x  test/csit/tests/dmaap-buscontroller/with_dr/orig | 116
-rw-r--r--  test/csit/tests/dmaap-buscontroller/with_dr/test1.robot | 143
-rw-r--r--  test/csit/tests/music/music-suite/music-test.robot | 25
-rw-r--r--  test/csit/tests/policy/suite1/global_properties.robot | 4
-rw-r--r--  test/csit/tests/vfc/nfvo-wfengine/workflow.robot | 58
-rw-r--r--  test/csit/tests/vid/resources/simulators/SDC_simulator | 10
-rw-r--r--  test/ete/labs/gwu/onap-openstack-template.env | 4
-rw-r--r--  test/ete/labs/huawei/onap-openstack-template.env | 4
-rw-r--r--  test/ete/labs/tlab/onap-openstack-template.env | 4
-rw-r--r--  test/ete/labs/windriver/onap-openstack-template.env | 4
-rwxr-xr-x  test/ete/scripts/deploy-onap.sh | 2
-rwxr-xr-x  test/ete/scripts/teardown-onap.sh | 22
-rw-r--r--  test/mocks/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml | 129
-rw-r--r--  version-manifest/src/main/resources/docker-manifest-staging.csv | 6
-rw-r--r--  version-manifest/src/main/resources/docker-manifest.csv | 2
51 files changed, 873 insertions(+), 262 deletions(-)
diff --git a/deployment/heat/onap-oom/onap-oom.yaml b/deployment/heat/onap-oom/onap-oom.yaml
index 5c99bdb75..e8b0ffa7d 100644
--- a/deployment/heat/onap-oom/onap-oom.yaml
+++ b/deployment/heat/onap-oom/onap-oom.yaml
@@ -138,6 +138,8 @@ resources:
router:
type: OS::Neutron::Router
properties:
+ name:
+ list_join: ['-', [{ get_param: 'OS::stack_name' }, 'router']]
external_gateway_info:
network: { get_param: public_net_id }
@@ -164,7 +166,8 @@ resources:
rancher_vm:
type: OS::Nova::Server
properties:
- name: rancher
+ name:
+ list_join: ['-', [{ get_param: 'OS::stack_name' }, 'rancher']]
image: { get_param: ubuntu_1604_image }
flavor: { get_param: rancher_vm_flavor }
key_name: { get_param: key_name }
@@ -227,7 +230,8 @@ resources:
k8s_1_vm:
type: OS::Nova::Server
properties:
- name: k8s_1
+ name:
+ list_join: ['-', [ { get_param: 'OS::stack_name' }, 'k8s_1']]
image: { get_param: ubuntu_1604_image }
flavor: { get_param: k8s_vm_flavor }
key_name: { get_param: key_name }
@@ -262,7 +266,8 @@ resources:
k8s_2_vm:
type: OS::Nova::Server
properties:
- name: k8s_2
+ name:
+ list_join: ['-', [ { get_param: 'OS::stack_name' }, 'k8s_2']]
image: { get_param: ubuntu_1604_image }
flavor: { get_param: k8s_vm_flavor }
key_name: { get_param: key_name }
@@ -297,7 +302,8 @@ resources:
k8s_3_vm:
type: OS::Nova::Server
properties:
- name: k8s_3
+ name:
+ list_join: ['-', [ { get_param: 'OS::stack_name' }, 'k8s_3']]
image: { get_param: ubuntu_1604_image }
flavor: { get_param: k8s_vm_flavor }
key_name: { get_param: key_name }
@@ -332,7 +338,8 @@ resources:
k8s_4_vm:
type: OS::Nova::Server
properties:
- name: k8s_4
+ name:
+ list_join: ['-', [ { get_param: 'OS::stack_name' }, 'k8s_4']]
image: { get_param: ubuntu_1604_image }
flavor: { get_param: k8s_vm_flavor }
key_name: { get_param: key_name }
@@ -367,7 +374,8 @@ resources:
k8s_5_vm:
type: OS::Nova::Server
properties:
- name: k8s_5
+ name:
+ list_join: ['-', [ { get_param: 'OS::stack_name' }, 'k8s_5']]
image: { get_param: ubuntu_1604_image }
flavor: { get_param: k8s_vm_flavor }
key_name: { get_param: key_name }
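Note (illustration, not part of the diff): the name changes above prefix every Heat resource with the stack name via list_join, so servers from different stacks no longer collide on fixed names like "rancher" or "k8s_1". A hedged sketch of checking the effect with the OpenStack CLI (stack and env file names are examples):

    openstack stack create -t onap-oom.yaml -e onap-oom.env oom
    openstack server list --name "oom-.*" -c Name -c Status   # expect oom-rancher, oom-k8s_1, ...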
diff --git a/deployment/heat/onap-oom/parts/onap-oom-1.yaml b/deployment/heat/onap-oom/parts/onap-oom-1.yaml
index 8031505b7..ab79b1ed5 100644
--- a/deployment/heat/onap-oom/parts/onap-oom-1.yaml
+++ b/deployment/heat/onap-oom/parts/onap-oom-1.yaml
@@ -135,6 +135,8 @@ resources:
router:
type: OS::Neutron::Router
properties:
+ name:
+ list_join: ['-', [{ get_param: 'OS::stack_name' }, 'router']]
external_gateway_info:
network: { get_param: public_net_id }
diff --git a/deployment/heat/onap-oom/parts/onap-oom-2.yaml b/deployment/heat/onap-oom/parts/onap-oom-2.yaml
index 463635b8a..e01ba132d 100644
--- a/deployment/heat/onap-oom/parts/onap-oom-2.yaml
+++ b/deployment/heat/onap-oom/parts/onap-oom-2.yaml
@@ -15,7 +15,8 @@
${K8S_VM_NAME}_vm:
type: OS::Nova::Server
properties:
- name: ${K8S_VM_NAME}
+ name:
+ list_join: ['-', [ { get_param: 'OS::stack_name' }, '${K8S_VM_NAME}']]
image: { get_param: ubuntu_1604_image }
flavor: { get_param: k8s_vm_flavor }
key_name: { get_param: key_name }
diff --git a/deployment/heat/onap-oom/scripts/deploy.sh b/deployment/heat/onap-oom/scripts/deploy.sh
index c9cd005c7..c3418faf4 100755
--- a/deployment/heat/onap-oom/scripts/deploy.sh
+++ b/deployment/heat/onap-oom/scripts/deploy.sh
@@ -1,4 +1,4 @@
-#!/bin/bash -x
+#!/bin/bash
#
# Copyright 2018 Huawei Technologies Co., Ltd.
#
@@ -9,18 +9,45 @@
# http://www.apache.org/licenses/LICENSE-2.0
#
-install_name="onap-oom"
+stack_name="oom"
full_deletion=false
if [ -z "$WORKSPACE" ]; then
export WORKSPACE=`git rev-parse --show-toplevel`
fi
-usage() { echo "Usage: $0 [ -r ] <env-name>" 1>&2; exit 1; }
+usage() {
+ echo "Usage: $0 [ -n <number of VMs {2-15}> ] [ -s <stack name> ][ -b <branch name> ][ -r ] <env>" 1>&2;
+ echo "n: Set the number of VMs that will be installed. This number must be between 2 and 15" 1>&2;
+ echo "s: Set the name to be used for the stack. This name will be used for naming of resources" 1>&2;
+ echo "r: Delete all resources relating to ONAP within the environment." 1>&2;
+ echo "q: Quiet Delete of all ONAP resources." 1>&2;
+ exit 1;
+}
+
+
-while getopts ":rq" o; do
+while getopts ":n:s:rq" o; do
case "${o}" in
+ n)
+ if [[ ${OPTARG} =~ ^[0-9]+$ ]];then
+ if [ ${OPTARG} -ge 2 -a ${OPTARG} -le 15 ]; then
+ vm_num=${OPTARG}
+ else
+ usage
+ fi
+ else
+ usage
+ fi
+ ;;
+ s)
+ if [[ ! ${OPTARG} =~ ^[0-9]+$ ]];then
+ stack_name=${OPTARG}
+ else
+ usage
+ fi
+ ;;
r)
echo "The following command will delete all information relating to onap within your enviroment"
read -p "Are you certain this is what you want? (type y to confirm):" answer
@@ -56,6 +83,13 @@ fi
ENV_FILE=$1
+if [ ! -f $ENV_FILE ];then
+ echo ENV file does not exist or was not given
+ exit 1
+fi
+
+set -x
+
SSH_KEY=~/.ssh/onap_key
source $WORKSPACE/test/ete/scripts/install_openstack_cli.sh
@@ -65,23 +99,28 @@ export OS_PASSWORD_ENCRYPTED=$(echo -n "$OS_PASSWORD" | openssl aes-128-ecb -e -
for n in $(seq 1 5); do
if [ $full_deletion = true ] ; then
- $WORKSPACE/test/ete/scripts/teardown-onap.sh -n $install_name -q
+ $WORKSPACE/test/ete/scripts/teardown-onap.sh -n $stack_name -q
else
- $WORKSPACE/test/ete/scripts/teardown-onap.sh -n $install_name
+ $WORKSPACE/test/ete/scripts/teardown-onap.sh -n $stack_name
fi
cd $WORKSPACE/deployment/heat/onap-oom
envsubst < $ENV_FILE > $ENV_FILE~
+ if [ -z "$vm_num" ]; then
+ cp onap-oom.yaml onap-oom.yaml~
+ else
+ ./scripts/gen-onap-oom-yaml.sh $vm_num > onap-oom.yaml~
+ fi
- if ! openstack stack create -t ./$install_name.yaml -e $ENV_FILE~ $install_name; then
+ if ! openstack stack create -t ./onap-oom.yaml~ -e $ENV_FILE~ $stack_name; then
break
fi
- while [ "CREATE_IN_PROGRESS" == "$(openstack stack show -c stack_status -f value $install_name)" ]; do
+ while [ "CREATE_IN_PROGRESS" == "$(openstack stack show -c stack_status -f value $stack_name)" ]; do
sleep 20
done
- STATUS=$(openstack stack show -c stack_status -f value $install_name)
+ STATUS=$(openstack stack show -c stack_status -f value $stack_name)
echo $STATUS
if [ "CREATE_COMPLETE" != "$STATUS" ]; then
break
@@ -89,8 +128,8 @@ for n in $(seq 1 5); do
for i in $(seq 1 30); do
sleep 30
- RANCHER_IP=$(openstack stack output show $install_name rancher_vm_ip -c output_value -f value)
- K8S_IP=$(openstack stack output show $install_name k8s_1_vm_ip -c output_value -f value)
+ RANCHER_IP=$(openstack stack output show $stack_name rancher_vm_ip -c output_value -f value)
+ K8S_IP=$(openstack stack output show $stack_name k8s_1_vm_ip -c output_value -f value)
timeout 1 ping -c 1 "$RANCHER_IP" && break
done
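Note (illustration, not part of the diff): deploy.sh now validates two new options before doing anything, -n for the number of k8s VMs (2-15) and -s for the stack name that prefixes all resources. A hedged usage sketch (the env file path is a placeholder):

    ./deployment/heat/onap-oom/scripts/deploy.sh -n 4 -s oom-test /path/to/lab.env
    # -r tears down existing ONAP resources first (with a prompt); -q does the same without prompting.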
diff --git a/deployment/heat/onap-oom/scripts/gen-onap-oom-yaml.sh b/deployment/heat/onap-oom/scripts/gen-onap-oom-yaml.sh
index 092b2a1fc..41c5de16a 100755
--- a/deployment/heat/onap-oom/scripts/gen-onap-oom-yaml.sh
+++ b/deployment/heat/onap-oom/scripts/gen-onap-oom-yaml.sh
@@ -16,7 +16,6 @@ if [ "$#" -ne 1 ]; then
fi
NUM_K8S_VMS=$1
-
if [ -z "$WORKSPACE" ]; then
export WORKSPACE=`git rev-parse --show-toplevel`
fi
@@ -34,7 +33,8 @@ cat <<EOF
rancher_vm:
type: OS::Nova::Server
properties:
- name: rancher
+ name:
+ list_join: ['-', [{ get_param: 'OS::stack_name' }, 'rancher']]
image: { get_param: ubuntu_1604_image }
flavor: { get_param: rancher_vm_flavor }
key_name: { get_param: key_name }
diff --git a/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml
index 28cded8cb..66cbde22f 100644
--- a/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml
+++ b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml
@@ -42,11 +42,17 @@ services:
command: ["-server", "-bootstrap"]
ves-hv-collector:
- image: $DOCKER_REGISTRY/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-main
+ image: $DOCKER_REGISTRY/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-main:latest
ports:
+ - "6060:6060"
- "6061:6061/tcp"
entrypoint: ["java", "-Dio.netty.leakDetection.level=paranoid", "-cp", "*:", "org.onap.dcae.collectors.veshv.main.MainKt"]
command: ["--listen-port", "6061","--config-url", "http://consul:8500/v1/kv/veshv-config"]
+ healthcheck:
+ interval: 10s
+ timeout: 5s
+ retries: 2
+ test: "curl --request GET --fail --silent --show-error localhost:6060/health/ready && nc -vz localhost 6061"
depends_on:
- kafka
volumes:
@@ -55,7 +61,7 @@ services:
- ves-hv-default
dcae-app-simulator:
- image: $DOCKER_REGISTRY/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-dcae-app-simulator
+ image: $DOCKER_REGISTRY/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-dcae-app-simulator:latest
ports:
- "6063:6063/tcp"
command: ["--listen-port", "6063", "--kafka-bootstrap-servers", "kafka:9092", "--kafka-topics", "ves_hvRanMeas"]
@@ -63,7 +69,7 @@ services:
interval: 10s
timeout: 5s
retries: 2
- test: ["CMD", "curl", "--request", "GET", "--fail", "--silent", "--show-error", "localhost:6063/healthcheck"]
+ test: "curl --request GET --fail --silent --show-error localhost:6063/healthcheck"
depends_on:
- kafka
networks:
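Note (illustration, not part of the diff): the collector's healthcheck moves to the shell form and now probes the newly exposed 6060 health port as well as the 6061 listen port. A hedged way to verify from the host once the stack is up (the container name assumes the default compose project name derived from the testsuites directory):

    docker-compose up -d
    docker inspect --format '{{ .State.Health.Status }}' testsuites_ves-hv-collector_1
    curl --fail --silent localhost:6060/health/ready && echo collector ready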
diff --git a/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh
index 72dacf6a2..6b527fc22 100755
--- a/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh
+++ b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh
@@ -25,16 +25,8 @@ make FILE=invalid_client CA=invalid_trust
cd ..
export DOCKER_REGISTRY="nexus3.onap.org:10001"
-CURRENT_DIR=${PWD##*/}
-VES_HV_CONTAINER_NAME=ves-hv-collector
-
-# little race condition between container start-up and required files copying below
docker-compose up -d
-COMPOSE_VES_HV_CONTAINER_NAME=${CURRENT_DIR}_${VES_HV_CONTAINER_NAME}_1
-echo "COPY tls authorization files to container: ${COMPOSE_VES_HV_CONTAINER_NAME}"
-docker cp ssl/. ${COMPOSE_VES_HV_CONTAINER_NAME}:/etc/ves-hv
-# race condition end
-
+mkdir ${WORKSPACE}/archives/containers_logs
export ROBOT_VARIABLES="--pythonpath ${WORKSPACE}/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries"
\ No newline at end of file
diff --git a/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh
index 91ad90305..84d36667e 100755
--- a/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh
+++ b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh
@@ -4,7 +4,7 @@ cd ssl
make clean
cd ..
-docker-compose logs > ${WORKSPACE}/archives/docker-compose.log
+docker-compose logs > ${WORKSPACE}/archives/containers_logs/docker-compose.log
docker-compose down
docker-compose rm -f
diff --git a/test/csit/plans/dcaegen2/prh-testsuites/setup.sh b/test/csit/plans/dcaegen2/prh-testsuites/setup.sh
index a5ce48b52..52167bf5c 100644
--- a/test/csit/plans/dcaegen2/prh-testsuites/setup.sh
+++ b/test/csit/plans/dcaegen2/prh-testsuites/setup.sh
@@ -8,26 +8,11 @@ export AAI_SIMULATOR="aai_simulator"
cd ${WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/resources/
-docker login -u docker -p docker nexus3.onap.org:10001
pip uninstall -y docker-py
pip uninstall -y docker
pip install -U docker
docker-compose up -d --build
-# Wait for initialization of Docker containers
-for i in {1..10}; do
- if [ $(docker inspect --format '{{ .State.Running }}' ${PRH_SERVICE}) ] && \
- [ $(docker inspect --format '{{ .State.Running }}' ${DMAAP_SIMULATOR}) ] && \
- [ $(docker inspect --format '{{ .State.Running }}' ${AAI_SIMULATOR}) ]
- then
- echo "dmaap_simulator, aai_simulator and prh services are running"
- break
- else
- echo sleep ${i}
- sleep ${i}
- fi
-done
-
PRH_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' ${PRH_SERVICE})
DMAAP_SIMULATOR_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' ${DMAAP_SIMULATOR})
AAI_SIMULATOR_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' ${AAI_SIMULATOR})
@@ -47,12 +32,5 @@ for i in {1..10}; do
sleep ${i}
done
-docker stop prh
-docker cp prh:/config/prh_endpoints.json ${WORKDIR}
-sed -i -e 's/"dmaapHostName":.*/"dmaapHostName": "'${DMAAP_SIMULATOR_IP}'",/g' ${WORKDIR}/prh_endpoints.json
-sed -i -e 's/"aaiHost":.*/"aaiHost": "'${AAI_SIMULATOR_IP}'",/g' ${WORKDIR}/prh_endpoints.json
-docker cp ${WORKDIR}/prh_endpoints.json prh:/config/
-docker start prh
-
# #Pass any variables required by Robot test suites in ROBOT_VARIABLES
ROBOT_VARIABLES="-v DMAAP_SIMULATOR:${DMAAP_SIMULATOR_IP}:2222 -v AAI_SIMULATOR:${AAI_SIMULATOR_IP}:3333 -v PRH:${PRH_IP}:8100"
diff --git a/test/csit/plans/dmaap-buscontroller/with_dr/setup.sh b/test/csit/plans/dmaap-buscontroller/with_dr/setup.sh
new file mode 100755
index 000000000..7cefa7270
--- /dev/null
+++ b/test/csit/plans/dmaap-buscontroller/with_dr/setup.sh
@@ -0,0 +1,57 @@
+#!/bin/bash
+#
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright (C) 2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+#
+
+source ${SCRIPTS}/common_functions.sh
+
+
+if [ "$USE_EXISTING_DMAAP" = "Y" ]
+then
+ ROBOT_VARIABLES="-v AAF_IP:0.0.0 -v MRC_IP:0.0.0.0 -v DRPS_IP:172.17.0.3 -v DMAAPBC_IP:172.17.0.4"
+else
+
+ # Place the scripts in run order:
+ source ${WORKSPACE}/test/csit/scripts/dmaap-buscontroller/dr-launch.sh
+ dmaap_dr_launch
+ DRPS_IP=${IP}
+
+ #source ${WORKSPACE}/test/csit/scripts/dmaap-buscontroller/start-mock.sh
+ #start_mock "aaf"
+ #AAF_IP=${IP}
+ AAF_IP=0.0.0.0
+ #start_mock "drps"
+ #DRPS_IP=${IP}
+ MRC_IP=0.0.0.0
+
+ source ${WORKSPACE}/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh
+ dmaapbc_launch $AAF_IP $MRC_IP $DRPS_IP
+ DMAAPBC_IP=${IP}
+
+
+ echo "AAF_IP=$AAF_IP MRC_IP=$MRC_IP DRPS_IP=$DRPS_IP DMAAPBC_IP=$DMAAPBC_IP"
+
+ # Pass any variables required by Robot test suites in ROBOT_VARIABLES
+ ROBOT_VARIABLES="-v AAF_IP:${AAF_IP} -v MRC_IP:${MRC_IP} -v DRPS_IP:${DRPS_IP} -v DMAAPBC_IP:${DMAAPBC_IP}"
+ set -x
+ ${WORKSPACE}/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh ${DMAAPBC_IP} ${DRPS_IP} ${MRC_IP} https
+ set +x
+fi
+
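Note (illustration, not part of the diff): the new setup script either reuses an already running DMaaP/DR deployment (USE_EXISTING_DMAAP=Y, with the hard-coded bridge addresses above) or launches Data Router and the Bus Controller itself. A hedged sketch of the reuse path, assuming the standard test/csit/run-csit.sh entry point:

    export USE_EXISTING_DMAAP=Y
    ./run-csit.sh plans/dmaap-buscontroller/with_dr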
diff --git a/test/csit/plans/dmaap-buscontroller/with_dr/teardown.sh b/test/csit/plans/dmaap-buscontroller/with_dr/teardown.sh
new file mode 100755
index 000000000..23ae60a10
--- /dev/null
+++ b/test/csit/plans/dmaap-buscontroller/with_dr/teardown.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+#
+# ============LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright (C) 2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+if [ "$KEEP_DMAAP" != "Y" ]
+then
+kill-instance.sh dmaapbc
+cd $WORKSPACE/archives/dmaapdr/datarouter/docker-compose/
+docker-compose down -v
+fi
diff --git a/test/csit/plans/dmaap-buscontroller/with_dr/testplan.txt b/test/csit/plans/dmaap-buscontroller/with_dr/testplan.txt
new file mode 100755
index 000000000..04c6838d4
--- /dev/null
+++ b/test/csit/plans/dmaap-buscontroller/with_dr/testplan.txt
@@ -0,0 +1,2 @@
+# Place the suites in run order.
+dmaap-buscontroller/with_dr
diff --git a/test/csit/plans/dmaap-datarouter/dr-suite/setup.sh b/test/csit/plans/dmaap-datarouter/dr-suite/setup.sh
index 80f191d0a..e5debfc2b 100755
--- a/test/csit/plans/dmaap-datarouter/dr-suite/setup.sh
+++ b/test/csit/plans/dmaap-datarouter/dr-suite/setup.sh
@@ -11,6 +11,7 @@ cd datarouter
git pull
cd $WORKSPACE/archives/dmaapdr/datarouter/docker-compose/
+sed -i 's/10003/10001/g' docker-compose.yml
# start DMaaP DR containers with docker compose and configuration from docker-compose.yml
docker login -u docker -p docker nexus3.onap.org:10001
docker-compose up -d
diff --git a/test/csit/plans/music/music-test-plan/setup.sh b/test/csit/plans/music/music-test-plan/setup.sh
index ddfdfc023..ce5d1085a 100755
--- a/test/csit/plans/music/music-test-plan/setup.sh
+++ b/test/csit/plans/music/music-test-plan/setup.sh
@@ -27,7 +27,7 @@ source ${WORKSPACE}/test/csit/scripts/music/music-scripts/music_script.sh
echo "# music configuration step";
CASS_IMG=nexus3.onap.org:10001/onap/music/cassandra_music:latest
-TOMCAT_IMG=nexus3.onap.org:10001/library/tomcat:8.0
+TOMCAT_IMG=nexus3.onap.org:10001/library/tomcat:8.5
ZK_IMG=nexus3.onap.org:10001/library/zookeeper:3.4
MUSIC_IMG=nexus3.onap.org:10001/onap/music/music:latest
WORK_DIR=/tmp/music
@@ -38,6 +38,8 @@ MUSIC_PROPERTIES=/tmp/music/properties
MUSIC_LOGS=/tmp/music/logs
mkdir -p ${MUSIC_PROPERTIES}
mkdir -p ${MUSIC_LOGS}
+mkdir -p ${MUSIC_LOGS}/MUSIC
+
cp ${MUSIC_SOURCE_PROPERTIES}/* ${WORK_DIR}/properties
@@ -77,6 +79,19 @@ echo "TOMCAT_IP=${TOMCAT_IP}"
${WORKSPACE}/test/csit/scripts/music/music-scripts/wait_for_port.sh ${TOMCAT_IP} 8080
+sleep 20;
+echo "get the tomcat logs to make sure its running music properly"
+echo "======== TOMCAT Logs =============="
+docker logs music-tomcat
+# Needed only if we need to look at localhost logs.
+#echo "===== MUSIC localhost Log ===================="
+#docker exec music-tomcat /bin/bash -c "cat /usr/local/tomcat/logs/localhost*"
+
+echo "===== MUSIC Log ===================="
+ls -al $MUSIC_LOGS/MUSIC
+docker exec music-tomcat /bin/bash -c "cat /opt/app/music/logs/MUSIC/music.log"
+echo "===== MUSIC error log =================="
+docker exec music-tomcat /bin/bash -c "cat /opt/app/music/logs/MUSIC/error.log"
echo "inspect docker things for tracing purpose"
docker inspect music-db
@@ -89,6 +104,7 @@ docker network inspect music-net
echo "dump music content just after music is started"
docker exec music-db /usr/bin/nodetool status
docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM system_schema.keyspaces'
+docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'DESCRIBE keyspace admin'
docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM admin.keyspace_master'
diff --git a/test/csit/plans/music/music-test-plan/teardown.sh b/test/csit/plans/music/music-test-plan/teardown.sh
index e9982ae27..a5f74238c 100755
--- a/test/csit/plans/music/music-test-plan/teardown.sh
+++ b/test/csit/plans/music/music-test-plan/teardown.sh
@@ -19,6 +19,18 @@
#
# add here below the killing of all docker containers used for music CSIT testing
#
+echo "dump music.log files"
+ls -alF /tmp/music
+ls -alFR /tmp/music
+ls -alF /tmp/music/properties
+cat /tmp/music/properties/music.properties
+echo "===== MUSIC log =================="
+docker exec music-tomcat /bin/bash -c "cat /opt/app/music/logs/MUSIC/music.log"
+#cat /tmp/music/logs/MUSIC/music.log
+echo "===== MUSIC error log =================="
+docker exec music-tomcat /bin/bash -c "cat /opt/app/music/logs/MUSIC/error.log"
+#cat /tmp/music/logs/MUSIC/error.log
+
echo "##########################################################";
echo "#";
echo "# music scripts docker containers killing";
@@ -39,13 +51,6 @@ sleep 5;
docker volume rm music-vol
-echo "dump music.log files"
-ls -alF /tmp/music
-ls -alF /tmp/music/properties
-cat /tmp/music/properties/music.properties
-cat /tmp/music/logs/MUSIC/music.log
-cat /tmp/music/logs/MUSIC/error.log
-
#rm -Rf /tmp/music
diff --git a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh
index 5a578230b..f990aa5a7 100644
--- a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh
+++ b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh
@@ -24,10 +24,10 @@ source ${SCRIPTS}/common_functions.sh
docker run -d -p 8500:8500 --name msb_consul consul:0.9.3
MSB_CONSUL_IP=`get-instance-ip.sh msb_consul`
echo MSB_CONSUL_IP=${MSB_CONSUL_IP}
-docker run -d -p 10081:10081 -e CONSUL_IP=$MSB_CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery
+docker run -d -p 10081:10081 -e CONSUL_IP=$MSB_CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery:1.1.0
MSB_DISCOVERY_IP=`get-instance-ip.sh msb_discovery`
echo MSB_DISCOVERY_IP=${MSB_DISCOVERY_IP}
-docker run -d -p 80:80 -e CONSUL_IP=$MSB_CONSUL_IP -e SDCLIENT_IP=$MSB_DISCOVERY_IP -e "ROUTE_LABELS=visualRange:1" --name msb_internal_apigateway nexus3.onap.org:10001/onap/msb/msb_apigateway
+docker run -d -p 80:80 -e CONSUL_IP=$MSB_CONSUL_IP -e SDCLIENT_IP=$MSB_DISCOVERY_IP --name msb_internal_apigateway nexus3.onap.org:10001/onap/msb/msb_apigateway:1.1.0
MSB_IAG_IP=`get-instance-ip.sh msb_internal_apigateway`
echo MSB_IAG_IP=${MSB_IAG_IP}
@@ -39,8 +39,8 @@ for i in {1..10}; do
done
# wait for container initalization
-echo sleep 60
-sleep 60
+echo sleep 30
+sleep 30
ORG="onap"
PROJECT="vfc"
@@ -52,7 +52,8 @@ IMAGE_ACTIVITI_NAME="${DOCKER_REPOSITORY}/${ORG}/${PROJECT}/${IMAGE}"
SERVICE_IP=$(ip route get 8.8.8.8 | awk '/8.8.8.8/ {print $NF}')
# start wfengine-activiti
-docker run -d --name vfc_wfengine_activiti -p 8804:8080 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8804 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_ACTIVITI_NAME}
+# docker run -d --name vfc_wfengine_activiti -p 8804:8080 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8804 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_ACTIVITI_NAME}
+docker run -d --name vfc_wfengine_activiti -p 8804:8080 -e SERVICE_PORT=8080 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_ACTIVITI_NAME}
WFENGINE_ACTIVITI_IP=`get-instance-ip.sh vfc_wfengine_activiti`
# Wait for initialization
@@ -72,7 +73,10 @@ IMAGE="wfengine-mgrservice"
IMAGE_MGRSERVICE_NAME="${DOCKER_REPOSITORY}/${ORG}/${PROJECT}/${IMAGE}"
# Start wfengine-mgrservice
-docker run -d --name vfc_wfengine_mgrservice -p 8805:10550 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8805 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_MGRSERVICE_NAME}
+#docker run -d --name vfc_wfengine_mgrservice -p 8805:10550 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8805 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_MGRSERVICE_NAME}
+# docker run -d --name vfc_wfengine_mgrservice -p 8805:10550 -e SERVICE_PORT=10550 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_MGRSERVICE_NAME}
+docker run -d --name vfc_wfengine_mgrservice -p 8805:10550 -e SERVICE_PORT=10550 -e OPENPALETTE_MSB_IP=${WFENGINE_ACTIVITI_IP} -e OPENPALETTE_MSB_PORT=8080 ${IMAGE_MGRSERVICE_NAME}
+
##docker run -d --name ${IMAGE} -e OPENPALETTE_MSB_IP=${WFENGINEACTIVITIR_IP} -e OPENPALETTE_MSB_PORT=8080 ${IMAGE_MGRSERVICE_NAME}
WFENGINE_MGRSERVICE_IP=`get-instance-ip.sh vfc_wfengine_mgrservice`
for i in {1..10}; do
diff --git a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh
index 384bc3935..bca33569b 100644
--- a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh
+++ b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh
@@ -16,6 +16,12 @@
#
# This script is sourced by run-csit.sh after Robot test completion.
+echo === logs vfc_wfengine_activiti ===
+docker logs vfc_wfengine_activiti
+
+echo === logs vfc_wfengine_mgrservice ===
+docker logs vfc_wfengine_mgrservice
+
kill-instance.sh msb_internal_apigateway
kill-instance.sh msb_discovery
kill-instance.sh msb_consul
diff --git a/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh b/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh
index baffc17d1..e564e637e 100755
--- a/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh
+++ b/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh
@@ -24,6 +24,10 @@
echo "This is ${WORKSPACE}/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh"
+firefox --version
+which firefox
+
+
# Clone Clamp repo to get extra folder that has all needed to run docker with docker-compose to start DB and Clamp
mkdir -p $WORKSPACE/archives/clamp-clone
cd $WORKSPACE/archives/clamp-clone
@@ -34,7 +38,7 @@ cd clamp/extra/docker/clamp/
sed -i '/image: onap\/clamp/c\ image: nexus3.onap.org:10001\/onap\/clamp' docker-compose.yml
# Change config to take third_party_proxy:8085 for SDC, Policy and DCAE simulator
-sed -i 's/}/,\"clamp.config.policy.pdpUrl1\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.pdpUrl2\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.papUrl\":\"http:\/\/third_party_proxy:8085\/pap\/ , testpap, alpha123\",\"clamp.config.policy.clientId\":\"python\",\"clamp.config.policy.clientKey\":\"dGVzdA==\",\"clamp.config.sdc.catalog.url\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/\",\"clamp.config.sdc.hostUrl\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.sdc.serviceUrl\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/services\",\"clamp.config.dcae.inventory.url\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.dcae.dispatcher.url\":\"http:\/\/third_party_proxy:8085\",\"spring.profiles.active\":\"clamp-default,clamp-default-user,clamp-sdc-controller\"}/g' clamp.env
+sed -i 's/}/,\"clamp.config.policy.pdpUrl1\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.pdpUrl2\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.papUrl\":\"http:\/\/third_party_proxy:8085\/pap\/ , testpap, alpha123\",\"clamp.config.policy.clientId\":\"python\",\"clamp.config.policy.clientKey\":\"dGVzdA==\",\"clamp.config.sdc.catalog.url\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/\",\"clamp.config.sdc.hostUrl\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.sdc.serviceUrl\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/services\",\"clamp.config.dcae.inventory.url\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.dcae.dispatcher.url\":\"http:\/\/third_party_proxy:8085\",\"spring.profiles.active\":\"clamp-default,clamp-default-user,clamp-sdc-controller\",\"server.ssl.client-auth\":\"want\"}/g' clamp.env
# Add the sql to create template so it is played by docker-compose later
cp ../../../src/test/resources/sql/four_templates_only.sql ../../sql/bulkload/
diff --git a/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh b/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh
index c7cf03ef4..804603f2b 100755
--- a/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh
+++ b/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh
@@ -25,7 +25,7 @@ cat << EOF > $JSON
{
"version": "1",
"topicNsRoot": "org.onap.dmaap",
- "drProvUrl": "http://${2}:${DRPORT}",
+ "drProvUrl": "${PROTO}://dmaap-dr-prov:${DRPORT}",
"dmaapName": "onapCSIT",
"bridgeAdminTopic": "MM_AGENT_PROV"
diff --git a/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh b/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh
index 688ce7d45..317c17f18 100755
--- a/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh
+++ b/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh
@@ -12,7 +12,16 @@ function dmaapbc_launch() {
TMP_CFG=/tmp/docker-databus-controller.conf
. ./onapCSIT.env > $TMP_CFG
- docker run -d --name $CONTAINER_NAME -v $TMP_CFG:/opt/app/config/conf $TAG
+ ADDHOSTS=""
+ if [ ! -z "$2" ]
+ then
+ ADDHOSTS="$ADDHOSTS --add-host=message-router:$2"
+ fi
+ if [ ! -z "$3" ]
+ then
+ ADDHOSTS="$ADDHOSTS --add-host=dmaap-dr-prov:$3"
+ fi
+ docker run -d $ADDHOSTS --name $CONTAINER_NAME -v $TMP_CFG:/opt/app/config/conf $TAG
IP=`get-instance-ip.sh ${CONTAINER_NAME}`
# Wait for initialization
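Note (illustration, not part of the diff): dmaapbc_launch now accepts the Message Router and DR Prov addresses as extra arguments and turns them into --add-host entries, so the bus controller container resolves those hostnames without DNS. The expanded docker run is roughly equivalent to (IPs are examples; $TMP_CFG and $TAG as defined in the script):

    docker run -d --add-host=message-router:172.17.0.5 --add-host=dmaap-dr-prov:172.17.0.6 \
        --name dmaapbc -v $TMP_CFG:/opt/app/config/conf $TAG
    docker exec dmaapbc cat /etc/hosts   # both entries should appear here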
diff --git a/test/csit/scripts/dmaap-buscontroller/dr-launch.sh b/test/csit/scripts/dmaap-buscontroller/dr-launch.sh
new file mode 100644
index 000000000..abc0aae87
--- /dev/null
+++ b/test/csit/scripts/dmaap-buscontroller/dr-launch.sh
@@ -0,0 +1,59 @@
+
+#!/bin/bash
+
+#!/bin/bash
+
+# script to launch DMaaP DR docker containers
+# sets global var IP with assigned IP address of DR Prov
+
+function dmaap_dr_launch() {
+ IP=""
+
+
+ # This next section was copied from scripts/dmaap-datarouter/dr-suite/setup.sh
+ # and slightly modified...
+
+ # Clone DMaaP Data Router repo
+ mkdir -p $WORKSPACE/archives/dmaapdr
+ cd $WORKSPACE/archives/dmaapdr
+
+ git clone --depth 1 https://gerrit.onap.org/r/dmaap/datarouter -b master
+ cd datarouter
+ git pull
+ cd $WORKSPACE/archives/dmaapdr/datarouter/docker-compose/
+
+ sed -i 's/10003/10001/g' docker-compose.yml
+ # start DMaaP DR containers with docker compose and configuration from docker-compose.yml
+ docker login -u docker -p docker nexus3.onap.org:10001
+ docker-compose up -d
+
+ # Wait for initialization of Docker container for datarouter-node, datarouter-prov and mariadb
+ for i in {1..50}; do
+ if [ $(docker inspect --format '{{ .State.Running }}' datarouter-node) ] && \
+ [ $(docker inspect --format '{{ .State.Running }}' datarouter-prov) ] && \
+ [ $(docker inspect --format '{{ .State.Running }}' mariadb) ]
+ then
+ echo "DR Service Running"
+ break
+ else
+ echo sleep $i
+ sleep $i
+ fi
+ done
+
+ DR_PROV_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' datarouter-prov)
+ DR_NODE_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' datarouter-node)
+ DR_GATEWAY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.Gateway}}{{end}}' datarouter-prov)
+
+ echo DR_PROV_IP=${DR_PROV_IP}
+ echo DR_NODE_IP=${DR_NODE_IP}
+ echo DR_GATEWAY_IP=${DR_GATEWAY_IP}
+
+ docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/NODES?val=dmaap-dr-node\|$DR_GATEWAY_IP"
+ docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/PROV_AUTH_ADDRESSES?val=dmaap-dr-prov\|$DR_GATEWAY_IP"
+
+ #Pass any variables required by Robot test suites in ROBOT_VARIABLES
+ ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP}"
+
+ IP=${DR_GATEWAY_IP}
+}
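Note (illustration, not part of the diff): dr-launch.sh only defines dmaap_dr_launch; with_dr/setup.sh sources it and reads the result back through the global IP variable, which the function sets to the datarouter-prov network gateway:

    source ${WORKSPACE}/test/csit/scripts/dmaap-buscontroller/dr-launch.sh
    dmaap_dr_launch
    DRPS_IP=${IP}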
diff --git a/test/csit/scripts/policy/script1.sh b/test/csit/scripts/policy/script1.sh
index d2229aae8..7bb9731c8 100755
--- a/test/csit/scripts/policy/script1.sh
+++ b/test/csit/scripts/policy/script1.sh
@@ -160,7 +160,7 @@ INTERVAL=20
TIME=0
while [ "$TIME" -lt "$TIME_OUT" ]; do
- curl -i -v -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'ClientAuth: cHl0aG9uOnRlc3Q=' -H 'Authorization: Basic dGVzdHBkcDphbHBoYTEyMw==' -H 'Environment: TEST' -X POST -d '{"policyName": ".*"}' http://${PDP_IP}:8081/pdp/api/getConfig && break
+ curl -k -i -v -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'ClientAuth: cHl0aG9uOnRlc3Q=' -H 'Authorization: Basic dGVzdHBkcDphbHBoYTEyMw==' -H 'Environment: TEST' -X POST -d '{"policyName": ".*"}' https://${PDP_IP}:8081/pdp/api/getConfig && break
echo Sleep: $INTERVAL seconds before testing if Policy is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
sleep $INTERVAL
diff --git a/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot b/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot
index 305044cb0..e8b1429d0 100644
--- a/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot
+++ b/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot
@@ -60,6 +60,7 @@ Set Properties for HolmesModel1
Select From List By Label id=vf vFirewall 0
Select From List By Label id=actionSet VNF
Select From List By Label id=location Data Center 2 Data Center 3
+ Input Text locator=deployParameters text={}
Click Button locator=Save
Set Policy Box properties for HolmesModel1
diff --git a/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot b/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot
index 0dc0a8abb..bdc537eab 100644
--- a/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot
+++ b/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot
@@ -53,6 +53,7 @@ Set Properties for TCAModel1
Select From List By Label id=vf vLoadBalancer 0
Select From List By Label id=actionSet VNF
Select From List By Label id=location Data Center 1 Data Center 3
+ Input Text locator=deployParameters text={}
Click Button locator=Save
Set Policy Box properties for TCAModel1
diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py
index d85eb4dee..b2466d7ca 100644
--- a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py
+++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py
@@ -8,9 +8,12 @@ from time import sleep
XNF_SIMULATOR_NAME = "xNF Simulator"
SIMULATOR_IMAGE_NAME = "onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-xnf-simulator"
SIMULATOR_IMAGE_FULL_NAME = os.getenv("DOCKER_REGISTRY") + "/" + SIMULATOR_IMAGE_NAME + ":latest"
-certificates_dir_path = os.getenv("WORKSPACE") + "/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/ssl/"
+WORKSPACE_ENV = os.getenv("WORKSPACE")
+certificates_dir_path = WORKSPACE_ENV + "/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/ssl/"
+collector_certs_lookup_dir = "/etc/ves-hv/"
ONE_SECOND_IN_NANOS = 10 ** 9
+
class XnfSimulatorLibrary:
def start_xnf_simulators(self, list_of_ports, valid_certs=True):
@@ -36,48 +39,43 @@ class XnfSimulatorLibrary:
simulators_addresses = []
for port in list_of_ports:
container = self.run_simulator(dockerClient, port,
- "/etc/ves-hv/" + cert_name_prefix + "client.crt",
- "/etc/ves-hv/" + cert_name_prefix + "client.key",
- "/etc/ves-hv/" + cert_name_prefix + "trust.crt"
+ collector_certs_lookup_dir + cert_name_prefix + "client.crt",
+ collector_certs_lookup_dir + cert_name_prefix + "client.key",
+ collector_certs_lookup_dir + cert_name_prefix + "trust.crt"
)
- self.copy_required_certificates_into_simulator(container)
logger.info("Started container: " + container.name + " " + container.id)
simulators_addresses.append(container.name + ":" + port)
return simulators_addresses
def run_simulator(self, dockerClient, port, client_crt_path, client_key_path, client_trust_store):
+ xNF_startup_command = ["--listen-port", port,
+ "--ves-host", "ves-hv-collector",
+ "--ves-port", "6061",
+ "--cert-file", client_crt_path,
+ "--private-key-file", client_key_path,
+ "--trust-cert-file", client_trust_store]
+ xNF_healthcheck_command = {
+ "interval": 5 * ONE_SECOND_IN_NANOS,
+ "timeout": 3 * ONE_SECOND_IN_NANOS,
+ "retries": 1,
+ "test": ["CMD", "curl", "--request", "GET",
+ "--fail", "--silent", "--show-error",
+ "localhost:" + port + "/healthcheck"]
+ }
+ logger.info("Startup command: " + str(xNF_startup_command))
+ logger.info("Healthcheck command: " + str(xNF_healthcheck_command))
return dockerClient.containers.run(SIMULATOR_IMAGE_FULL_NAME,
- command=["--listen-port", port,
- "--ves-host", "ves-hv-collector",
- "--ves-port", "6061",
- "--cert-file", client_crt_path,
- "--private-key-file", client_key_path,
- "--trust-cert-file", client_trust_store
- ],
- healthcheck={
- "interval": 5 * ONE_SECOND_IN_NANOS,
- "timeout": 3 * ONE_SECOND_IN_NANOS,
- "retries": 1,
- "test": ["CMD", "curl", "--request", "GET",
- "--fail", "--silent", "--show-error",
- "localhost:" + port + "/healthcheck"]
- },
+ command=xNF_startup_command,
+ healthcheck=xNF_healthcheck_command,
detach=True,
network="ves-hv-default",
ports={port + "/tcp": port},
+ volumes=self.container_volumes(),
name="ves-hv-collector-xnf-simulator" + port)
- def copy_required_certificates_into_simulator(self, container):
- container.exec_run("mkdir -p /etc/ves-hv")
- copy_to_container(container.id, [
- certificates_dir_path + "client.crt",
- certificates_dir_path + "client.key",
- certificates_dir_path + "trust.crt",
- certificates_dir_path + "invalid_client.crt",
- certificates_dir_path + "invalid_client.key",
- certificates_dir_path + "invalid_trust.crt",
- ])
+ def container_volumes(self):
+ return {certificates_dir_path: {"bind": collector_certs_lookup_dir, "mode": 'rw'}}
def assert_containers_startup_was_successful(self, dockerClient):
checks_amount = 6
@@ -95,11 +93,15 @@ class XnfSimulatorLibrary:
container_health = container.attrs['State']['Health']['Status']
return container_health == 'healthy' and container.status == 'running'
- def stop_and_remove_all_xnf_simulators(self):
+ def stop_and_remove_all_xnf_simulators(self, suite_name):
dockerClient = docker.from_env()
for container in self.get_simulators_list(dockerClient):
logger.info("Stopping and removing container: " + container.id)
- logger.debug(container.logs())
+ log_filename = WORKSPACE_ENV + "/archives/containers_logs/" + \
+ suite_name.split(".")[-1] + "_" + container.name + ".log"
+ file = open(log_filename, "w+")
+ file.write(container.logs())
+ file.close()
container.stop()
container.remove()
dockerClient.close()
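Note (illustration, not part of the diff): instead of docker cp-ing certificates into each simulator after start-up, the library now bind-mounts the ssl directory, which removes the race condition previously noted in setup.sh. A hedged docker CLI equivalent of one such container (port 7000 as used by the suite):

    docker run -d --network ves-hv-default -p 7000:7000 \
        -v ${WORKSPACE}/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/ssl/:/etc/ves-hv/:rw \
        --name ves-hv-collector-xnf-simulator7000 \
        ${DOCKER_REGISTRY}/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-xnf-simulator:latest \
        --listen-port 7000 --ves-host ves-hv-collector --ves-port 6061 \
        --cert-file /etc/ves-hv/client.crt --private-key-file /etc/ves-hv/client.key \
        --trust-cert-file /etc/ves-hv/trust.crt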
diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/hv-ves.robot b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot
index 482b698fe..6153afa0a 100644
--- a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/hv-ves.robot
+++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot
@@ -1,19 +1,28 @@
*** Settings ***
-Library DcaeAppSimulatorLibrary
+Library DcaeAppSimulatorLibrary
Library XnfSimulatorLibrary
Library VesHvContainersUtilsLibrary
Library Collections
+Resource resources/common-keywords.robot
+
Suite Setup Message Routing Suite Setup
Suite Teardown VES-HV Collector Suite Teardown
Test Teardown VES-HV Collector Test Shutdown
+*** Keywords ***
+Message Routing Suite Setup
+ Log Started Suite: VES-HV Message Routing
+ ${XNF_PORTS_LIST}= Create List 7000
+ Configure Valid xNF Simulators On Ports ${XNF_PORTS_LIST}
+ Log Suite setup finished
+
*** Test Cases ***
Correct Messages Routing
[Documentation] VES-HV Collector should route all valid messages to topics specified in configuration
... and do not change message payload generated in XNF simulator
- ${SIMULATORS_LIST}= Get xNF Simulators 1
+ ${SIMULATORS_LIST}= Get Valid xNF Simulators 1
Send Messages From xNF Simulators ${SIMULATORS_LIST} ${XNF_FIXED_PAYLOAD_REQUEST}
Wait until keyword succeeds 60 sec 5 sec
@@ -24,7 +33,7 @@ Correct Messages Routing
Too big payload message handling
[Documentation] VES-HV Collector should interrupt the stream when encountered message with too big payload
- ${SIMULATORS_LIST}= Get xNF Simulators 1
+ ${SIMULATORS_LIST}= Get Valid xNF Simulators 1
Send Messages From xNF Simulators ${SIMULATORS_LIST} ${XNF_TOO_BIG_PAYLOAD_REQUEST}
Wait until keyword succeeds 60 sec 5 sec
@@ -34,7 +43,7 @@ Too big payload message handling
Invalid wire frame message handling
[Documentation] VES-HV Collector should skip messages with invalid wire frame
- ${SIMULATORS_LIST}= Get xNF Simulators 1
+ ${SIMULATORS_LIST}= Get Valid xNF Simulators 1
Send Messages From xNF Simulators ${SIMULATORS_LIST} ${XNF_INVALID_WIRE_FRAME_REQUEST}
Wait until keyword succeeds 60 sec 5 sec
@@ -45,7 +54,7 @@ Invalid wire frame message handling
Invalid GPB data message handling
[Documentation] VES-HV Collector should skip messages with invalid GPB data
- ${SIMULATORS_LIST}= Get xNF Simulators 1
+ ${SIMULATORS_LIST}= Get Valid xNF Simulators 1
Send Messages From xNF Simulators ${SIMULATORS_LIST} ${XNF_INVALID_GPB_DATA_REQUEST}
Wait until keyword succeeds 60 sec 5 sec
@@ -56,47 +65,13 @@ Invalid GPB data message handling
Unsupported domain message handling
[Documentation] VES-HV Collector should skip messages with unsupported domain
- ${SIMULATORS_LIST}= Get xNF Simulators 1
+ ${SIMULATORS_LIST}= Get Valid xNF Simulators 1
Send Messages From xNF Simulators ${SIMULATORS_LIST} ${XNF_UNSUPPORTED_DOMAIN_REQUEST}
Wait until keyword succeeds 60 sec 5 sec
... Assert Dcae App Consumed ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_50000}
Assert Dcae App Consumed Proper Messages ${DCAE_APP_API_MESSAGES_VALIDATION_URL} ${DCAE_UNSUPPORTED_DOMAIN_REQUEST}
-*** Keywords ***
-Message Routing Suite Setup
- Log Started Suite: VES-HV Message Routing
- ${XNF_PORTS_LIST}= Create List 7000
- Configure xNF Simulators On Ports ${XNF_PORTS_LIST}
- Log Suite setup finished
-
-Configure xNF Simulators On Ports
- [Arguments] ${XNF_PORTS_LIST}
- ${XNF_SIMULATORS_ADDRESSES}= Start Xnf Simulators ${XNF_PORTS_LIST} True
- Set Suite Variable ${XNF_SIMULATORS_ADDRESSES}
-
-
-Get xNF Simulators
- [Arguments] ${AMOUNT}
- ${SIMULATORS}= Get Slice From List ${XNF_SIMULATORS_ADDRESSES} 0 ${AMOUNT}
- [Return] ${SIMULATORS}
-
-
-Send Messages From xNF Simulators
- [Arguments] ${XNF_HOSTS_LIST} ${MESSAGE_FILEPATH}
- :FOR ${HOST} IN @{XNF_HOSTS_LIST}
- \ ${XNF_SIM_API_ACCESS}= Get xNF Sim Api Access Url ${HTTP_METHOD_URL} ${HOST}
- \ ${XNF_SIM_API_URL}= Catenate SEPARATOR= ${XNF_SIM_API_ACCESS} ${XNF_SIM_API_PATH}
- \ Send messages ${XNF_SIM_API_URL} ${MESSAGE_FILEPATH}
-
-
-VES-HV Collector Test Shutdown
- Reset DCAE App Simulator ${DCAE_APP_API_MESSAGE_RESET_URL}
-
-
-VES-HV Collector Suite Teardown
- Stop And Remove All Xnf Simulators
-
*** Variables ***
${HTTP_METHOD_URL} http://
diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot
index 345118657..bc03de232 100644
--- a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot
+++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot
@@ -6,13 +6,13 @@ Library Collections
*** Keywords ***
Configure Valid xNF Simulators On Ports
[Arguments] ${XNF_PORTS_LIST}
- ${VALID_XNF_SIMULATORS_ADDRESSES}= Start Xnf Simulators ${XNF_PORTS_LIST} True
+ ${VALID_XNF_SIMULATORS_ADDRESSES}= Start Xnf Simulators ${XNF_PORTS_LIST} ${true}
Set Suite Variable ${VALID_XNF_SIMULATORS_ADDRESSES}
Configure Invalid xNF Simulators On Ports
[Arguments] ${XNF_PORTS_LIST}
- ${INVALID_XNF_SIMULATORS_ADDRESSES}= Start Xnf Simulators ${XNF_PORTS_LIST} False
+ ${INVALID_XNF_SIMULATORS_ADDRESSES}= Start Xnf Simulators ${XNF_PORTS_LIST} ${false}
Set Suite Variable ${INVALID_XNF_SIMULATORS_ADDRESSES}
@@ -41,7 +41,7 @@ VES-HV Collector Test Shutdown
VES-HV Collector Suite Teardown
- Stop And Remove All Xnf Simulators
+ Stop And Remove All Xnf Simulators ${SUITE NAME}
*** Variables ***
${HTTP_METHOD_URL} http://
diff --git a/test/csit/tests/dcaegen2/prh-testcases/__init__.robot b/test/csit/tests/dcaegen2/prh-testcases/__init__.robot
index e69de29bb..f13ba6df8 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/__init__.robot
+++ b/test/csit/tests/dcaegen2/prh-testcases/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation    Integration - PRH suite
\ No newline at end of file
diff --git a/test/csit/tests/dcaegen2/prh-testcases/prh_tests.robot b/test/csit/tests/dcaegen2/prh-testcases/prh_tests.robot
index b7013c4a2..5150a4b35 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/prh_tests.robot
+++ b/test/csit/tests/dcaegen2/prh-testcases/prh_tests.robot
@@ -10,7 +10,7 @@ Resource resources/prh_library.robot
${DMAAP_SIMULATOR_URL} http://${DMAAP_SIMULATOR}
${AAI_SIMULATOR_URL} http://${AAI_SIMULATOR}
${PRH_URL} http://${PRH}
-${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS} {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}}
+${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS} {"event": {"commonEventHeader": {"sourceName":"NOK6061ZW1"}, "pnfRegistrationFields": {"oamV4IpAddress":"10.16.123.234", "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}}
${Not_json_format} ""
*** Test Cases ***
@@ -19,28 +19,18 @@ Valid DMaaP event can be converted to PNF_READY notification
[Tags] PRH Valid event
[Template] Valid event processing
${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS}
- {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"QTFCOC540002G", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":""}}}
- {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"QTFCOC540002F", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}}
- {"event": {"otherFields": {"pnfVendorName":"Ericsson", "pnfSerialNumber":"QTFCOC5400000", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85b3:0000:0000:8a2e:0370:7334"}}}
+ {"event": {"commonEventHeader": {"sourceName":"NOK6061ZW2"}, "pnfRegistrationFields": {"oamV4IpAddress":"10.17.123.234", "oamV6IpAddress":""}}}
+ {"event": {"commonEventHeader": {"sourceName":"ERI6061ZW3"}, "pnfRegistrationFields": {"oamV4IpAddress":"", "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2e:0370:7334"}}}
Invalid DMaaP event cannot be converted to PNF_READY notification
[Documentation] PRH get invalid event from DMaaP with missing required fields - PRH does not produce PNF_READY notification
[Tags] PRH Invalid event
[Template] Invalid event processing
- {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"", "pnfOamIpv6Address":""}}}
- {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}}
- {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":""}}}
- {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}}
- {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"", "pnfOamIpv4Address":"", "pnfOamIpv6Address":""}}}
- {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}}
- {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":""}}}
- {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}}
- {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"", "pnfOamIpv6Address":""}}}
- {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}}
- {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":""}}}
- {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}}
- {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"", "pnfOamIpv4Address":"", "pnfOamIpv6Address":""}}}
- ${Not_json_format}
+ {"event": {"commonEventHeader": {"sourceName":"NOK6061ZW4"}, "pnfRegistrationFields": {"oamV4IpAddress":"", "oamV6IpAddress":""}}}
+ {"event": {"commonEventHeader": {"sourceName":""}, "pnfRegistrationFields": {"oamV4IpAddress":"10.18.123.234", "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2a:0370:7334"}}}
+ {"event": {"commonEventHeader": {"sourceName":""}, "pnfRegistrationFields": {"oamV4IpAddress":"10.17.163.234", "oamV6IpAddress":""}}}
+ {"event": {"commonEventHeader": {"sourceName":""}, "pnfRegistrationFields": {"oamV4IpAddress":"", "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2f:0370:7334"}}}
+ {"event": {"commonEventHeader": {"sourceName":""}, "pnfRegistrationFields": {"oamV4IpAddress":"", "oamV6IpAddress":""}}}
Get valid event from DMaaP and record in AAI does not exist
[Documentation] PRH get valid event from DMaaP with all required fields and in AAI record doesn't exist - PRH does not produce PNF_READY notification
@@ -48,7 +38,13 @@ Get valid event from DMaaP and record in AAI does not exist
[Timeout] 30s
Set PNF name in AAI wrong_aai_record
Set event in DMaaP ${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS}
- Wait Until Keyword Succeeds 100x 300ms Check PRH log org.onap.dcaegen2.services.prh.exceptions.AAINotFoundException: Incorrect response code for continuation of tasks workflow
+ Wait Until Keyword Succeeds 100x 300ms Check PRH log java.io.IOException: Connection closed prematurely
+
+Event in DMaaP is not JSON format
+ [Documentation] PRH get not JSON format event from DMaaP - PRH does not produce PNF_READY notification
+ [Tags] PRH
+ Set event in DMaaP ${Not_json_format}
+ Wait Until Keyword Succeeds 100x 300ms Check PRH log |java.lang.IllegalStateException: Not a JSON Array:
Get valid event from DMaaP and AAI is not responding
[Documentation] PRH get valid event from DMaaP with all required fields and AAI is not responding - PRH does not produce PNF_READY notification
@@ -56,4 +52,4 @@ Get valid event from DMaaP and AAI is not responding
[Timeout] 180s
Stop AAI
Set event in DMaaP ${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS}
- Wait Until Keyword Succeeds 100x 300ms Check PRH log java.net.NoRouteToHostException: Host is unreachable (Host unreachable)
+ Wait Until Keyword Succeeds 100x 300ms Check PRH log java.net.UnknownHostException: aai
diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py b/test/csit/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py
index ac3fba46e..c2a8b78a2 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py
+++ b/test/csit/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py
@@ -21,16 +21,18 @@ class PrhLibrary(object):
@staticmethod
def create_pnf_ready_notification(json_file):
json_to_python = json.loads(json_file)
- ipv4 = json_to_python["event"]["otherFields"]["pnfOamIpv4Address"]
- ipv6 = json_to_python["event"]["otherFields"]["pnfOamIpv6Address"]
- pnf_name = _create_pnf_name(json_file)
- str_json = '{"pnf-name":"' + pnf_name + '","ipaddress-v4-oam":"' + ipv4 + '","ipaddress-v6-oam":"' + ipv6 + '"}'
+ ipv4 = json_to_python["event"]["pnfRegistrationFields"]["oamV4IpAddress"]
+ ipv6 = json_to_python["event"]["pnfRegistrationFields"]["oamV6IpAddress"]
+ header = json_to_python["event"]["commonEventHeader"]["sourceName"]
+ str_json = '{"sourceName":"' + header + '","ipaddress-v4-oam":"' + ipv4 + '","ipaddress-v6-oam":"' + ipv6 + '"}'
python_to_json = json.dumps(str_json)
return python_to_json.replace("\\", "")[1:-1]
@staticmethod
def create_pnf_name(json_file):
- return _create_pnf_name(json_file)
+ json_to_python = json.loads(json_file)
+ header = json_to_python["event"]["commonEventHeader"]["sourceName"]
+ return header
@staticmethod
def stop_aai():
@@ -38,9 +40,7 @@ class PrhLibrary(object):
container = client.containers.get('aai_simulator')
container.stop()
-
-def _create_pnf_name(json_file):
- json_to_python = json.loads(json_file)
- vendor = json_to_python["event"]["otherFields"]["pnfVendorName"]
- serial_number = json_to_python["event"]["otherFields"]["pnfSerialNumber"]
- return vendor[:3].upper() + serial_number
+ def create_invalid_notification(self, json_file):
+ return self.create_pnf_ready_notification(json_file).replace("\":", "\": ")\
+ .replace("ipaddress-v4-oam", "oamV4IpAddress").replace("ipaddress-v6-oam", "oamV6IpAddress")\
+ .replace("}", "\\\\n}")
diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/docker-compose.yml b/test/csit/tests/dcaegen2/prh-testcases/resources/docker-compose.yml
index b1f84fda2..67921e8e0 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/resources/docker-compose.yml
+++ b/test/csit/tests/dcaegen2/prh-testcases/resources/docker-compose.yml
@@ -1,12 +1,15 @@
version: '3'
services:
prh:
- image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.services.prh.prh-app-server
+ image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.services.prh.prh-app-server:latest
command: >
- --dmaap.dmaapConsumerConfiguration.dmaapPortNumber=2222
- --dmaap.dmaapProducerConfiguration.dmaapPortNumber=2222
- --aai.aaiClientConfiguration.aaiHostPortNumber=3333
- --aai.aaiClientConfiguration.aaiProtocol=http
+ --dmaap.dmaapConsumerConfiguration.dmaapHostName=dmaap
+ --dmaap.dmaapConsumerConfiguration.dmaapPortNumber=2222
+ --dmaap.dmaapProducerConfiguration.dmaapHostName=dmaap
+ --dmaap.dmaapProducerConfiguration.dmaapPortNumber=2222
+ --aai.aaiClientConfiguration.aaiHostPortNumber=3333
+ --aai.aaiClientConfiguration.aaiHost=aai
+ --aai.aaiClientConfiguration.aaiProtocol=http
entrypoint:
- java
- -Dspring.profiles.active=dev
@@ -18,10 +21,10 @@ services:
- "8433:8433"
container_name: prh
depends_on:
- - dmaap_simulator
- - aai_simulator
+ - dmaap
+ - aai
- dmaap_simulator:
+ dmaap:
build:
context: simulator
dockerfile: DMaaP_simulator
@@ -29,7 +32,7 @@ services:
- "2222:2222"
container_name: dmaap_simulator
- aai_simulator:
+ aai:
build:
context: simulator
dockerfile: AAI_simulator
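
Renaming the simulator services to dmaap and aai makes the hostnames passed on the PRH command line resolvable through the compose network's built-in DNS. A quick check, assuming it is run inside a container attached to that network (service names do not resolve from the host):

    # Run inside a container on the same compose network; names are resolved by
    # Docker's embedded DNS, not by the host resolver.
    import socket

    for service in ("dmaap", "aai"):
        print(service, "->", socket.gethostbyname(service))
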
diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/prh_library.robot b/test/csit/tests/dcaegen2/prh-testcases/resources/prh_library.robot
index 10bc26c18..fa8c0d052 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/resources/prh_library.robot
+++ b/test/csit/tests/dcaegen2/prh-testcases/resources/prh_library.robot
@@ -1,6 +1,7 @@
*** Settings ***
Library RequestsLibrary
Library Collections
+Library PrhLibrary.py
*** Keywords ***
Create header
@@ -17,8 +18,10 @@ Invalid event processing
[Arguments] ${input_invalid_event_in_dmaap}
[Timeout] 30s
Set event in DMaaP ${input_invalid_event_in_dmaap}
- Wait Until Keyword Succeeds 100x 100ms Check PRH log INFO 1 --- [pool-2-thread-1] o.o.d.s.prh.tasks.DmaapConsumerTaskImpl \ : Consumed model from DmaaP: ${input_invalid_event_in_dmaap}
-
+ ${invalid_notification}= Create invalid notification ${input_invalid_event_in_dmaap}
+ ${notification}= Catenate SEPARATOR= \\\\n |org.onap.dcaegen2.services.prh.exceptions.DmaapNotFoundException: Incorrect json, consumerDmaapModel can not be created: ${invalid_notification}
+ Wait Until Keyword Succeeds 100x 100ms Check PRH log ${notification}
+
Valid event processing
[Arguments] ${input_valid_event_in_dmaap}
[Timeout] 30s
diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI.py b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI.py
index e70d8d30f..c57903c30 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI.py
+++ b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI.py
@@ -7,6 +7,7 @@ pnfs = 'Empty'
class AAIHandler(BaseHTTPRequestHandler):
+
def do_PUT(self):
if re.search('/set_pnfs', self.path):
global pnfs
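
AAI.py follows the usual BaseHTTPRequestHandler simulator pattern: a PUT on /set_pnfs primes the data that later GETs return. A stripped-down sketch of that pattern; the 3333 port mirrors the aaiHostPortNumber in the compose file above, and the real simulator differs in detail:

    import re
    from http.server import BaseHTTPRequestHandler, HTTPServer

    stored = b'Empty'

    class Handler(BaseHTTPRequestHandler):
        def do_PUT(self):
            global stored
            if re.search('/set_pnfs', self.path):
                length = int(self.headers['Content-Length'])
                stored = self.rfile.read(length)   # prime the canned response
            self.send_response(200)
            self.end_headers()

        def do_GET(self):
            self.send_response(200)
            self.send_header('Content-Type', 'application/json')
            self.end_headers()
            self.wfile.write(stored)               # replay whatever was primed

    if __name__ == '__main__':
        HTTPServer(('', 3333), Handler).serve_forever()
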
diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI_simulator b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI_simulator
index 013cd0a65..89a266ebe 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI_simulator
+++ b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI_simulator
@@ -1,4 +1,12 @@
-FROM python:3
+FROM alpine:3.8
+
+RUN apk add --no-cache python3 && \
+ python3 -m ensurepip && \
+ rm -r /usr/lib/python*/ensurepip && \
+ pip3 install --upgrade pip setuptools && \
+ if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \
+ if [[ ! -e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi && \
+ rm -r /root/.cache
ADD AAI.py /
diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP.py b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP.py
index 210378421..96e22a141 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP.py
+++ b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP.py
@@ -8,6 +8,7 @@ received_event_to_get_method = 'Empty'
class DMaaPHandler(BaseHTTPRequestHandler):
+
def do_PUT(self):
if re.search('/set_get_event', self.path):
global received_event_to_get_method
@@ -27,7 +28,7 @@ class DMaaPHandler(BaseHTTPRequestHandler):
return
def do_GET(self):
- if re.search('/events/unauthenticated.SEC_OTHER_OUTPUT/OpenDcae-c12/c12', self.path):
+ if re.search('/events/unauthenticated.VES_PNFREG_OUTPUT/OpenDcae-c12/c12', self.path):
_header_200_and_json(self)
self.wfile.write(received_event_to_get_method)
elif re.search('/events/pnfReady', self.path):
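
With the consumer path switched to unauthenticated.VES_PNFREG_OUTPUT, a test can prime the simulator over PUT /set_get_event and read the event back on the path PRH polls. A hedged sketch with requests against the published 2222 port; the exact body expected by the simulator's PUT handler is assumed here:

    import requests

    event = '{"event": {"commonEventHeader": {"sourceName": "pnf-test"}}}'
    requests.put('http://localhost:2222/set_get_event', data=event)

    resp = requests.get('http://localhost:2222/events/'
                        'unauthenticated.VES_PNFREG_OUTPUT/OpenDcae-c12/c12')
    print(resp.status_code, resp.text)
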
diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP_simulator b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP_simulator
index cf4160c89..9cf21dc92 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP_simulator
+++ b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP_simulator
@@ -1,4 +1,12 @@
-FROM python:3
+FROM alpine:3.8
+
+RUN apk add --no-cache python3 && \
+ python3 -m ensurepip && \
+ rm -r /usr/lib/python*/ensurepip && \
+ pip3 install --upgrade pip setuptools && \
+ if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \
+ if [[ ! -e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi && \
+ rm -r /root/.cache
ADD DMaaP.py /
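
Both simulator images move from python:3 to alpine:3.8 plus a hand-rolled python3/pip install. A hedged smoke test with the Docker SDK that the resulting image really exposes the python and pip symlinks created in the RUN step; build path and tag are illustrative:

    import docker

    client = docker.from_env()
    client.images.build(
        path='test/csit/tests/dcaegen2/prh-testcases/resources/simulator',
        dockerfile='DMaaP_simulator',
        tag='dmaap-simulator:smoke')
    print(client.containers.run('dmaap-simulator:smoke', 'python --version',
                                remove=True).decode())
    print(client.containers.run('dmaap-simulator:smoke', 'pip --version',
                                remove=True).decode())
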
diff --git a/test/csit/tests/dmaap-buscontroller/with_dr/orig b/test/csit/tests/dmaap-buscontroller/with_dr/orig
new file mode 100755
index 000000000..fcac20263
--- /dev/null
+++ b/test/csit/tests/dmaap-buscontroller/with_dr/orig
@@ -0,0 +1,116 @@
+*** Settings ***
+Library OperatingSystem
+Library RequestsLibrary
+Library requests
+Library Collections
+Library String
+
+*** Variables ***
+${TARGET_URL} https://${DR_PROV_IP}:8443
+${TARGET_URL_FEED} https://${DR_PROV_IP}:8443/feed/1
+${TARGET_URL_SUBSCRIBE} https://${DR_PROV_IP}:8443/subscribe/1
+${TARGET_URL_SUBSCRIPTION} https://${DR_PROV_IP}:8443/subs/1
+${TARGET_URL_PUBLISH} https://${DR_NODE_IP}:8443/publish/1/csit_test
+${CREATE_FEED_DATA} {"name": "CSIT_Test", "version": "m1.0", "description": "CSIT_Test", "business_description": "CSIT_Test", "suspend": false, "deleted": false, "changeowner": true, "authorization": {"classification": "unclassified", "endpoint_addrs": [], "endpoint_ids": [{"password": "rs873m", "id": "rs873m"}]}}
+${UPDATE_FEED_DATA} {"name": "CSIT_Test", "version": "m1.0", "description": "UPDATED-CSIT_Test", "business_description": "CSIT_Test", "suspend": true, "deleted": false, "changeowner": true, "authorization": {"classification": "unclassified", "endpoint_addrs": [], "endpoint_ids": [{"password": "rs873m", "id": "rs873m"}]}}
+${SUBSCRIBE_DATA} {"delivery":{ "url":"https://${DR_PROV_IP}:8080/", "user":"rs873m", "password":"rs873m", "use100":true}, "metadataOnly":false, "suspend":false, "groupid":29, "subscriber":"sg481n"}
+${UPDATE_SUBSCRIPTION_DATA} {"delivery":{ "url":"https://${DR_PROV_IP}:8080/", "user":"sg481n", "password":"sg481n", "use100":true}, "metadataOnly":false, "suspend":true, "groupid":29, "subscriber":"sg481n"}
+${FEED_CONTENT_TYPE} application/vnd.att-dr.feed
+${SUBSCRIBE_CONTENT_TYPE} application/vnd.att-dr.subscription
+${PUBLISH_FEED_CONTENT_TYPE} application/octet-stream
+
+*** Test Cases ***
+Run Feed Creation
+ [Documentation] Feed Creation
+ [Timeout] 1 minute
+ ${resp}= PostCall ${TARGET_URL} ${CREATE_FEED_DATA} ${FEED_CONTENT_TYPE} rs873m
+ log ${TARGET_URL}
+ log ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 201
+ log 'JSON Response Code:'${resp}
+
+Run Subscribe to Feed
+ [Documentation] Subscribe to Feed
+ [Timeout] 1 minute
+ ${resp}= PostCall ${TARGET_URL_SUBSCRIBE} ${SUBSCRIBE_DATA} ${SUBSCRIBE_CONTENT_TYPE} sg481n
+ log ${TARGET_URL_SUBSCRIBE}
+ log ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 201
+ log 'JSON Response Code:'${resp}
+
+Run Publish Feed
+ [Documentation] Publish to Feed
+ [Timeout] 1 minute
+ Sleep 10s Behaviour was noticed where feed was not created in time for publish to be sent
+ ${resp}= PutCall ${TARGET_URL_PUBLISH} ${CREATE_FEED_DATA} ${PUBLISH_FEED_CONTENT_TYPE} rs873m
+ log ${TARGET_URL_PUBLISH}
+ log ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 204
+ log 'JSON Response Code:'${resp}
+
+Run Update Subscription
+ [Documentation] Update Subscription to suspend and change delivery credentials
+ [Timeout] 1 minute
+ ${resp}= PutCall ${TARGET_URL_SUBSCRIPTION} ${UPDATE_SUBSCRIPTION_DATA} ${SUBSCRIBE_CONTENT_TYPE} sg481n
+ log ${TARGET_URL_SUBSCRIPTION}
+ log ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ log 'JSON Response Code:'${resp}
+ ${resp}= GetCall ${TARGET_URL_SUBSCRIPTION} ${SUBSCRIBE_CONTENT_TYPE} sg481n
+ log ${resp.text}
+ Should Contain ${resp.text} "password":"sg481n","user":"sg481n"
+ log 'JSON Response Code:'${resp}
+
+Run Update Feed
+ [Documentation] Update Feed description and suspend
+ [Timeout] 1 minute
+ ${resp}= PutCall ${TARGET_URL_FEED} ${UPDATE_FEED_DATA} ${FEED_CONTENT_TYPE} rs873m
+ log ${TARGET_URL_FEED}
+ log ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ log 'JSON Response Code:'${resp}
+ ${resp}= GetCall ${TARGET_URL_FEED} ${FEED_CONTENT_TYPE} rs873m
+ log ${resp.text}
+ Should Contain ${resp.text} "UPDATED-CSIT_Test"
+ log 'JSON Response Code:'${resp}
+
+Run Delete Subscription
+ [Documentation] Delete Subscription
+ [Timeout] 1 minute
+ ${resp}= DeleteCall ${TARGET_URL_SUBSCRIPTION} sg481n
+ log ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 204
+ log 'JSON Response Code:'${resp}
+
+Run Delete Feed
+ [Documentation] Delete Feed
+ [Timeout] 1 minute
+ ${resp}= DeleteCall ${TARGET_URL_FEED} rs873m
+ log ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 204
+ log 'JSON Response Code:'${resp}
+
+*** Keywords ***
+PostCall
+ [Arguments] ${url} ${data} ${content_type} ${user}
+ ${headers}= Create Dictionary X-ATT-DR-ON-BEHALF-OF=${user} Content-Type=${content_type}
+ ${resp}= Evaluate requests.post('${url}',data='${data}', headers=${headers},verify=False) requests
+ [Return] ${resp}
+
+PutCall
+ [Arguments] ${url} ${data} ${content_type} ${user}
+ ${headers}= Create Dictionary X-ATT-DR-ON-BEHALF-OF=${user} Content-Type=${content_type} Authorization=Basic cnM4NzNtOnJzODczbQ==
+ ${resp}= Evaluate requests.put('${url}',data='${data}', headers=${headers},verify=False) requests
+ [Return] ${resp}
+
+GetCall
+ [Arguments] ${url} ${content_type} ${user}
+ ${headers}= Create Dictionary X-ATT-DR-ON-BEHALF-OF=${user} Content-Type=${content_type}
+ ${resp}= Evaluate requests.get('${url}', headers=${headers},verify=False) requests
+ [Return] ${resp}
+
+DeleteCall
+ [Arguments] ${url} ${user}
+ ${headers}= Create Dictionary X-ATT-DR-ON-BEHALF-OF=${user}
+ ${resp}= Evaluate requests.delete('${url}', headers=${headers},verify=False) requests
+ [Return] ${resp}
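
The keywords above shell out to requests through Evaluate; the same calls in plain Python look like this, with the X-ATT-DR-ON-BEHALF-OF header and TLS verification disabled for the self-signed DR certificates:

    import requests

    def post_call(url, data, content_type, user):
        headers = {'X-ATT-DR-ON-BEHALF-OF': user, 'Content-Type': content_type}
        return requests.post(url, data=data, headers=headers, verify=False)

    def put_call(url, data, content_type, user, authorization=None):
        headers = {'X-ATT-DR-ON-BEHALF-OF': user, 'Content-Type': content_type}
        if authorization:      # e.g. the Basic credentials used by Run Publish Feed
            headers['Authorization'] = authorization
        return requests.put(url, data=data, headers=headers, verify=False)

    # e.g. post_call('https://<DR_PROV_IP>:8443', CREATE_FEED_DATA,
    #                'application/vnd.att-dr.feed', 'rs873m')
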
diff --git a/test/csit/tests/dmaap-buscontroller/with_dr/test1.robot b/test/csit/tests/dmaap-buscontroller/with_dr/test1.robot
new file mode 100644
index 000000000..a3aef42b8
--- /dev/null
+++ b/test/csit/tests/dmaap-buscontroller/with_dr/test1.robot
@@ -0,0 +1,143 @@
+*** Settings ***
+Resource ../../common.robot
+Library Collections
+Library json
+Library OperatingSystem
+Library RequestsLibrary
+Library HttpLibrary.HTTP
+Library String
+
+
+*** Variables ***
+${MESSAGE} Hello, world!
+${DBC_URI} webapi
+${DBC_URL} http://${DMAAPBC_IP}:8080/${DBC_URI}
+${LOC} csit-sanfrancisco
+${PUB_CORE} "dcaeLocationName": "${LOC}", "clientRole": "org.onap.dmaap.client.pub", "action": [ "pub", "view" ]
+${SUB_CORE} "dcaeLocationName": "${LOC}", "clientRole": "org.onap.dmaap.client.sub", "action": [ "sub", "view" ]
+${PUB} { ${PUB_CORE} }
+${SUB} { ${SUB_CORE} }
+${FEED1_DATA} { "feedName":"feed1", "feedVersion": "csit", "feedDescription":"generated for CSIT", "owner":"dgl", "asprClassification": "unclassified" }
+${FEED2_DATA} { "feedName":"feed2", "feedVersion": "csit", "feedDescription":"generated for CSIT", "owner":"dgl", "asprClassification": "unclassified" }
+${PUB2_DATA} { "dcaeLocationName": "${LOC}", "username": "pub2", "userpwd": "topSecret123", "feedId": "2" }
+${SUB2_DATA} { "dcaeLocationName": "${LOC}", "username": "sub2", "userpwd": "someSecret123", "deliveryURL": "https://${DMAAPBC_IP}:8443/webapi/noURI", "feedId": "2" }
+${TOPIC2_DATA} { "topicName":"singleMRtopic2", "topicDescription":"generated for CSIT", "owner":"dgl", "clients": [ ${PUB}, ${SUB}] }
+${TOPIC3_DATA} { "topicName":"singleMRtopic3", "topicDescription":"generated for CSIT", "owner":"dgl"}
+#${PUB3_DATA} { "fqtn": "${TOPIC_NS}.singleMRtopic3", ${PUB_CORE} }
+#${SUB3_DATA} { "fqtn": "${TOPIC_NS}.singleMRtopic3", ${SUB_CORE} }
+
+
+
+*** Test Cases ***
+Url Test
+ [Documentation] Check if www.onap.org can be reached
+ Create Session sanity http://onap.readthedocs.io
+ ${resp}= Get Request sanity /
+ Should Be Equal As Integers ${resp.status_code} 200
+
+(DMAAP-441c1)
+ [Documentation] Create Feed w no clients POST ${DBC_URI}/feeds endpoint
+ ${resp}= PostCall ${DBC_URL}/feeds ${FEED1_DATA}
+ Should Be Equal As Integers ${resp.status_code} 200
+
+(DMAAP-441c2)
+ [Documentation] Create Feed w clients POST ${DBC_URI}/feeds endpoint
+ ${resp}= PostCall ${DBC_URL}/feeds ${FEED2_DATA}
+ Should Be Equal As Integers ${resp.status_code} 200
+
+(DMAAP-441c3)
+ [Documentation] Add Publisher to existing feed
+ ${resp}= PostCall ${DBC_URL}/dr_pubs ${PUB2_DATA}
+ Should Be Equal As Integers ${resp.status_code} 201
+ ${tmp}= Get Json Value ${resp.text} /pubId
+ ${tmp}= Remove String ${tmp} \"
+ Set Suite Variable ${pubId} ${tmp}
+
+(DMAAP-441c4)
+ [Documentation] Add Subscriber to existing feed
+ ${resp}= PostCall ${DBC_URL}/dr_subs ${SUB2_DATA}
+ Should Be Equal As Integers ${resp.status_code} 201
+ ${tmp}= Get Json Value ${resp.text} /subId
+ ${tmp}= Remove String ${tmp} \"
+ Set Suite Variable ${subId} ${tmp}
+
+(DMAAP-443)
+ [Documentation] List existing feeds
+ Create Session get ${DBC_URL}
+ ${resp}= Get Request get /feeds
+ Should Be Equal As Integers ${resp.status_code} 200
+
+(DMAAP-444)
+ [Documentation] Delete existing subscriber
+ ${resp}= DelCall ${DBC_URL}/dr_subs/${subId}
+ Should Be Equal As Integers ${resp.status_code} 204
+
+(DMAAP-445)
+ [Documentation] Delete existing publisher
+ ${resp}= DelCall ${DBC_URL}/dr_pubs/${pubId}
+ Should Be Equal As Integers ${resp.status_code} 204
+
+#(DMAAP-294)
+# [Documentation] Create Topic w pub and sub clients POST ${DBC_URI}/topics endpoint
+# ${resp}= PostCall ${DBC_URL}/topics ${TOPIC2_DATA}
+# Should Be Equal As Integers ${resp.status_code} 201
+#
+#(DMAAP-295)
+# [Documentation] Create Topic w no clients and then add a client POST ${DBC_URI}/mr_clients endpoint
+# ${resp}= PostCall ${DBC_URL}/topics ${TOPIC3_DATA}
+# Should Be Equal As Integers ${resp.status_code} 201
+# ${resp}= PostCall ${DBC_URL}/mr_clients ${PUB3_DATA}
+# Should Be Equal As Integers ${resp.status_code} 200
+# ${resp}= PostCall ${DBC_URL}/mr_clients ${SUB3_DATA}
+# Should Be Equal As Integers ${resp.status_code} 200
+#
+#(DMAAP-297)
+# [Documentation] Query for all topics and specific topic
+# Create Session get ${DBC_URL}
+# ${resp}= Get Request get /topics
+# Should Be Equal As Integers ${resp.status_code} 200
+# ${resp}= Get Request get /topics/${TOPIC_NS}.singleMRtopic3
+# Should Be Equal As Integers ${resp.status_code} 200
+#
+#(DMAAP-301)
+# [Documentation] Delete a subscriber
+# Create Session get ${DBC_URL}
+# ${resp}= Get Request get /topics/${TOPIC_NS}.singleMRtopic3
+# Should Be Equal As Integers ${resp.status_code} 200
+# ${tmp}= Get Json Value ${resp.text} /clients/1/mrClientId
+# ${clientId}= Remove String ${tmp} \"
+# ${resp}= DelCall ${DBC_URL}/mr_clients/${clientId}
+# Should Be Equal As Integers ${resp.status_code} 204
+#
+#(DMAAP-302)
+# [Documentation] Delete a publisher
+# Create Session get ${DBC_URL}
+# ${resp}= Get Request get /topics/${TOPIC_NS}.singleMRtopic3
+# Should Be Equal As Integers ${resp.status_code} 200
+# ${tmp}= Get Json Value ${resp.text} /clients/0/mrClientId
+# ${clientId}= Remove String ${tmp} \"
+# ${resp}= DelCall ${DBC_URL}/mr_clients/${clientId}
+# Should Be Equal As Integers ${resp.status_code} 204
+
+
+*** Keywords ***
+CheckDir
+ [Arguments] ${path}
+ Directory Should Exist ${path}
+
+CheckUrl
+ [Arguments] ${session} ${path} ${expect}
+ ${resp}= Get Request ${session} ${path}
+ Should Be Equal As Integers ${resp.status_code} ${expect}
+
+PostCall
+ [Arguments] ${url} ${data}
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= Evaluate requests.post('${url}',data='${data}', headers=${headers},verify=False) requests
+ [Return] ${resp}
+
+DelCall
+ [Arguments] ${url}
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= Evaluate requests.delete('${url}', headers=${headers},verify=False) requests
+ [Return] ${resp}
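
For reference, the same bus-controller flow driven directly with requests: create a feed, add a publisher, then delete it again, pulling the generated id out of the JSON response. The host placeholder, payloads and field names follow the suite above:

    import requests

    base = 'http://<DMAAPBC_IP>:8080/webapi'           # same base as ${DBC_URL}
    headers = {'Accept': 'application/json', 'Content-Type': 'application/json'}

    feed = requests.post(base + '/feeds', headers=headers, json={
        'feedName': 'feed2', 'feedVersion': 'csit',
        'feedDescription': 'generated for CSIT', 'owner': 'dgl',
        'asprClassification': 'unclassified'})
    assert feed.status_code == 200

    pub = requests.post(base + '/dr_pubs', headers=headers, json={
        'dcaeLocationName': 'csit-sanfrancisco', 'username': 'pub2',
        'userpwd': 'topSecret123', 'feedId': '2'})
    assert pub.status_code == 201
    pub_id = pub.json()['pubId']                       # field the suite extracts

    assert requests.delete(base + '/dr_pubs/%s' % pub_id,
                           headers=headers).status_code == 204
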
diff --git a/test/csit/tests/music/music-suite/music-test.robot b/test/csit/tests/music/music-suite/music-test.robot
index 9f8e435c8..9fc937e49 100644
--- a/test/csit/tests/music/music-suite/music-test.robot
+++ b/test/csit/tests/music/music-suite/music-test.robot
@@ -5,6 +5,9 @@ Library json
*** Variables ***
${MESSAGE} {"ping": "ok"}
+${BASIC} Basic
+${AUTHVALUE} bXVzaWM6bXVzaWM=
+${Authorization} ${BASIC} ${AUTHVALUE}
#global variables
${generatedAID}
@@ -60,7 +63,7 @@ Music AddOnBoarding
[Documentation] It sends a REST POST request to Music to Onboard a new application
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
${data}= Get Binary File ${CURDIR}${/}data${/}onboard.json
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} Content-Type=application/json Accept=application/json
${resp}= Post Request musicaas /MUSIC/rest/v2/admin/onboardAppWithMusic data=${data} headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -75,7 +78,7 @@ Music CreateKeyspace
[Documentation] It sends a REST POST request to Music to create a new keyspace in Cassandra
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
${data}= Get Binary File ${CURDIR}${/}data${/}createkeyspace.json
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Post Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace data=${data} headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -86,7 +89,7 @@ Music CreateTable
[Documentation] It sends a REST POST request to Music to create a new Table in Cassandra
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
${data}= Get Binary File ${CURDIR}${/}data${/}createtable.json
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Post Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable data=${data} headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -97,7 +100,7 @@ Music InsertRow
[Documentation] It sends a REST POST request to Music to create a new row in Cassandra
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
${data}= Get Binary File ${CURDIR}${/}data${/}insertrow_eventual.json
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Post Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows/?row=emp1 data=${data} headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -107,7 +110,7 @@ Music InsertRow
Music ReadRowJustInserted
[Documentation] It sends a REST GET request to Music to Read the row just inserted in Cassandra
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Get Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows?name=emp1 headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -118,7 +121,7 @@ Music UpdateRowInAtomicWay
[Documentation] It sends a REST PUT request to Music to create a new row in Cassandra
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
${data}= Get Binary File ${CURDIR}${/}data${/}updaterow_atomic.json
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Put Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows?name=emp1 data=${data} headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -128,7 +131,7 @@ Music UpdateRowInAtomicWay
Music ReadRowAfterUpdate
[Documentation] It sends a REST GET request to Music to Read the row just inserted in Cassandra
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Get Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows?name=emp1 headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -139,7 +142,7 @@ Music DeleteRow
[Documentation] It sends a REST DELETE request to Music to delete a row in Cassandra
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
${data}= Get Binary File ${CURDIR}${/}data${/}deleterow_eventual.json
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Delete Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows?name=emp1 data=${data} headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -150,7 +153,7 @@ Music DropTable
[Documentation] It sends a REST Delete request to Music to drop one existing Table in Cassandra
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
${data}= Get Binary File ${CURDIR}${/}data${/}droptable.json
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Delete Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable data=${data} headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -161,7 +164,7 @@ Music DropKeyspace
[Documentation] It sends a REST DELETE request to Music to drop one existing keyspace in Cassandra
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
${data}= Get Binary File ${CURDIR}${/}data${/}dropkeyspace.json
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Delete Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace data=${data} headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
@@ -173,7 +176,7 @@ Music DeleteOnBoarding
[Documentation] It sends a REST DELETE request to Music to remove a previosly onboarded application
Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT}
${data}= Get Binary File ${CURDIR}${/}data${/}onboard.json
- &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json
+ &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json
${resp}= Delete Request musicaas /MUSIC/rest/v2/admin/onboardAppWithMusic data=${data} headers=${headers}
Log To Console *********************
Log To Console response = ${resp}
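
The suite now sends a single Basic Authorization header instead of separate userId/password headers; the ${AUTHVALUE} above is simply base64 of music:music, as this short check shows:

    import base64

    print(base64.b64encode(b'music:music').decode())   # -> bXVzaWM6bXVzaWM=

    headers = {
        'ns': 'lb7254',
        'Authorization': 'Basic ' + base64.b64encode(b'music:music').decode(),
        'Content-Type': 'application/json',
        'Accept': 'application/json',
    }
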
diff --git a/test/csit/tests/policy/suite1/global_properties.robot b/test/csit/tests/policy/suite1/global_properties.robot
index f406bbf3d..911fdaff9 100644
--- a/test/csit/tests/policy/suite1/global_properties.robot
+++ b/test/csit/tests/policy/suite1/global_properties.robot
@@ -22,9 +22,9 @@ ${GLOBAL_AAI_CLOUD_OWNER} Rackspace
${GLOBAL_BUILD_NUMBER} 31
${GLOBAL_VM_PRIVATE_KEY} ${EXECDIR}/robot/assets/keys/robot_ssh_private_key.pvt
# policy info - everything is from the private oam network (also called ecomp private network)
-${GLOBAL_POLICY_SERVER_URL} http://%{PDP_IP}:8081
+${GLOBAL_POLICY_SERVER_URL} https://%{PDP_IP}:8081
${GLOBAL_POLICY_AUTH} dGVzdHBkcDphbHBoYTEyMw==
${GLOBAL_POLICY_CLIENTAUTH} cHl0aG9uOnRlc3Q=
${GLOBAL_POLICY_HEALTHCHECK_URL} http://%{POLICY_IP}:6969
${GLOBAL_POLICY_USERNAME} healthcheck
-${GLOBAL_POLICY_PASSWORD} zb!XztG34
\ No newline at end of file
+${GLOBAL_POLICY_PASSWORD} zb!XztG34
diff --git a/test/csit/tests/vfc/nfvo-wfengine/workflow.robot b/test/csit/tests/vfc/nfvo-wfengine/workflow.robot
index c9dbe6c46..8039ae177 100644
--- a/test/csit/tests/vfc/nfvo-wfengine/workflow.robot
+++ b/test/csit/tests/vfc/nfvo-wfengine/workflow.robot
@@ -80,34 +80,34 @@ UnDeploy BPMN File Testt On MgrService
${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId}
Should Be Equal ${resp.status_code} ${200}
-Deploy BPMN File Test On MSB
- [Documentation] Check if the test bpmn file can be deployed in activiti engine
- ${auth}= Create List kermit kermit
- ${headers}= Create Dictionary Accept=application/json
- Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
- ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
- ${resp}= Post Request web_session api/workflow/v1/package files=${files}
- Should Be Equal ${resp.status_code} ${200}
- Log ${resp.json()}
- ${deployedId}= Set Variable ${resp.json()["deployedId"]}
- Set Global Variable ${deployedId}
+# Deploy BPMN File Test On MSB
+# [Documentation] Check if the test bpmn file can be deployed in activiti engine
+# ${auth}= Create List kermit kermit
+# ${headers}= Create Dictionary Accept=application/json
+# Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
+# ${files}= evaluate {"file":open('${bmpfilepath}','rb')}
+# ${resp}= Post Request web_session api/workflow/v1/package files=${files}
+# Should Be Equal ${resp.status_code} ${200}
+# Log ${resp.json()}
+# ${deployedId}= Set Variable ${resp.json()["deployedId"]}
+# Set Global Variable ${deployedId}
-Exectue BPMN File Testt On MSB
- [Documentation] Check if the test bpmn file can be exectued in MSB
- ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
- Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers}
- ${body} Create Dictionary processDefinitionKey=${processId}
- ${body} dumps ${body}
- ${resp}= Post Request web_session api/workflow/v1/process/instance ${body}
- Should Be Equal ${resp.status_code} ${200}
- Log ${resp.json()}
- Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId}
+# Exectue BPMN File Testt On MSB
+# [Documentation] Check if the test bpmn file can be exectued in MSB
+# ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA==
+# Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers}
+# ${body} Create Dictionary processDefinitionKey=${processId}
+# ${body} dumps ${body}
+# ${resp}= Post Request web_session api/workflow/v1/process/instance ${body}
+# Should Be Equal ${resp.status_code} ${200}
+# Log ${resp.json()}
+# Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId}
-UnDeploy BPMN File Testt On MSB
- [Documentation] Check if the test bpmn file can be undeployed in MSB
- log ${deployedId}
- ${auth}= Create List kermit kermit
- ${headers} Create Dictionary Content-Type=application/json Accept=application/json
- Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
- ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId}
- Should Be Equal ${resp.status_code} ${200}
+# UnDeploy BPMN File Testt On MSB
+# [Documentation] Check if the test bpmn file can be undeployed in MSB
+# log ${deployedId}
+# ${auth}= Create List kermit kermit
+# ${headers} Create Dictionary Content-Type=application/json Accept=application/json
+# Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth}
+# ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId}
+# Should Be Equal ${resp.status_code} ${200}
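
The commented-out MSB cases exercise the same deploy/execute/undeploy flow as the MgrService cases. In plain Python with requests it amounts to the following; endpoints and the kermit credentials come from the suite, placeholders are left for elided values:

    import requests

    base = 'http://<MSB_IP>:<MSB_PORT>'
    auth = ('kermit', 'kermit')        # same Basic credentials as in the suite

    with open('<path-to-bpmn-file>', 'rb') as bpmn:
        deployed_id = requests.post(base + '/api/workflow/v1/package',
                                    files={'file': bpmn},
                                    auth=auth).json()['deployedId']

    run = requests.post(base + '/api/workflow/v1/process/instance',
                        json={'processDefinitionKey': '<processId>'}, auth=auth)
    print(run.status_code, run.json())

    requests.delete(base + '/api/workflow/v1/package/' + deployed_id, auth=auth)
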
diff --git a/test/csit/tests/vid/resources/simulators/SDC_simulator b/test/csit/tests/vid/resources/simulators/SDC_simulator
index ec67e9fec..c099787dc 100644
--- a/test/csit/tests/vid/resources/simulators/SDC_simulator
+++ b/test/csit/tests/vid/resources/simulators/SDC_simulator
@@ -1,4 +1,12 @@
-FROM python:3
+FROM alpine:latest
+
+RUN apk add --no-cache python3 && \
+ python3 -m ensurepip && \
+ rm -r /usr/lib/python*/ensurepip && \
+ pip3 install --upgrade pip setuptools && \
+ if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \
+ if [[ ! -e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi && \
+ rm -r /root/.cache
ADD SDC.py /
diff --git a/test/ete/labs/gwu/onap-openstack-template.env b/test/ete/labs/gwu/onap-openstack-template.env
index 3071356e6..53139b542 100644
--- a/test/ete/labs/gwu/onap-openstack-template.env
+++ b/test/ete/labs/gwu/onap-openstack-template.env
@@ -46,9 +46,7 @@ parameters:
openstack_username: ${OS_USERNAME}
- openstack_api_key: ${OS_PASSWORD}
-
- openstack_auth_method: password
+ openstack_api_key: ${OS_PASSWORD_ENCRYPTED}
openstack_region: RegionOne
diff --git a/test/ete/labs/huawei/onap-openstack-template.env b/test/ete/labs/huawei/onap-openstack-template.env
index c682dc5cf..e6e2a2cbf 100644
--- a/test/ete/labs/huawei/onap-openstack-template.env
+++ b/test/ete/labs/huawei/onap-openstack-template.env
@@ -46,9 +46,7 @@ parameters:
openstack_username: ${OS_USERNAME}
- openstack_api_key: ${OS_PASSWORD}
-
- openstack_auth_method: password
+ openstack_api_key: ${OS_PASSWORD_ENCRYPTED}
openstack_region: RegionOne
diff --git a/test/ete/labs/tlab/onap-openstack-template.env b/test/ete/labs/tlab/onap-openstack-template.env
index feded7faf..dcdb7d574 100644
--- a/test/ete/labs/tlab/onap-openstack-template.env
+++ b/test/ete/labs/tlab/onap-openstack-template.env
@@ -46,9 +46,7 @@ parameters:
openstack_username: ${OS_USERNAME}
- openstack_api_key: ${OS_PASSWORD}
-
- openstack_auth_method: password
+ openstack_api_key: ${OS_PASSWORD_ENCRYPTED}
openstack_region: RegionOne
diff --git a/test/ete/labs/windriver/onap-openstack-template.env b/test/ete/labs/windriver/onap-openstack-template.env
index b18bd62c1..90d901d79 100644
--- a/test/ete/labs/windriver/onap-openstack-template.env
+++ b/test/ete/labs/windriver/onap-openstack-template.env
@@ -46,9 +46,7 @@ parameters:
openstack_username: ${OS_USERNAME}
- openstack_api_key: ${OS_PASSWORD}
-
- openstack_auth_method: password
+ openstack_api_key: ${OS_PASSWORD_ENCRYPTED}
openstack_region: RegionOne
diff --git a/test/ete/scripts/deploy-onap.sh b/test/ete/scripts/deploy-onap.sh
index 2fd05562a..6c8d05d41 100755
--- a/test/ete/scripts/deploy-onap.sh
+++ b/test/ete/scripts/deploy-onap.sh
@@ -19,6 +19,8 @@ fi
source $WORKSPACE/test/ete/scripts/install_openstack_cli.sh
+SO_ENCRYPTION_KEY=aa3871669d893c7fb8abbcda31b88b4f
+export OS_PASSWORD_ENCRYPTED=$(echo -n "$OS_PASSWORD" | openssl aes-128-ecb -e -K "$SO_ENCRYPTION_KEY" -nosalt | xxd -c 256 -p)
DEMO_DIR=${ONAP_WORKDIR}/demo
if [ "$#" -ge 2 ]; then
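
The exported OS_PASSWORD_ENCRYPTED is produced with AES-128-ECB under the fixed SO key and hex-encoded. A hedged Python equivalent using the cryptography package, handy for checking a value offline; openssl applies PKCS#7 padding by default, reproduced here:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import padding
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    def encrypt_os_password(os_password, key_hex='aa3871669d893c7fb8abbcda31b88b4f'):
        padder = padding.PKCS7(128).padder()
        padded = padder.update(os_password.encode()) + padder.finalize()
        enc = Cipher(algorithms.AES(bytes.fromhex(key_hex)), modes.ECB(),
                     backend=default_backend()).encryptor()
        return (enc.update(padded) + enc.finalize()).hex()   # matches the xxd -p output
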
diff --git a/test/ete/scripts/teardown-onap.sh b/test/ete/scripts/teardown-onap.sh
index 61e643b64..77b8233fa 100755
--- a/test/ete/scripts/teardown-onap.sh
+++ b/test/ete/scripts/teardown-onap.sh
@@ -14,14 +14,14 @@ while getopts ":rqn:" o; do
if [ $answer = "y" ] || [ $answer = "Y" ] || [ $answer = "yes" ] || [ $answer = "Yes"]; then
echo "This may delete the work of other colleages within the same enviroment"
read -p "Are you certain this is what you want? (type y to confirm):" answer2
-
+
if [ $answer2 = "y" ] || [ $answer2 = "Y" ] || [ $answer2 = "yes" ] || [ $answer2 = "Yes"]; then
full_deletion=true
- else
+ else
echo "Ending program"
exit 1
fi
- else
+ else
echo "Ending program"
exit 1
fi
@@ -46,7 +46,7 @@ fi
source $WORKSPACE/test/ete/scripts/install_openstack_cli.sh
-if [ "$full_deletion" = true ];then
+if [ "$full_deletion" = true ];then
echo "Commencing delete, press CRTL-C to stop"
sleep 10
@@ -92,19 +92,21 @@ if [ "$full_deletion" = true ];then
echo "No existing stacks to delete."
fi
-else
- #Restrained teardown
+else
+ #Restrained teardown
echo "Restrained teardown"
-
+
STACK=$install_name
- if [ ! -z "${STACK}" ]; then
+ STATUS=$(openstack stack check $STACK)
+
+ if [ "Stack not found: $install_name" != "$STATUS" ]; then
openstack stack delete $STACK
-
+
until [ "DELETE_IN_PROGRESS" != "$(openstack stack show -c stack_status -f value $STACK)" ]; do
sleep 2
done
else
echo "No existing stack with the name $install_name."
fi
-fi
\ No newline at end of file
+fi
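
The restrained-teardown branch now probes the stack with openstack stack check before deleting and polls until the status leaves DELETE_IN_PROGRESS. A hedged subprocess rendering of that loop:

    import subprocess
    import time

    def restrained_teardown(name):
        probe = subprocess.run(['openstack', 'stack', 'check', name],
                               capture_output=True, text=True)
        if 'Stack not found' in (probe.stdout + probe.stderr):
            print('No existing stack with the name %s.' % name)
            return
        # --yes skips the CLI confirmation prompt
        subprocess.run(['openstack', 'stack', 'delete', '--yes', name], check=True)
        while True:
            status = subprocess.run(
                ['openstack', 'stack', 'show', '-c', 'stack_status', '-f', 'value', name],
                capture_output=True, text=True).stdout.strip()
            if status != 'DELETE_IN_PROGRESS':
                break
            time.sleep(2)
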
diff --git a/test/mocks/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml b/test/mocks/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml
new file mode 100644
index 000000000..d9acc9e37
--- /dev/null
+++ b/test/mocks/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml
@@ -0,0 +1,129 @@
+description: Heat template that deploys PnP PNF simulator
+heat_template_version: '2013-05-23'
+parameters:
+ flavor_name: {description: Type of instance (flavor) to be used, label: Flavor,
+ type: string}
+ image_name: {description: Image to be used for compute instance, label: Image name
+ or ID, type: string}
+ key_name: {description: Public/Private key pair name, label: Key pair name, type: string}
+ public_net_id: {description: Public network that enables remote connection to VNF,
+ label: Public network name or ID, type: string}
+ private_net_id: {type: string, description: Private network id, label: Private network name or ID}
+ private_subnet_id: {type: string, description: Private subnet id, label: Private subnetwork name or ID}
+ proxy: {type: string, description: Proxy, label: Proxy, default: ""}
+resources:
+ PNF_PnP_simualtor:
+ type: OS::Nova::Server
+ properties:
+ key_name: { get_param: key_name }
+ image: { get_param: image_name }
+ flavor: { get_param: flavor_name }
+ networks:
+ - port: { get_resource: PNF_PnP_simualtor_port0 }
+ user_data_format: RAW
+ user_data:
+ str_replace:
+ template: |
+ #!/bin/bash
+
+ set_versions () {
+ DOCKER_COMPOSE_VERSION=1.22.0
+ }
+
+
+ enable_root_ssh () {
+ sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config
+ sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config
+ service sshd restart
+ echo -e "arthur\narthur" | passwd root
+ }
+
+ update_os () {
+ dnf -y install fedora-upgrade
+ }
+
+ docker_install_configure () {
+ dnf -y remove docker \
+ docker-client \
+ docker-client-latest \
+ docker-common \
+ docker-latest \
+ docker-latest-logrotate \
+ docker-logrotate \
+ docker-selinux \
+ docker-engine-selinux \
+ docker-engine
+ dnf -y install dnf-plugins-core
+ dnf config-manager \
+ --add-repo \
+ https://download.docker.com/linux/fedora/docker-ce.repo
+ dnf -y install docker-ce
+ systemctl start docker
+ mkdir -p /etc/systemd/system/docker.service.d/
+ cat > /etc/systemd/system/docker.service.d/override.conf<< EOF
+ [Service]
+ Environment="HTTP_PROXY=$proxy"
+ Environment="HTTPS_PROXY=$proxy"
+ EOF
+ systemctl daemon-reload
+ systemctl restart docker
+ docker login -u docker -p docker nexus3.onap.org:10003
+ }
+ docker_compose_install () {
+ curl -L https://github.com/docker/compose/releases/download/$DOCKER_COMPOSE_VERSION/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose
+ chmod +x /usr/local/bin/docker-compose
+ }
+ pnf_sim_file_checkout () {
+ mkdir ~/sim/
+ mkdir ~/sim/ssh
+ cd ~/sim/ssh/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/ssh/ssh_host_rsa_key;hb=HEAD" -O ssh_host_rsa_key
+ cd ~/sim/ssh/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/ssh/ssh_host_rsa_key.pub;hb=HEAD" -O ssh_host_rsa_key.pub
+ mkdir ~/sim/sftp
+ cd ~/sim/sftp/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/sftp/sftp-file.txt;hb=HEAD" -O sftp-file.txt
+ mkdir ~/sim/config
+ cd ~/sim/config/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/config/config.json;hb=HEAD" -O config.json
+ cd ~/sim/config/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/config/netconf.env;hb=HEAD" -O netconf.env
+ mkdir ~/sim/json_schema
+ cd ~/sim/config/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/json_schema/input_validator.json;hb=HEAD" -O input_validator.json
+ cd ~/sim/config/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/json_schema/output_validator_ves_schema_30.0.1.json;hb=HEAD" -O output_validator_ves_schema_30.0.1.json
+ mkdir ~/sim/netconf
+ cd ~/sim/netconf/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/netconf/pnf-simulator.data.xml;hb=HEAD" -O pnf-simulator.data.xml
+ cd ~/sim/netconf/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/netconf/pnf-simulator.yang;hb=HEAD" -O pnf-simulator.yang
+ cd ~/sim/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/docker-compose.yml;hb=HEAD" -O docker-compose.yml
+ cd ~/sim/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/simulator.sh;hb=HEAD" -O simulator.sh
+ chmod 654 ~/sim/simulator.sh
+ }
+
+ start_simulator (){
+ ~/sim/simulator.sh start
+ }
+
+ set_versions
+ enable_root_ssh
+ update_os
+ docker_install_configure
+ docker_compose_install
+ pnf_sim_file_checkout
+ start_simulator
+ params:
+ $proxy: { get_param: proxy }
+ PNF_PnP_simualtor_port0:
+ type: OS::Neutron::Port
+ properties:
+ network_id: { get_param: private_net_id }
+ security_groups:
+ - default
+ fixed_ips:
+ - subnet_id: { get_param: private_subnet_id }
+ PNF_PnP_simualtor_public:
+ type: OS::Neutron::FloatingIP
+ properties:
+ floating_network_id: { get_param: public_net_id }
+ port_id: { get_resource: PNF_PnP_simualtor_port0 }
+outputs:
+ PNF_PnP_simualtor_private_ip:
+ description: IP address of PNF_PnP_simualtor in private network
+ value: { get_attr: [ PNF_PnP_simualtor, first_address ] }
+ PNF_PnP_simualtor_public_ip:
+ description: Floating IP address of PNF_PnP_simualtor in public network
+ value: { get_attr: [ PNF_PnP_simualtor_public, floating_ip_address ] }
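
The template can be launched with the standard heat workflow; a hedged sketch with the openstack CLI, passing the parameters the template declares (values are placeholders for the target lab, and the image must be Fedora-based since the user_data script uses dnf):

    import subprocess

    subprocess.run([
        'openstack', 'stack', 'create', '--wait',
        '-t', 'test/mocks/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml',
        '--parameter', 'image_name=<fedora-image>',
        '--parameter', 'flavor_name=<flavor>',
        '--parameter', 'key_name=<keypair>',
        '--parameter', 'public_net_id=<public-net>',
        '--parameter', 'private_net_id=<private-net>',
        '--parameter', 'private_subnet_id=<private-subnet>',
        '--parameter', 'proxy=http://<proxy-host>:<port>',
        'PNF_PnP_simulator',
    ], check=True)
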
diff --git a/version-manifest/src/main/resources/docker-manifest-staging.csv b/version-manifest/src/main/resources/docker-manifest-staging.csv
index 309580637..0b338ab3f 100644
--- a/version-manifest/src/main/resources/docker-manifest-staging.csv
+++ b/version-manifest/src/main/resources/docker-manifest-staging.csv
@@ -26,7 +26,7 @@ onap/data-router,1.2.2
onap/dmaap/buscontroller,1.0.12
onap/dmaap/dmaap-mr,1.1.6
onap/dmaap/kafka01101,0.0.1
-onap/externalapi/nbi,2.0.0
+onap/externalapi/nbi,3.0.0-latest
onap/gizmo,1.2.1
onap/holmes/engine-management,1.2.0-STAGING-latest
onap/holmes/rule-management,1.2.0-STAGING-latest
@@ -43,8 +43,8 @@ onap/music/cassandra_music,3.0.0
onap/music/music,2.5.3
onap/music/prom,1.0.5-latest
onap/oom/kube2msb,1.1.0
-onap/optf-has,1.1.1
-onap/optf-osdf,1.1.1
+onap/optf-has,1.2.1-STAGING-latest
+onap/optf-osdf,1.2.1-STAGING-latest
onap/org.onap.dcaegen2.collectors.snmptrap,1.3.0
onap/org.onap.dcaegen2.collectors.ves.vescollector,1.2.0
onap/org.onap.dcaegen2.deployments.bootstrap,1.1.3
diff --git a/version-manifest/src/main/resources/docker-manifest.csv b/version-manifest/src/main/resources/docker-manifest.csv
index 40baea280..9e1123344 100644
--- a/version-manifest/src/main/resources/docker-manifest.csv
+++ b/version-manifest/src/main/resources/docker-manifest.csv
@@ -25,7 +25,7 @@ onap/cli,2.0.2
onap/data-router,1.2.2
onap/dmaap/buscontroller,1.0.12
onap/dmaap/dmaap-mr,1.1.4
-onap/externalapi/nbi,2.0.0
+onap/externalapi/nbi,2.1.1
onap/gizmo,1.2.1
onap/holmes/engine-management,1.1.0
onap/holmes/rule-management,1.1.0