summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--plans/policy/api/setup.sh2
-rw-r--r--plans/policy/distribution/setup.sh4
-rw-r--r--plans/policy/pap/setup.sh31
-rw-r--r--plans/policy/pap/teardown.sh (renamed from scripts/vvp/start_vvp_sanity.sh)16
-rw-r--r--plans/policy/pap/testplan.txt (renamed from plans/vvp/sanity/testplan.txt)2
-rwxr-xr-x[-rw-r--r--]plans/sdnc/healthcheck/setup.sh11
-rw-r--r--plans/usecases/5G-bulkpm/README.txt55
-rw-r--r--plans/usecases/5G-bulkpm/composefile/docker-compose-e2e.yml8
-rw-r--r--plans/usecases/5G-bulkpm/composefile/onap.docker-compose-e2e19
-rw-r--r--plans/usecases/5G-bulkpm/onap.teardown.sh8
-rw-r--r--plans/usecases/5G-bulkpm/setup.sh92
-rw-r--r--plans/vvp/sanity/setup.sh36
-rw-r--r--plans/vvp/sanity/teardown.sh20
-rw-r--r--scripts/vvp/clone_and_setup_vvp_data.sh110
-rw-r--r--scripts/vvp/docker_health.sh33
-rw-r--r--scripts/vvp/kill_containers_and_remove_dataFolders.sh31
-rw-r--r--scripts/vvp/start_vvp_containers.sh93
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json8
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json8
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json8
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_empty_addtional_fields.json17
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json8
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_additional_fields.json11
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json8
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json8
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json8
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json8
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/event_without_IPV6_field.json8
-rw-r--r--tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json6
-rw-r--r--tests/dcaegen2/prh-testcases/resources/PrhLibrary.py8
-rw-r--r--tests/policy/api/api-test.robot10
-rw-r--r--tests/policy/distribution/distribution-test.robot4
-rw-r--r--tests/policy/pap/pap-test.robot28
-rw-r--r--tests/usecases/5G-bulkpm/BulkpmE2E.robot22
-rw-r--r--tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json2
-rw-r--r--tests/usecases/5G-bulkpm/assets/metadata.schema.json74
-rw-r--r--tests/usecases/5G-bulkpm/resources/JsonValidatorLibrary.py38
-rw-r--r--tests/usecases/5G-bulkpm/resources/bulkpm_keywords.robot1
-rw-r--r--tests/vvp/sanity/__init__.robot2
-rw-r--r--tests/vvp/sanity/test1.robot19
40 files changed, 493 insertions, 392 deletions
diff --git a/plans/policy/api/setup.sh b/plans/policy/api/setup.sh
index dcfcb9ea..5ba95e9a 100644
--- a/plans/policy/api/setup.sh
+++ b/plans/policy/api/setup.sh
@@ -28,4 +28,4 @@ for i in {1..10}; do
sleep $i
done
-ROBOT_VARIABLES="-v POLICY_API_IP:${POLICY_API_IP}" \ No newline at end of file
+ROBOT_VARIABLES="-v POLICY_API_IP:${POLICY_API_IP}"
diff --git a/plans/policy/distribution/setup.sh b/plans/policy/distribution/setup.sh
index 9b894e3d..40a15d12 100644
--- a/plans/policy/distribution/setup.sh
+++ b/plans/policy/distribution/setup.sh
@@ -17,11 +17,11 @@
# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=========================================================
-docker run -d --name policy-distribution -p 6969:6969 -it nexus3.onap.org:10001/onap/policy-distribution:2.0.0-SNAPSHOT-latest
+docker run -d --name policy-distribution -p 6969:6969 -it nexus3.onap.org:10001/onap/policy-distribution:2.1.0-SNAPSHOT-latest
POLICY_DISTRIBUTION_IP=`get-instance-ip.sh policy-distribution`
echo DISTRIBUTION IP IS ${POLICY_DISTRIBUTION_IP}
-Wait for initialization
+# Wait for initialization
for i in {1..10}; do
curl -sS ${POLICY_DISTRIBUTION_IP}:6969 && break
echo sleep $i
diff --git a/plans/policy/pap/setup.sh b/plans/policy/pap/setup.sh
new file mode 100644
index 00000000..44a205a1
--- /dev/null
+++ b/plans/policy/pap/setup.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+# ============LICENSE_START=======================================================
+# Copyright (C) 2019 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+docker run -d --name policy-pap -p 6969:6969 -it nexus3.onap.org:10001/onap/policy-pap:2.0.0-SNAPSHOT-latest
+
+POLICY_PAP_IP=`get-instance-ip.sh policy-pap`
+echo PAP IP IS ${POLICY_PAP_IP}
+# Wait for initialization
+for i in {1..10}; do
+ curl -sS ${POLICY_PAP_IP}:6969 && break
+ echo sleep $i
+ sleep $i
+done
+
+ROBOT_VARIABLES="-v POLICY_PAP_IP:${POLICY_PAP_IP}"
diff --git a/scripts/vvp/start_vvp_sanity.sh b/plans/policy/pap/teardown.sh
index 1de1aaa3..877b164a 100644
--- a/scripts/vvp/start_vvp_sanity.sh
+++ b/plans/policy/pap/teardown.sh
@@ -1,26 +1,20 @@
#!/bin/bash
-#
# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-# reserved.
+# Copyright (C) 2019 Nordix Foundation.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
-
-# will run CI for sanity checks
+kill-instance.sh policy-pap
diff --git a/plans/vvp/sanity/testplan.txt b/plans/policy/pap/testplan.txt
index 4957ef6f..6a5aa205 100644
--- a/plans/vvp/sanity/testplan.txt
+++ b/plans/policy/pap/testplan.txt
@@ -1,3 +1,3 @@
# Test suites are relative paths under [integration/csit.git]/tests/.
# Place the suites in run order.
-vvp/sanity
+policy/pap/pap-test.robot
diff --git a/plans/sdnc/healthcheck/setup.sh b/plans/sdnc/healthcheck/setup.sh
index dfbd32cc..7a66351f 100644..100755
--- a/plans/sdnc/healthcheck/setup.sh
+++ b/plans/sdnc/healthcheck/setup.sh
@@ -24,8 +24,8 @@ export NEXUS_USERNAME=docker
export NEXUS_PASSWD=docker
export NEXUS_DOCKER_REPO=nexus3.onap.org:10001
export DMAAP_TOPIC=AUTO
-export DOCKER_IMAGE_VERSION=1.4-STAGING-latest
-export CCSDK_DOCKER_IMAGE_VERSION=0.3-STAGING-latest
+export DOCKER_IMAGE_VERSION=1.5-STAGING-latest
+export CCSDK_DOCKER_IMAGE_VERSION=0.4-STAGING-latest
export MTU=$(/sbin/ifconfig | grep MTU | sed 's/.*MTU://' | sed 's/ .*//' | sort -n | head -1)
@@ -100,9 +100,8 @@ while [ "$TIME" -lt "$TIME_OUT" ]; do
docker exec sdnc_controller_container rm -f /opt/opendaylight/current/etc/host.key
response=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client system:start-level)
docker exec sdnc_controller_container rm -f /opt/opendaylight/current/etc/host.key
-num_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | tail -1 | cut -d\| -f1)
- if [ "$response" == "Level 100" ] && [ "$num_bundles" -ge 333 ]; then
+ if [ "$response" == "Level 100" ] ; then
echo SDNC karaf started in $TIME seconds
break;
fi
@@ -117,10 +116,8 @@ if [ "$TIME" -ge "$TIME_OUT" ]; then
fi
response=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client system:start-level)
-num_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | tail -1 | cut -d\| -f1)
- if [ "$response" == "Level 100" ] && [ "$num_bundles" -ge 333 ]; then
- num_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | tail -1 | cut -d\| -f1)
+ if [ "$response" == "Level 100" ] ; then
num_failed_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | grep Failure | wc -l)
failed_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | grep Failure)
echo There is/are $num_failed_bundles failed bundles out of $num_bundles installed bundles.
diff --git a/plans/usecases/5G-bulkpm/README.txt b/plans/usecases/5G-bulkpm/README.txt
new file mode 100644
index 00000000..1d0fc415
--- /dev/null
+++ b/plans/usecases/5G-bulkpm/README.txt
@@ -0,0 +1,55 @@
+###################################################################################################################
+By executing the below commands it will change the CSIT test from executing on a Docker environment to an ONAP one.
+###################################################################################################################
+
+1) Login to an ONAP instance, switch user and verify that the command kubectl executes before proceeding.
+# sudo -s
+# kubectl get svc -n onap| grep dcae
+
+2) Clone the csit repository
+# git clone https://gerrit.onap.org/r/integration/csit
+
+3) Install docker-compose
+# sudo apt-get update
+# sudo curl -L https://github.com/docker/compose/releases/download/1.22.0/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose
+# sudo chmod +x /usr/local/bin/docker-compose
+# docker-compose --version
+
+4) Install the packages required for the RobotFramework.
+# apt install python-pip
+# sudo apt install python-pip virtualenv unzip sshuttle netcat libffi-dev libssl-dev
+# sudo pip install robotframework
+# sudo pip install -U requests
+# sudo pip install -U robotframework-requests
+
+5) Expose the Ves-collector
+# kubectl expose svc dcae-ves-collector --type=LoadBalancer --name=vesc -n onap
+ service "vesc" exposed
+
+6) Verify the Ves-collector is exposed
+# kubectl get svc -n onap | grep vesc
+ vesc LoadBalancer 10.43.203.47 10.209.63.55 8080:31835/TCP 1m
+
+7) Modify the file setup.sh and make the below change
+# cd csit
+# vi plans/usecases/5G-bulkpm/setup.sh
+CSIT=TRUE
+ to
+CSIT=FALSE
+
+8) Execute the Bulk PM e2e csit.
+# ./run-csit.sh plans/usecases/5G-bulkpm/
+
+
+--> Troubleshooting
+If the Test case "Verify Default Feed And File Consumer Subscription On Datarouter" is hanging, quit the test and execute the below
+Get the DR-PROV IP address
+# kubectl -n onap -o=wide get pods | grep dmaap-dr-prov | awk '{print $6}'
+ 10.42.123.76
+Make sure there are no feeds
+# curl -k https://10.42.123.76:8443/internal/prov
+
+If there are feeds, delete them
+curl -X DELETE -H "Content-Type:application/vnd.att-dr.subscription" -H "X-ATT-DR-ON-BEHALF-OF:dradmin" -k https://10.42.123.76:8443/subs/XX
+
+Where XX is the number of the feeds in the previous command.
diff --git a/plans/usecases/5G-bulkpm/composefile/docker-compose-e2e.yml b/plans/usecases/5G-bulkpm/composefile/docker-compose-e2e.yml
index c5567d81..05ccb70d 100644
--- a/plans/usecases/5G-bulkpm/composefile/docker-compose-e2e.yml
+++ b/plans/usecases/5G-bulkpm/composefile/docker-compose-e2e.yml
@@ -1,7 +1,7 @@
version: '2.1'
services:
datarouter-prov:
- image: nexus3.onap.org:10001/onap/dmaap/datarouter-prov
+ image: nexus3.onap.org:10001/onap/dmaap/datarouter-prov:2.0.0-SNAPSHOT
container_name: datarouter-prov
hostname: dmaap-dr-prov
ports:
@@ -22,7 +22,7 @@ services:
retries: 5
datarouter-node:
- image: nexus3.onap.org:10001/onap/dmaap/datarouter-node
+ image: nexus3.onap.org:10001/onap/dmaap/datarouter-node:2.0.0-SNAPSHOT
container_name: datarouter-node
hostname: dmaap-dr-node
ports:
@@ -35,7 +35,7 @@ services:
condition: service_healthy
datarouter-subscriber:
- image: nexus3.onap.org:10001/onap/dmaap/datarouter-subscriber
+ image: nexus3.onap.org:10001/onap/dmaap/datarouter-subscriber:2.0.0-SNAPSHOT
container_name: fileconsumer-node
hostname: subscriber.com
ports:
@@ -70,7 +70,7 @@ services:
dfc:
container_name: dfc
- image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server:latest
+ image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server:1.1.1
ports:
- "8433:8433"
diff --git a/plans/usecases/5G-bulkpm/composefile/onap.docker-compose-e2e b/plans/usecases/5G-bulkpm/composefile/onap.docker-compose-e2e
new file mode 100644
index 00000000..1c05ca5b
--- /dev/null
+++ b/plans/usecases/5G-bulkpm/composefile/onap.docker-compose-e2e
@@ -0,0 +1,19 @@
+version: '2.1'
+services:
+ datarouter-subscriber:
+ image: nexus3.onap.org:10001/onap/dmaap/datarouter-subscriber:2.0.0-SNAPSHOT
+ container_name: fileconsumer-node
+ hostname: subscriber.com
+ ports:
+ - "7070:7070"
+ volumes:
+ - ../subscriber_data/subscriber.properties:/opt/app/subscriber/etc/subscriber.properties
+
+ sftp:
+ container_name: sftp
+ image: atmoz/sftp
+ ports:
+ - "2222:22"
+ volumes:
+ - /host/upload:/home/admin
+ command: admin:admin:1001 \ No newline at end of file
diff --git a/plans/usecases/5G-bulkpm/onap.teardown.sh b/plans/usecases/5G-bulkpm/onap.teardown.sh
new file mode 100644
index 00000000..966be45d
--- /dev/null
+++ b/plans/usecases/5G-bulkpm/onap.teardown.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+echo "Starting teardown script"
+DFC_POD=$(kubectl -n onap get pods | grep datafile-collector | awk '{print $1}')
+kubectl -n onap exec $DFC_POD -it cat /opt/log/application.log > /tmp/dfc_docker.log
+cat /tmp/dfc_docker.log
+sleep 3
+kill-instance.sh fileconsumer-node
+kill-instance.sh sftp \ No newline at end of file
diff --git a/plans/usecases/5G-bulkpm/setup.sh b/plans/usecases/5G-bulkpm/setup.sh
index 47e2532f..5f3c4a3c 100644
--- a/plans/usecases/5G-bulkpm/setup.sh
+++ b/plans/usecases/5G-bulkpm/setup.sh
@@ -2,6 +2,18 @@
# Place the scripts in run order:
source ${SCRIPTS}/common_functions.sh
+CSIT=TRUE
+if [ ${CSIT} = "TRUE" ] ; then
+####################################################
+#Executes the below setup in a Docker Environment  #
+####################################################
+
+echo "CSIT Test get executed in here"
+SFTP_PORT=22
+VESC_PORT=8080
+export VESC_PORT=${VESC_PORT}
+export CLI_EXEC_CLI_DFC="docker exec dfc /bin/sh -c \"ls /target | grep .gz\""
+
# Clone DMaaP Message Router repo
mkdir -p $WORKSPACE/archives/dmaapmr
cd $WORKSPACE/archives/dmaapmr
@@ -116,7 +128,7 @@ export HOST_IP=${HOST_IP}
export DMAAP_MR_IP=${DMAAP_MR_IP}
#Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP} -v DMAAP_MR_IP:${DMAAP_MR_IP} -v VESC_IP:${VESC_IP} -v DR_SUBSCIBER_IP:${DR_SUBSCIBER_IP}"
+ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP} -v DMAAP_MR_IP:${DMAAP_MR_IP} -v VESC_IP:${VESC_IP} -v VESC_PORT:${VESC_PORT} -v DR_SUBSCIBER_IP:${DR_SUBSCIBER_IP}"
pip install jsonschema uuid
# Wait container ready
@@ -141,6 +153,7 @@ docker exec dfc /bin/sh -c "echo '${DR_NODE_IP}' dmaap-dr-node >> /etc/hosts"
# Update the File Ready Notification with actual sftp ip address and copy pm files to sftp server.
cp $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
sed -i 's/sftpserver/'${SFTP_IP}'/g' $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
+sed -i 's/sftpport/'${SFTP_PORT}'/g' $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
docker cp $WORKSPACE/plans/usecases/5G-bulkpm/assets/xNF.pm.xml.gz sftp:/home/admin/
# Data Router Configuration:
@@ -151,3 +164,80 @@ sed -i 's/fileconsumer/'${DR_SUBSCIBER_IP}'/g' /tmp/addSubscriber.json
curl -v -X POST -H "Content-Type:application/vnd.dmaap-dr.subscription" -H "X-DMAAP-DR-ON-BEHALF-OF:dradmin" --data-ascii @/tmp/addSubscriber.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443/subscribe/1
sleep 10
curl -k https://$DR_PROV_IP:8443/internal/prov
+
+else
+############################################################
+############################################################
+# Executes the below setup in an ONAP Environment #
+# Make sure the steps in the README are completed first !! #
+############################################################
+############################################################
+SFTP_PORT=2222
+
+cp $WORKSPACE/plans/usecases/5G-bulkpm/teardown.sh $WORKSPACE/plans/usecases/5G-bulkpm/teardown.sh.orig
+cp $WORKSPACE/plans/usecases/5G-bulkpm/onap.teardown.sh $WORKSPACE/plans/usecases/5G-bulkpm/teardown.sh
+
+#Get DataFileCollector POD name in this ONAP Deployment
+DFC_POD=$(kubectl -n onap get pods | grep datafile-collector | awk '{print $1}')
+export DFC_POD=${DFC_POD}
+export CLI_EXEC_CLI_DFC="kubectl exec -n onap ${DFC_POD} -it ls /target | grep .gz"
+
+# Get IP address of datarrouter-prov
+DR_PROV_IP=$(kubectl -n onap -o wide get pods | grep dmaap-dr-prov | awk '{print $6}')
+echo DR_PROV_IP=${DR_PROV_IP}
+DR_NODE_IP=$(kubectl -n onap -o=wide get pods | grep dmaap-dr-node | awk '{print $6}')
+echo DR_NODE_IP=${DR_NODE_IP}
+
+# Get IP address of exposed Ves and its port
+DMAAP_MR_IP=$(kubectl -n onap -o=wide get pods | grep dev-dmaap-message-router | grep -Ev "kafka|zookeeper" | awk '{print $6}')
+VESC_IP=$(kubectl get svc -n onap | grep vesc | awk '{print $4}')
+VESC_PORT=$(kubectl get svc -n onap | grep vesc | awk '{print $5}' | cut -d ":" -f2 | cut -d "/" -f1)
+echo VESC_IP=${VESC_IP}
+echo VESC_PORT=${VESC_PORT}
+
+export VESC_IP=${VESC_IP}
+export VESC_PORT=${VESC_PORT}
+export HOST_IP=${HOST_IP}
+export DMAAP_MR_IP=${DMAAP_MR_IP}
+
+#Get DataFileCollector POD name in this ONAP Deployment
+DFC_POD=$(kubectl -n onap get pods | grep datafile-collector | awk '{print $1}')
+export DFC_POD=${DFC_POD}
+
+pip install jsonschema uuid
+
+# Clone DMaaP Data Router repo
+mkdir -p $WORKSPACE/archives/dmaapdr
+cd $WORKSPACE/archives/dmaapdr
+git clone --depth 1 https://gerrit.onap.org/r/dmaap/datarouter -b master
+cd $WORKSPACE/archives/dmaapdr/datarouter/datarouter-docker-compose/src/main/resources
+mkdir docker-compose
+cd $WORKSPACE/archives/dmaapdr/datarouter/datarouter-docker-compose/src/main/resources/docker-compose
+cp $WORKSPACE/plans/usecases/5G-bulkpm/composefile/onap.docker-compose-e2e $WORKSPACE/archives/dmaapdr/datarouter/datarouter-docker-compose/src/main/resources/docker-compose/docker-compose.yml
+
+#Start up the SFTP and FileConsumer containers.
+docker login -u docker -p docker nexus3.onap.org:10001
+docker-compose up -d
+
+# Wait container ready
+sleep 2
+HOST_IP=$(ip route get 8.8.8.8 | awk '/8.8.8.8/ {print $NF}')
+# SFTP Configuration:
+# Update the File Ready Notification with actual sftp ip address and copy pm files to sftp server.
+cp $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
+sed -i 's/sftpserver/'${HOST_IP}'/g' $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
+sed -i 's/sftpport/'${SFTP_PORT}'/g' $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
+docker cp $WORKSPACE/plans/usecases/5G-bulkpm/assets/xNF.pm.xml.gz sftp:/home/admin/
+
+# Create default feed and create file consumer subscriber on data router
+curl -v -X POST -H "Content-Type:application/vnd.att-dr.feed" -H "X-ATT-DR-ON-BEHALF-OF:dradmin" --data-ascii @$WORKSPACE/plans/usecases/5G-bulkpm/assets/createFeed.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443
+cp $WORKSPACE/plans/usecases/5G-bulkpm/assets/addSubscriber.json /tmp/addSubscriber.json
+sed -i 's/fileconsumer/'${HOST_IP}'/g' /tmp/addSubscriber.json
+curl -v -X POST -H "Content-Type:application/vnd.att-dr.subscription" -H "X-ATT-DR-ON-BEHALF-OF:dradmin" --data-ascii @/tmp/addSubscriber.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443/subscribe/1
+sleep 10
+curl -k https://$DR_PROV_IP:8443/internal/prov
+
+#Pass any variables required by Robot test suites in ROBOT_VARIABLES
+ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP} -v DMAAP_MR_IP:${DMAAP_MR_IP} -v VESC_IP:${VESC_IP} -v VESC_PORT:${VESC_PORT} -v DR_SUBSCIBER_IP:${DR_SUBSCIBER_IP} -v DFC_POD:${DFC_POD} -v HOST_IP:${HOST_IP} "
+
+fi; \ No newline at end of file
diff --git a/plans/vvp/sanity/setup.sh b/plans/vvp/sanity/setup.sh
deleted file mode 100644
index 12bb6011..00000000
--- a/plans/vvp/sanity/setup.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2016-2017 Huawei Technologies Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Modifications copyright (c) 2017 AT&T Intellectual Property
-#
-# Place the scripts in run order:
-
-
-source ${WORKSPACE}/scripts/vvp/clone_and_setup_vvp_data.sh
-
-source ${WORKSPACE}/scripts/vvp/start_vvp_containers.sh
-
-source ${WORKSPACE}/scripts/vvp/docker_health.sh
-
-source ${WORKSPACE}/scripts/vvp/start_vvp_sanity.sh
-
-
-VVP_IP=`${WORKSPACE}/scripts/get-instance-ip.sh vvp-engagementmgr`
-echo VVP_IP=${VVP_IP}
-
-
-# Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v VVP_IP:${VVP_IP}"
diff --git a/plans/vvp/sanity/teardown.sh b/plans/vvp/sanity/teardown.sh
deleted file mode 100644
index 3369c029..00000000
--- a/plans/vvp/sanity/teardown.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2016-2017 Huawei Technologies Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Modifications copyright (c) 2017 AT&T Intellectual Property
-#
-
-source ${WORKSPACE}/scripts/vvp/kill_containers_and_remove_dataFolders.sh
diff --git a/scripts/vvp/clone_and_setup_vvp_data.sh b/scripts/vvp/clone_and_setup_vvp_data.sh
deleted file mode 100644
index 866a82e6..00000000
--- a/scripts/vvp/clone_and_setup_vvp_data.sh
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-# reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-echo "This is ${WORKSPACE}/scripts/vvp/clone_and_setup_vvp_data.sh"
-
-# Clone vvp enviroment template
-mkdir -p ${WORKSPACE}/data/environments/
-mkdir -p ${WORKSPACE}/data/clone/
-mkdir -p /opt/configmaps/settings/
-
-cd ${WORKSPACE}/data/clone
-git clone --depth 1 http://gerrit.onap.org/r/vvp/engagementmgr -b master
-
-chmod -R 775 ${WORKSPACE}/data/
-
-# copy settings file from tox environment infrastructure:
-cp -f ${WORKSPACE}/data/clone/engagementmgr/django/vvp/settings/tox_settings.py /opt/configmaps/settings/__init__.py
-
-# uwsgi.ini file creation
-echo "[uwsgi]
-http = :80
-plugin = python
-chdir = /srv
-module = vvp.wsgi:application
-master = True
-pidfile = /tmp/project-master.pid
-vacuum = True
-max-requests = 5000
-enable-threads = True
-stats = 0.0.0.0:9000
-stats-http = True" > /opt/configmaps/settings/uwsgi.ini
-
-# storage.py file creation
-echo "from storages.backends.s3boto import S3BotoStorage
-from django.conf import settings
-class S3StaticStorage(S3BotoStorage):
- custom_domain = '%s/%s' % (settings.AWS_S3_HOST, settings.STATIC_BUCKET)
- bucket_name = settings.STATIC_BUCKET
-class S3MediaStorage(S3BotoStorage):
- custom_domain = '%s/%s' % (settings.AWS_S3_HOST, settings.MEDIA_BUCKET)
- bucket_name = settings.MEDIA_BUCKET" > /opt/configmaps/settings/storage.py
-
-# envbool.py file creation
-echo "import os
-def envbool(key, default=False, unknown=True):
- return {'true': True, '1': True, 'false': False, '0': False,
- '': default,}.get(os.getenv(key, '').lower(), unknown)" > /opt/configmaps/settings/envbool.py
-
-# vvp_env.list file creation
-echo "# set enviroment variables
-OAUTHLIB_INSECURE_TRANSPORT=1
-HOST_IP=${IP}
-ENVNAME=${ENVIRONMENT}
-http_proxy=${http_proxy}
-https_proxy=${https_proxy}
-no_proxy=${no_proxy}
-DJANGO_SETTINGS_MODULE=vvp.settings
-# export PYTHONPATH={pwd}
-SECRET_KEY=6mo22&FAKEFALEFALEFKEuq0u*4ksk^aq8lte&)yul
-ENVIRONMENT=development
-SERVICE_PROVIDER=ExampleProvider
-PROGRAM_NAME=VVP
-PROGRAM_NAME_URL_PREFIX=vvp
-SERVICE_PROVIDER_DOMAIN=example-domain.com
-EMAIL_HOST=localhost
-EMAIL_HOST_PASSWORD=
-EMAIL_HOST_USER=
-EMAIL_PORT=25
-PGDATABASE=icedb
-PGUSER=iceuser
-PGPASSWORD=Aa123456
-PGHOST=localhost
-PGPORT=5433
-SECRET_WEBHOOK_TOKEN=AiwiFAKEFAKEFAKEmahch2zahshaGi
-SECRET_GITLAB_AUTH_TOKEN=ieNgFAKEFAKE4zohvee9a
-SECRET_JENKINS_PASSWORD=xaiyiFAKEFAKEqueuBu
-SECRET_CMS_APP_CLIENT_ID=MHmJo0ccDhFAKEFAKEFAKEPAC6H6HAMzhCCM16
-SECRET_CMS_APP_CLIENT_SECRET=nI8QFAKEEEpnw5nTs
-SLACK_API_TOKEN=
-S3_HOST=localhost
-S3_PORT=443
-AWS_ACCESS_KEY_ID=FD2FAKEFAKEFAKEVD1MWRN
-AWS_SECRET_ACCESS_KEY=TKoiwxzFAKEFAKEFAKEFAKEFAKEQ27nP2lCiutEsD
-STATIC_ROOT=/app/htdocs" > ${WORKSPACE}/data/environments/vvp_env.list
-
-ifconfig
-
-IP_ADDRESS=`ip route get 8.8.8.8 | awk '/src/{ print $7 }'`
-export HOST_IP=$IP_ADDRESS
diff --git a/scripts/vvp/docker_health.sh b/scripts/vvp/docker_health.sh
deleted file mode 100644
index 520b2dc3..00000000
--- a/scripts/vvp/docker_health.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-# reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-echo "VVP-Engagement-Manager health-Check:"
-echo ""
-echo ""
-res=`curl -s -X GET -H "Accept: application/json" -H "Content-Type: application/json" "http://localhost:9090/vvp/v1/engmgr/vendors" | wc -w`
-if [ ${res} == 0 ]; then
- echo "Error [${res}] while performing vvp engagement manager vendor existance check"
- exit 1
-fi
-echo "check vvp engagement manager vendor existance: OK [${res}]"
diff --git a/scripts/vvp/kill_containers_and_remove_dataFolders.sh b/scripts/vvp/kill_containers_and_remove_dataFolders.sh
deleted file mode 100644
index 38bd3319..00000000
--- a/scripts/vvp/kill_containers_and_remove_dataFolders.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2016-2017 Huawei Technologies Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Modifications copyright (c) 2017 AT&T Intellectual Property
-#
-
-echo "This is ${WORKSPACE}/scripts/vvp/kill_and_remove_dataFolder.sh"
-
-
-CONTAINER_NAME="vvp-engagementmgr"
-
-#kill and remove all vvp dockers
-docker stop $CONTAINER_NAME
-docker rm -f $CONTAINER_NAME
-
-
-#delete data folder
-rm -rf ${WORKSPACE}/data/*
diff --git a/scripts/vvp/start_vvp_containers.sh b/scripts/vvp/start_vvp_containers.sh
deleted file mode 100644
index cafc040c..00000000
--- a/scripts/vvp/start_vvp_containers.sh
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-# reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-echo "This is ${WORKSPACE}/scripts/vvp/start_vvp_containers.sh"
-
-export IP=$HOST_IP
-export PREFIX='nexus3.onap.org:10001/openecomp/vvp'
-export RELEASE='latest'
-
-#start Engagement Manager pod:
-docker run \
---detach \
---entrypoint="" \
---name vvp-engagementmgr \
---env-file ${WORKSPACE}/data/environments/vvp_env.list \
---log-driver=json-file \
---log-opt max-size=100m \
---log-opt max-file=10 \
---ulimit memlock=-1:-1 \
---memory 4g \
---memory-swap=4g \
---ulimit nofile=4096:100000 \
---volume /etc/localtime:/etc/localtime:ro \
---volume /opt/configmaps/settings:/opt/configmaps/settings/ \
---publish 9090:80 ${PREFIX}/engagementmgr:${RELEASE}
-
-docker cp /opt/configmaps/settings/uwsgi.ini vvp-engagementmgr:/srv/vvp/settings/
-
-echo "please wait while Engagement Manager is starting..."
-echo ""
-c=60 # seconds to wait
-REWRITE="\e[25D\e[1A\e[K"
-while [ $c -gt 0 ]; do
- c=$((c-1))
- sleep 1
- echo -e "${REWRITE}$c"
-done
-echo -e ""
-
-#run migration again:
-docker exec -d vvp-engagementmgr sh -c "python3 /srv/manage.py migrate"
-
-#run initial populate db again:
-docker exec -d vvp-engagementmgr sh -c "python3 /srv/manage.py initial_populate_db"
-
-
-echo "Will copy the generated DB sqlite3 file into the application directory in 30 seconds..."
-sleep 30
-#copy the generated DB sqlite3 file into the application directory:
-docker exec -d vvp-engagementmgr sh -c "cp emdb.db /srv/emdb.db -f"
-
-TIME_OUT=600
-INTERVAL=5
-TIME=0
-while [ "$TIME" -lt "$TIME_OUT" ]; do
- response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://localhost:9090/vvp/v1/engmgr/vendors); echo $response
-
- if [ "$response" == "200" ]; then
- echo VVP-Engagement-Manager well started in $TIME seconds
- break;
- fi
-
- echo Sleep: $INTERVAL seconds before testing if VVP-Engagement-Manager is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
- sleep $INTERVAL
- TIME=$(($TIME+$INTERVAL))
-done
-
-if [ "$TIME" -ge "$TIME_OUT" ]; then
- echo TIME OUT: Docker containers not started in $TIME_OUT seconds... Could cause problems for tests...
-else
- echo "Done starting vvp containers!"
-fi
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json
index 2ffe356f..cdcab678 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json
@@ -5,7 +5,13 @@
},
"pnfRegistrationFields": {
"oamV4IpAddress":"10.17.123.234",
- "oamV6IpAddress":""
+ "oamV6IpAddress":"",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion"
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json
index c4a0e727..f5ec23d0 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json
@@ -5,7 +5,13 @@
},
"pnfRegistrationFields": {
"oamV4IpAddress":"",
- "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2e:0370:7334"
+ "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2e:0370:7334",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion"
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json
index 16963e1b..bc9cb1d3 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json
@@ -5,7 +5,13 @@
},
"pnfRegistrationFields": {
"oamV4IpAddress":"10.16.123.234",
- "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"
+ "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion"
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_empty_addtional_fields.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_empty_addtional_fields.json
new file mode 100644
index 00000000..7ca4d0c6
--- /dev/null
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_empty_addtional_fields.json
@@ -0,0 +1,17 @@
+{
+ "event": {
+ "commonEventHeader": {
+ "sourceName":"NOK6061ZW1"
+ },
+ "pnfRegistrationFields": {
+ "oamV4IpAddress":"10.16.123.234",
+ "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+ "serial-number":"",
+ "equip-vendor":"",
+ "equip-model":"",
+ "equip-type":"",
+ "nf-role":"",
+ "sw-version":""
+ }
+ }
+}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json
index 1e3afa9d..4942a3d5 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json
@@ -5,7 +5,13 @@
},
"pnfRegistrationFields": {
"oamV4IpAddress":"",
- "oamV6IpAddress":""
+ "oamV6IpAddress":"",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion"
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_additional_fields.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_additional_fields.json
new file mode 100644
index 00000000..16963e1b
--- /dev/null
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_additional_fields.json
@@ -0,0 +1,11 @@
+{
+ "event": {
+ "commonEventHeader": {
+ "sourceName":"NOK6061ZW1"
+ },
+ "pnfRegistrationFields": {
+ "oamV4IpAddress":"10.16.123.234",
+ "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"
+ }
+ }
+}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json
index 126987fd..5e2a6121 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json
@@ -5,7 +5,13 @@
},
"pnfRegistrationFields": {
"oamV4IpAddress":"10.18.123.234",
- "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2a:0370:7334"
+ "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2a:0370:7334",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion"
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json
index de1f576c..a6a6f364 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json
@@ -5,7 +5,13 @@
},
"pnfRegistrationFields": {
"oamV4IpAddress":"",
- "oamV6IpAddress":""
+ "oamV6IpAddress":"",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion"
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json
index 4838f1b0..6d9eadfe 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json
@@ -5,7 +5,13 @@
},
"pnfRegistrationFields": {
"oamV4IpAddress":"",
- "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2f:0370:7334"
+ "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2f:0370:7334",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion"
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json
index 04ab7ceb..9fac5b19 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json
@@ -5,7 +5,13 @@
},
"pnfRegistrationFields": {
"oamV4IpAddress":"10.17.163.234",
- "oamV6IpAddress":""
+ "oamV6IpAddress":"",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion"
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_without_IPV6_field.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_without_IPV6_field.json
index 0aa0372c..a416bb7e 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/event_without_IPV6_field.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/event_without_IPV6_field.json
@@ -4,7 +4,13 @@
"sourceName":"NOK6061ZW9"
},
"pnfRegistrationFields": {
- "oamV4IpAddress":"10.17.123.24"
+ "oamV4IpAddress":"10.17.123.24",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion"
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json b/tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json
index c87e188f..08d9a49c 100644
--- a/tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json
+++ b/tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json
@@ -6,6 +6,12 @@
"pnfRegistrationFields": {
"oamV4IpAddress":"10.16.123.234",
"oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+ "serial-number":"NOkkaaa123",
+ "equip-vendor":"equipVendor",
+ "equip-model":"equipModel",
+ "equip-type":"equipType",
+ "nf-role":"nf-role",
+ "sw-version":"swVersion",
}
}
}
diff --git a/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py b/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py
index dc589369..d413be58 100644
--- a/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py
+++ b/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py
@@ -24,8 +24,14 @@ class PrhLibrary(object):
json_to_python = json.loads(json_file)
ipv4 = json_to_python.get("event").get("pnfRegistrationFields").get("oamV4IpAddress")
ipv6 = json_to_python.get("event").get("pnfRegistrationFields").get("oamV6IpAddress") if "oamV6IpAddress" in json_to_python["event"]["pnfRegistrationFields"] else ""
+ serial_number = json_to_python.get("event").get("pnfRegistrationFields").get("serial-number") if "serial-number" in json_to_python["event"]["pnfRegistrationFields"] else ""
+ equip_vendor = json_to_python.get("event").get("pnfRegistrationFields").get("equip-vendor") if "equip-vendor" in json_to_python["event"]["pnfRegistrationFields"] else ""
+ equip_model = json_to_python.get("event").get("pnfRegistrationFields").get("equip-model") if "equip-model" in json_to_python["event"]["pnfRegistrationFields"] else ""
+ equip_type = json_to_python.get("event").get("pnfRegistrationFields").get("equip-type") if "equip-type" in json_to_python["event"]["pnfRegistrationFields"] else ""
+ nf_role = json_to_python.get("event").get("pnfRegistrationFields").get("nf-role") if "nf-role" in json_to_python["event"]["pnfRegistrationFields"] else ""
+ sw_version = json_to_python.get("event").get("pnfRegistrationFields").get("sw-version") if "sw-version" in json_to_python["event"]["pnfRegistrationFields"] else ""
correlation_id = json_to_python.get("event").get("commonEventHeader").get("sourceName")
- str_json = '{"correlationId":"' + correlation_id + '","ipaddress-v4-oam":"' + ipv4 + '","ipaddress-v6-oam":"' + ipv6 + '"}'
+ str_json = '{"correlationId":"' + correlation_id + '","ipaddress-v4-oam":"' + ipv4 + '","ipaddress-v6-oam":"' + ipv6 + '","serial-number":"' + serial_number + '","equip-vendor":"' + equip_vendor + '","equip-model":"' + equip_model + '","equip-type":"' + equip_type + '","nf-role":"' + nf_role + '","sw-version":"' + sw_version + '"}'
python_to_json = json.dumps(str_json)
return python_to_json.replace("\\", "")[1:-1]
diff --git a/tests/policy/api/api-test.robot b/tests/policy/api/api-test.robot
index 3753b3d8..7ea24738 100644
--- a/tests/policy/api/api-test.robot
+++ b/tests/policy/api/api-test.robot
@@ -8,8 +8,8 @@ Library json
Healthcheck
[Documentation] Runs Policy Api Health check
${auth}= Create List healthcheck zb!XztG34
- Log Creating session http://${POLICY_API_IP}:6969
- ${session}= Create Session policy http://${POLICY_API_IP}:6969 auth=${auth}
+ Log Creating session https://${POLICY_API_IP}:6969
+ ${session}= Create Session policy https://${POLICY_API_IP}:6969 auth=${auth}
${headers}= Create Dictionary Accept=application/json Content-Type=application/json
${resp}= Get Request policy /healthcheck headers=${headers}
Log Received response from policy ${resp.text}
@@ -19,10 +19,10 @@ Healthcheck
Statistics
[Documentation] Runs Policy Api Statistics
${auth}= Create List healthcheck zb!XztG34
- Log Creating session http://${POLICY_API_IP}:6969
- ${session}= Create Session policy http://${POLICY_API_IP}:6969 auth=${auth}
+ Log Creating session https://${POLICY_API_IP}:6969
+ ${session}= Create Session policy https://${POLICY_API_IP}:6969 auth=${auth}
${headers}= Create Dictionary Accept=application/json Content-Type=application/json
${resp}= Get Request policy /statistics headers=${headers}
Log Received response from policy ${resp.text}
Should Be Equal As Strings ${resp.status_code} 200
- Should Be Equal As Strings ${resp.json()['code']} 200 \ No newline at end of file
+ Should Be Equal As Strings ${resp.json()['code']} 200
diff --git a/tests/policy/distribution/distribution-test.robot b/tests/policy/distribution/distribution-test.robot
index 1b9fa212..2ee11806 100644
--- a/tests/policy/distribution/distribution-test.robot
+++ b/tests/policy/distribution/distribution-test.robot
@@ -8,8 +8,8 @@ Library json
Healthcheck
[Documentation] Runs Policy Distribution Health check
${auth}= Create List healthcheck zb!XztG34
- Log Creating session http://${POLICY_DISTRIBUTION_IP}:6969
- ${session}= Create Session policy http://${POLICY_DISTRIBUTION_IP}:6969 auth=${auth}
+ Log Creating session https://${POLICY_DISTRIBUTION_IP}:6969
+ ${session}= Create Session policy https://${POLICY_DISTRIBUTION_IP}:6969 auth=${auth}
${headers}= Create Dictionary Accept=application/json Content-Type=application/json
${resp}= Get Request policy /healthcheck headers=${headers}
Log Received response from policy ${resp.text}
diff --git a/tests/policy/pap/pap-test.robot b/tests/policy/pap/pap-test.robot
new file mode 100644
index 00000000..7dca5b4d
--- /dev/null
+++ b/tests/policy/pap/pap-test.robot
@@ -0,0 +1,28 @@
+*** Settings ***
+Library Collections
+Library RequestsLibrary
+Library OperatingSystem
+Library json
+
+*** Test Cases ***
+Healthcheck
+ [Documentation] Runs Policy PAP Health check
+ ${auth}= Create List healthcheck zb!XztG34
+ Log Creating session https://${POLICY_PAP_IP}:6969
+ ${session}= Create Session policy https://${POLICY_PAP_IP}:6969 auth=${auth}
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= Get Request policy /healthcheck headers=${headers}
+ Log Received response from policy ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ Should Be Equal As Strings ${resp.json()['code']} 200
+
+Statistics
+ [Documentation] Runs Policy PAP Statistics
+ ${auth}= Create List healthcheck zb!XztG34
+ Log Creating session https://${POLICY_PAP_IP}:6969
+ ${session}= Create Session policy https://${POLICY_PAP_IP}:6969 auth=${auth}
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ ${resp}= Get Request policy /statistics headers=${headers}
+ Log Received response from policy ${resp.text}
+ Should Be Equal As Strings ${resp.status_code} 200
+ Should Be Equal As Strings ${resp.json()['code']} 200
diff --git a/tests/usecases/5G-bulkpm/BulkpmE2E.robot b/tests/usecases/5G-bulkpm/BulkpmE2E.robot
index fcc1cc76..4b85e6ba 100644
--- a/tests/usecases/5G-bulkpm/BulkpmE2E.robot
+++ b/tests/usecases/5G-bulkpm/BulkpmE2E.robot
@@ -8,7 +8,7 @@ Resource resources/bulkpm_keywords.robot
*** Variables ***
-${VESC_URL} http://%{VESC_IP}:8080
+${VESC_URL} http://%{VESC_IP}:%{VESC_PORT}
${GLOBAL_APPLICATION_ID} robot-ves
${VES_ANY_EVENT_PATH} /eventListener/v7
${HEADER_STRING} content-type=application/json
@@ -17,9 +17,13 @@ ${EVENT_DATA_FILE} %{WORKSPACE}/tests/usecases/5G-bulkpm/a
${TARGETURL_TOPICS} http://${DMAAP_MR_IP}:3904/topics
${TARGETURL_SUBSCR} http://${DMAAP_MR_IP}:3904/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12?timeout=1000
${CLI_EXEC_CLI} curl -k https://${DR_PROV_IP}:8443/internal/prov
-${CLI_EXEC_CLI_DFC} docker exec dfc /bin/sh -c "ls /target | grep .gz"
${CLI_EXEC_CLI_FILECONSUMER} docker exec fileconsumer-node /bin/sh -c "ls /opt/app/subscriber/delivery | grep .gz"
+${CLI_EXEC_CLI_FILECONSUMER_CP} docker cp fileconsumer-node:/opt/app/subscriber/delivery/xNF.pm.xml.gz.M %{WORKSPACE}
+${CLI_EXEC_RENAME_METADATA} mv %{WORKSPACE}/xNF.pm.xml.gz.M %{WORKSPACE}/metadata.json
+${metadataSchemaPath} %{WORKSPACE}/tests/usecases/5G-bulkpm/assets/metadata.schema.json
+${metadataJsonPath} %{WORKSPACE}/metadata.json
+
*** Test Cases ***
Send VES File Ready Event to VES Collector
@@ -55,7 +59,7 @@ Check VES Notification Topic is existing in Message Router
Verify Downloaded PM file from xNF exist on Data File Collector
[Tags] Bulk_PM_E2E_03
[Documentation] Check the PM XML file exists on the data file collector
- ${cli_cmd_output}= Run Process ${CLI_EXEC_CLI_DFC} shell=yes
+ ${cli_cmd_output}= Run Process %{CLI_EXEC_CLI_DFC} shell=yes
Log ${cli_cmd_output.stdout}
Should Be Equal As Strings ${cli_cmd_output.rc} 0
Should Contain ${cli_cmd_output.stdout} xNF.pm.xml.gz
@@ -78,3 +82,15 @@ Verify Fileconsumer Receive PM file from Data Router
Log ${cli_cmd_output.stdout}
Should Be Equal As Strings ${cli_cmd_output.rc} 0
Should Contain ${cli_cmd_output.stdout} xNF.pm.xml.gz
+
+Verify File Consumer Receive valid metadata from Data Router
+ [Tags] Bulk_PM_E2E_06
+ [Documentation] Check PM XML file is delivered to the FileConsumer Simulator with valid metadata
+ ${cli_cmd_output}= Run Process ${CLI_EXEC_CLI_FILECONSUMER} shell=yes
+ Log ${cli_cmd_output.stdout}
+ Should Be Equal As Strings ${cli_cmd_output.rc} 0
+ Should Contain ${cli_cmd_output.stdout} xNF.pm.xml.gz.M
+ ${cli_cmd_output}= Run Process ${CLI_EXEC_CLI_FILECONSUMER_CP} shell=yes
+ ${cli_cmd_output}= Run Process ${CLI_EXEC_RENAME_METADATA} shell=yes
+ ${validation_result}= Validate ${metadataSchemaPath} ${metadataJsonPath}
+ Should Be Equal As Strings ${validation_result} 0
diff --git a/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json b/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json
index 4064ea31..d5d8fd05 100644
--- a/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json
+++ b/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json
@@ -21,7 +21,7 @@
"arrayOfNamedHashMap": [
{ "name": "xNF.pm.xml.gz",
"hashMap":{
- "location": "sftp://admin:admin@sftpserver:22/xNF.pm.xml.gz",
+ "location": "sftp://admin:admin@sftpserver:sftpport/xNF.pm.xml.gz",
"compression": "gzip",
"fileFormatType": "org.3GPP.32.435#measCollec",
"fileFormatVersion": "V10"
diff --git a/tests/usecases/5G-bulkpm/assets/metadata.schema.json b/tests/usecases/5G-bulkpm/assets/metadata.schema.json
new file mode 100644
index 00000000..a41b3544
--- /dev/null
+++ b/tests/usecases/5G-bulkpm/assets/metadata.schema.json
@@ -0,0 +1,74 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema",
+ "$id": "metadata.schema.json",
+ "title": "DataRouter PM File Metadata",
+ "description": "Metadata for 3GPP PM files that are placed on the DMaaP Data Router by the Data File Collector (VES 7.1)",
+ "type": "object",
+
+ "properties": {
+ "productName": {
+ "description": "from the FileReady event eventName",
+ "type": "string"
+ },
+
+ "vendorName": {
+ "description": "from the FileReady event eventName",
+ "type": "string"
+ },
+
+ "lastEpochMicrosec": {
+ "description": "the latest unix epoch time associated with the FileReady event",
+ "type": "string"
+ },
+
+ "sourceName": {
+ "description": "the name of the entity experiencing the event",
+ "type": "string"
+ },
+
+ "startEpochMicrosec": {
+ "description": "the earliest unix epoch time associated with the FileReady event",
+ "type": "string"
+ },
+
+ "timeZoneOffset": {
+ "description": "the timezone offset from UTC",
+ "type": "string",
+ "pattern": "^(?:(?:[a-zA-Z]{3})[+-](?:[01][0-9]|2[0-3]).[0-5][0-9])$"
+ },
+
+ "location": {
+ "description": "follows the format <protocol>://<ip address>:<port>/<path>/<filename>, the IP address is the node ip address, the port of the protocol server",
+ "type": "string"
+ },
+
+ "compression": {
+ "description": "specifies if the file is compressed",
+ "type": "string",
+ "enum": [ "gzip" ]
+ },
+
+ "fileFormatType": {
+ "description": "the file format",
+ "type": "string"
+ },
+
+ "fileFormatVersion": {
+ "description": "the version of the file format",
+ "type": "string"
+ }
+ },
+
+ "required": [
+ "productName",
+ "vendorName",
+ "lastEpochMicrosec",
+ "sourceName",
+ "startEpochMicrosec",
+ "timeZoneOffset",
+ "location",
+ "compression",
+ "fileFormatType",
+ "fileFormatVersion"
+ ]
+}
diff --git a/tests/usecases/5G-bulkpm/resources/JsonValidatorLibrary.py b/tests/usecases/5G-bulkpm/resources/JsonValidatorLibrary.py
new file mode 100644
index 00000000..12d5d856
--- /dev/null
+++ b/tests/usecases/5G-bulkpm/resources/JsonValidatorLibrary.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+
+import sys
+import logging
+from simplejson import load
+from jsonschema import validate, ValidationError, SchemaError
+
+
+class JsonValidatorLibrary(object):
+
+ def __init__(self):
+ pass
+
+ def validate(self, schemaPath, jsonPath):
+ logging.info("Schema path: " + schemaPath)
+ logging.info("JSON path: " + jsonPath)
+ schema = None
+ data = None
+ try:
+ schema = load(open(schemaPath, 'r'))
+ data = load(open(jsonPath, 'r'))
+ except (IOError, ValueError, OSError) as e:
+ logging.error(e.message)
+ return 1
+
+ try:
+ validate(data, schema)
+ except (ValidationError, SchemaError) as e:
+ logging.error(e.message)
+ return 1
+
+ # logger.log("JSON validation successful")
+ print("JSON validation successful")
+ return 0
+
+if __name__ == '__main__':
+ lib = JsonValidatorLibrary()
+ # sys.exit(JsonValidatorLibrary().validate(sys.argv[1], sys.argv[2]))
diff --git a/tests/usecases/5G-bulkpm/resources/bulkpm_keywords.robot b/tests/usecases/5G-bulkpm/resources/bulkpm_keywords.robot
index 6859ea0d..9ef56c83 100644
--- a/tests/usecases/5G-bulkpm/resources/bulkpm_keywords.robot
+++ b/tests/usecases/5G-bulkpm/resources/bulkpm_keywords.robot
@@ -2,6 +2,7 @@
Documentation The main interface for interacting with VES. It handles low level stuff like managing the http request library and VES required fields
Library RequestsLibrary
Library ../resources/xNFLibrary.py
+Library ../resources/JsonValidatorLibrary.py
Library OperatingSystem
Library Collections
Library requests
diff --git a/tests/vvp/sanity/__init__.robot b/tests/vvp/sanity/__init__.robot
deleted file mode 100644
index 6bc0362e..00000000
--- a/tests/vvp/sanity/__init__.robot
+++ /dev/null
@@ -1,2 +0,0 @@
-*** Settings ***
-Documentation VVP - HealthCheck
diff --git a/tests/vvp/sanity/test1.robot b/tests/vvp/sanity/test1.robot
deleted file mode 100644
index 27612fdb..00000000
--- a/tests/vvp/sanity/test1.robot
+++ /dev/null
@@ -1,19 +0,0 @@
-*** Settings ***
-Library Collections
-Library OperatingSystem
-Library RequestsLibrary
-Library json
-
-# http://localhost:9090/vvp/v1/engmgr/vendors
-# vvp-engagementmgr
-
-*** Test Cases ***
-Get Requests health check ok
- [Tags] get
- CreateSession vvp-engagementmgr http://localhost:9090
- ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
- ${resp}= Get Request vvp-engagementmgr /vvp/v1/engmgr/vendors headers=&{headers}
- Should Be Equal As Strings ${resp.status_code} 200
- @{ITEMS}= Copy List ${resp.json()}
- : FOR ${ELEMENT} IN @{ITEMS}
- \ Log ${ELEMENT['uuid']} ${ELEMENT['name']}